1/* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This file handles the generation of rtl code from tree structure
22 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
23 It also creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 The functions whose names start with `expand_' are called by the
27 parser to generate RTL instructions for various kinds of constructs.
28
29 Some control and binding constructs require calling several such
30 functions at different times. For example, a simple if-then
31 is expanded by calling `expand_start_cond' (with the condition-expression
32 as argument) before parsing the then-clause and calling `expand_end_cond'
33 after parsing the then-clause. */
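/* A hedged sketch of that calling sequence (illustrative only, using
   functions defined later in this file): to compile `if (c) s1; else s2;'
   a front end might call

       expand_start_cond (c, 0);     -- emit test of C; jump if false
       ... expand statement S1 ...   -- then-clause
       expand_start_else ();         -- jump to endif; define else label
       ... expand statement S2 ...   -- else-clause
       expand_end_cond ();           -- define the endif label
*/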
34
35#include "config.h"
36
37#include <stdio.h>
38#include <ctype.h>
39
40#include "rtl.h"
41#include "tree.h"
42#include "flags.h"
43#include "function.h"
44#include "insn-flags.h"
45#include "insn-config.h"
46#include "insn-codes.h"
47#include "expr.h"
48#include "hard-reg-set.h"
49#include "obstack.h"
50#include "loop.h"
51#include "recog.h"
52
53#define obstack_chunk_alloc xmalloc
54#define obstack_chunk_free free
55struct obstack stmt_obstack;
56
 57extern char *xmalloc ();
58extern void free ();
59
60/* Filename and line number of last line-number note,
61 whether we actually emitted it or not. */
62char *emit_filename;
63int emit_lineno;
64
65/* Nonzero if within a ({...}) grouping, in which case we must
66 always compute a value for each expr-stmt in case it is the last one. */
67
68int expr_stmts_for_value;
69
70/* Each time we expand an expression-statement,
71 record the expr's type and its RTL value here. */
72
73static tree last_expr_type;
74static rtx last_expr_value;
75
76/* Number of binding contours started so far in this function. */
77
78int block_start_count;
79
80/* Nonzero if function being compiled needs to
81 return the address of where it has put a structure value. */
82
83extern int current_function_returns_pcc_struct;
84
85/* Label that will go on parm cleanup code, if any.
86 Jumping to this label runs cleanup code for parameters, if
87 such code must be run. Following this code is the logical return label. */
88
89extern rtx cleanup_label;
90
91/* Label that will go on function epilogue.
92 Jumping to this label serves as a "return" instruction
93 on machines which require execution of the epilogue on all returns. */
94
95extern rtx return_label;
96
97/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
 98   So we can mark them all live at the end of the function, if not optimizing. */
99extern rtx save_expr_regs;
100
101/* Offset to end of allocated area of stack frame.
102 If stack grows down, this is the address of the last stack slot allocated.
103 If stack grows up, this is the address for the next slot. */
104extern int frame_offset;
105
106/* Label to jump back to for tail recursion, or 0 if we have
107 not yet needed one for this function. */
108extern rtx tail_recursion_label;
109
110/* Place after which to insert the tail_recursion_label if we need one. */
111extern rtx tail_recursion_reentry;
112
113/* Location at which to save the argument pointer if it will need to be
114 referenced. There are two cases where this is done: if nonlocal gotos
 115   exist, or if vars stored at an offset from the argument pointer
 116   will be needed by inner routines. */
117
118extern rtx arg_pointer_save_area;
119
120/* Chain of all RTL_EXPRs that have insns in them. */
121extern tree rtl_expr_chain;
122
123#if 0 /* Turned off because 0 seems to work just as well. */
124/* Cleanup lists are required for binding levels regardless of whether
125 that binding level has cleanups or not. This node serves as the
126 cleanup list whenever an empty list is required. */
127static tree empty_cleanup_list;
128#endif
129\f
130/* Functions and data structures for expanding case statements. */
131
132/* Case label structure, used to hold info on labels within case
133 statements. We handle "range" labels; for a single-value label
134 as in C, the high and low limits are the same.
135
136 A chain of case nodes is initially maintained via the RIGHT fields
137 in the nodes. Nodes with higher case values are later in the list.
138
139 Switch statements can be output in one of two forms. A branch table
140 is used if there are more than a few labels and the labels are dense
141 within the range between the smallest and largest case value. If a
142 branch table is used, no further manipulations are done with the case
143 node chain.
144
145 The alternative to the use of a branch table is to generate a series
146 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
147 and PARENT fields to hold a binary tree. Initially the tree is
148 totally unbalanced, with everything on the right. We balance the tree
149 with nodes on the left having lower case values than the parent
150 and nodes on the right having higher values. We then output the tree
151 in order. */
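/* For illustration (a sketch, not data from a real compilation):
   `case 1: case 3: case 5: case 7:' is first chained through the RIGHT
   fields as 1 -> 3 -> 5 -> 7; after balancing, the same nodes might form

              5
            3   7
          1

   so that emit_case_nodes can emit a logarithmic number of
   compare-and-jump insns instead of a linear chain.  */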
152
153struct case_node
154{
155 struct case_node *left; /* Left son in binary tree */
156 struct case_node *right; /* Right son in binary tree; also node chain */
157 struct case_node *parent; /* Parent of node in binary tree */
158 tree low; /* Lowest index value for this label */
159 tree high; /* Highest index value for this label */
160 tree code_label; /* Label to jump to when node matches */
161};
162
163typedef struct case_node case_node;
164typedef struct case_node *case_node_ptr;
165
166/* These are used by estimate_case_costs and balance_case_nodes. */
167
168/* This must be a signed type, and non-ANSI compilers lack signed char. */
169static short *cost_table;
170static int use_cost_table;
171
172static int estimate_case_costs ();
173static void balance_case_nodes ();
174static void emit_case_nodes ();
175static void group_case_nodes ();
176static void emit_jump_if_reachable ();
177
178static int warn_if_unused_value ();
179static void expand_goto_internal ();
180static int expand_fixup ();
181void fixup_gotos ();
182void free_temp_slots ();
183static void expand_cleanups ();
184static void fixup_cleanups ();
185static void expand_null_return_1 ();
186static int tail_recursion_args ();
187static void do_jump_if_equal ();
188\f
189/* Stack of control and binding constructs we are currently inside.
190
191 These constructs begin when you call `expand_start_WHATEVER'
192 and end when you call `expand_end_WHATEVER'. This stack records
193 info about how the construct began that tells the end-function
194 what to do. It also may provide information about the construct
195 to alter the behavior of other constructs within the body.
196 For example, they may affect the behavior of C `break' and `continue'.
197
198 Each construct gets one `struct nesting' object.
199 All of these objects are chained through the `all' field.
200 `nesting_stack' points to the first object (innermost construct).
201 The position of an entry on `nesting_stack' is in its `depth' field.
202
203 Each type of construct has its own individual stack.
204 For example, loops have `loop_stack'. Each object points to the
205 next object of the same type through the `next' field.
206
207 Some constructs are visible to `break' exit-statements and others
208 are not. Which constructs are visible depends on the language.
209 Therefore, the data structure allows each construct to be visible
210 or not, according to the args given when the construct is started.
211 The construct is visible if the `exit_label' field is non-null.
212 In that case, the value should be a CODE_LABEL rtx. */
213
214struct nesting
215{
216 struct nesting *all;
217 struct nesting *next;
218 int depth;
219 rtx exit_label;
220 union
221 {
222 /* For conds (if-then and if-then-else statements). */
223 struct
224 {
225 /* Label for the end of the if construct.
226 There is none if EXITFLAG was not set
227 and no `else' has been seen yet. */
228 rtx endif_label;
229 /* Label for the end of this alternative.
230 This may be the end of the if or the next else/elseif. */
231 rtx next_label;
232 } cond;
233 /* For loops. */
234 struct
235 {
236 /* Label at the top of the loop; place to loop back to. */
237 rtx start_label;
238 /* Label at the end of the whole construct. */
239 rtx end_label;
240 /* Label for `continue' statement to jump to;
241 this is in front of the stepper of the loop. */
242 rtx continue_label;
243 } loop;
244 /* For variable binding contours. */
245 struct
246 {
247 /* Sequence number of this binding contour within the function,
248 in order of entry. */
249 int block_start_count;
250 /* Nonzero => value to restore stack to on exit. */
251 rtx stack_level;
252 /* The NOTE that starts this contour.
253 Used by expand_goto to check whether the destination
254 is within each contour or not. */
255 rtx first_insn;
256 /* Innermost containing binding contour that has a stack level. */
257 struct nesting *innermost_stack_block;
258 /* List of cleanups to be run on exit from this contour.
259 This is a list of expressions to be evaluated.
260 The TREE_PURPOSE of each link is the ..._DECL node
261 which the cleanup pertains to. */
262 tree cleanups;
263 /* List of cleanup-lists of blocks containing this block,
264 as they were at the locus where this block appears.
265 There is an element for each containing block,
266 ordered innermost containing block first.
267 The tail of this list can be 0 (was empty_cleanup_list),
268 if all remaining elements would be empty lists.
269 The element's TREE_VALUE is the cleanup-list of that block,
270 which may be null. */
271 tree outer_cleanups;
272 /* Chain of labels defined inside this binding contour.
273 For contours that have stack levels or cleanups. */
274 struct label_chain *label_chain;
275 /* Number of function calls seen, as of start of this block. */
276 int function_call_count;
277 } block;
278 /* For switch (C) or case (Pascal) statements,
279 and also for dummies (see `expand_start_case_dummy'). */
280 struct
281 {
282 /* The insn after which the case dispatch should finally
283 be emitted. Zero for a dummy. */
284 rtx start;
285 /* A list of case labels, kept in ascending order by value
286 as the list is built.
287 During expand_end_case, this list may be rearranged into a
288 nearly balanced binary tree. */
289 struct case_node *case_list;
290 /* Label to jump to if no case matches. */
291 tree default_label;
292 /* The expression to be dispatched on. */
293 tree index_expr;
294 /* Type that INDEX_EXPR should be converted to. */
295 tree nominal_type;
296 /* Number of range exprs in case statement. */
297 int num_ranges;
298 /* Name of this kind of statement, for warnings. */
299 char *printname;
300 /* Nonzero if a case label has been seen in this case stmt. */
301 char seenlabel;
302 } case_stmt;
303 /* For exception contours. */
304 struct
305 {
306 /* List of exceptions raised. This is a TREE_LIST
307 of whatever you want. */
308 tree raised;
309 /* List of exceptions caught. This is also a TREE_LIST
310 of whatever you want. As a special case, it has the
311 value `void_type_node' if it handles default exceptions. */
312 tree handled;
313
314 /* First insn of TRY block, in case resumptive model is needed. */
315 rtx first_insn;
316 /* Label for the catch clauses. */
317 rtx except_label;
318 /* Label for unhandled exceptions. */
319 rtx unhandled_label;
320 /* Label at the end of whole construct. */
321 rtx after_label;
322 /* Label which "escapes" the exception construct.
323 Like EXIT_LABEL for BREAK construct, but for exceptions. */
324 rtx escape_label;
325 } except_stmt;
326 } data;
327};
328
329/* Chain of all pending binding contours. */
330struct nesting *block_stack;
331
332/* Chain of all pending binding contours that restore stack levels
333 or have cleanups. */
334struct nesting *stack_block_stack;
335
336/* Chain of all pending conditional statements. */
337struct nesting *cond_stack;
338
339/* Chain of all pending loops. */
340struct nesting *loop_stack;
341
342/* Chain of all pending case or switch statements. */
343struct nesting *case_stack;
344
345/* Chain of all pending exception contours. */
346struct nesting *except_stack;
347
348/* Separate chain including all of the above,
349 chained through the `all' field. */
350struct nesting *nesting_stack;
351
352/* Number of entries on nesting_stack now. */
353int nesting_depth;
354
355/* Allocate and return a new `struct nesting'. */
356
357#define ALLOC_NESTING() \
358 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
359
360/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
361 and pop off `nesting_stack' down to the same level. */
362
363#define POPSTACK(STACK) \
364do { int initial_depth = nesting_stack->depth; \
365 do { struct nesting *this = STACK; \
366 STACK = this->next; \
367 nesting_stack = this->all; \
368 nesting_depth = this->depth; \
369 obstack_free (&stmt_obstack, this); } \
370 while (nesting_depth > initial_depth); } while (0)
371\f
372/* In some cases it is impossible to generate code for a forward goto
373 until the label definition is seen. This happens when it may be necessary
374 for the goto to reset the stack pointer: we don't yet know how to do that.
375 So expand_goto puts an entry on this fixup list.
376 Each time a binding contour that resets the stack is exited,
377 we check each fixup.
378 If the target label has now been defined, we can insert the proper code. */
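/* An example of GNU C needing such a fixup (illustrative):

       {
         int v[n];      -- variable-sized, so this block resets the
         ...               stack level on exit
         if (bad)
           goto fail;   -- forward goto; stack level at FAIL not yet known
         ...
       }
      fail: ;

   The goto is recorded as a fixup; once the block is exited and the
   label is defined, the stack-pointer restore is inserted before the
   jump.  */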
379
380struct goto_fixup
381{
382 /* Points to following fixup. */
383 struct goto_fixup *next;
384 /* Points to the insn before the jump insn.
385 If more code must be inserted, it goes after this insn. */
386 rtx before_jump;
387 /* The LABEL_DECL that this jump is jumping to, or 0
388 for break, continue or return. */
389 tree target;
390 /* The CODE_LABEL rtx that this is jumping to. */
391 rtx target_rtl;
392 /* Number of binding contours started in current function
393 before the label reference. */
394 int block_start_count;
395 /* The outermost stack level that should be restored for this jump.
396 Each time a binding contour that resets the stack is exited,
397 if the target label is *not* yet defined, this slot is updated. */
398 rtx stack_level;
399 /* List of lists of cleanup expressions to be run by this goto.
400 There is one element for each block that this goto is within.
401 The tail of this list can be 0 (was empty_cleanup_list),
402 if all remaining elements would be empty.
403 The TREE_VALUE contains the cleanup list of that block as of the
404 time this goto was seen.
405 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
406 tree cleanup_list_list;
407};
408
409static struct goto_fixup *goto_fixup_chain;
410
411/* Within any binding contour that must restore a stack level,
412 all labels are recorded with a chain of these structures. */
413
414struct label_chain
415{
416 /* Points to following fixup. */
417 struct label_chain *next;
418 tree label;
419};
420\f
421void
422init_stmt ()
423{
424 gcc_obstack_init (&stmt_obstack);
425#if 0
426 empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
427#endif
428}
429
430void
431init_stmt_for_function ()
432{
433 /* We are not currently within any block, conditional, loop or case. */
434 block_stack = 0;
435 loop_stack = 0;
436 case_stack = 0;
437 cond_stack = 0;
438 nesting_stack = 0;
439 nesting_depth = 0;
440
441 block_start_count = 0;
442
443 /* No gotos have been expanded yet. */
444 goto_fixup_chain = 0;
445
446 /* We are not processing a ({...}) grouping. */
447 expr_stmts_for_value = 0;
448 last_expr_type = 0;
449}
450
451void
452save_stmt_status (p)
453 struct function *p;
454{
455 p->block_stack = block_stack;
456 p->stack_block_stack = stack_block_stack;
457 p->cond_stack = cond_stack;
458 p->loop_stack = loop_stack;
459 p->case_stack = case_stack;
460 p->nesting_stack = nesting_stack;
461 p->nesting_depth = nesting_depth;
462 p->block_start_count = block_start_count;
463 p->last_expr_type = last_expr_type;
464 p->last_expr_value = last_expr_value;
465 p->expr_stmts_for_value = expr_stmts_for_value;
466 p->emit_filename = emit_filename;
467 p->emit_lineno = emit_lineno;
468 p->goto_fixup_chain = goto_fixup_chain;
469}
470
471void
472restore_stmt_status (p)
473 struct function *p;
474{
475 block_stack = p->block_stack;
476 stack_block_stack = p->stack_block_stack;
477 cond_stack = p->cond_stack;
478 loop_stack = p->loop_stack;
479 case_stack = p->case_stack;
480 nesting_stack = p->nesting_stack;
481 nesting_depth = p->nesting_depth;
482 block_start_count = p->block_start_count;
483 last_expr_type = p->last_expr_type;
484 last_expr_value = p->last_expr_value;
485 expr_stmts_for_value = p->expr_stmts_for_value;
486 emit_filename = p->emit_filename;
487 emit_lineno = p->emit_lineno;
488 goto_fixup_chain = p->goto_fixup_chain;
489}
490\f
491/* Emit a no-op instruction. */
492
493void
494emit_nop ()
495{
496 rtx last_insn = get_last_insn ();
497 if (!optimize
498 && (GET_CODE (last_insn) == CODE_LABEL
499 || prev_real_insn (last_insn) == 0))
500 emit_insn (gen_nop ());
501}
502\f
503/* Return the rtx-label that corresponds to a LABEL_DECL,
504 creating it if necessary. */
505
506rtx
507label_rtx (label)
508 tree label;
509{
510 if (TREE_CODE (label) != LABEL_DECL)
511 abort ();
512
513 if (DECL_RTL (label))
514 return DECL_RTL (label);
515
516 return DECL_RTL (label) = gen_label_rtx ();
517}
518
519/* Add an unconditional jump to LABEL as the next sequential instruction. */
520
521void
522emit_jump (label)
523 rtx label;
524{
525 do_pending_stack_adjust ();
526 emit_jump_insn (gen_jump (label));
527 emit_barrier ();
528}
529
530/* Emit code to jump to the address
531 specified by the pointer expression EXP. */
532
533void
534expand_computed_goto (exp)
535 tree exp;
536{
537 rtx x = expand_expr (exp, 0, VOIDmode, 0);
 538  emit_queue ();
539 emit_indirect_jump (x);
540 emit_barrier ();
541}
542\f
543/* Handle goto statements and the labels that they can go to. */
544
545/* Specify the location in the RTL code of a label LABEL,
546 which is a LABEL_DECL tree node.
547
548 This is used for the kind of label that the user can jump to with a
549 goto statement, and for alternatives of a switch or case statement.
550 RTL labels generated for loops and conditionals don't go through here;
551 they are generated directly at the RTL level, by other functions below.
552
553 Note that this has nothing to do with defining label *names*.
554 Languages vary in how they do that and what that even means. */
555
556void
557expand_label (label)
558 tree label;
559{
560 struct label_chain *p;
561
562 do_pending_stack_adjust ();
563 emit_label (label_rtx (label));
564 if (DECL_NAME (label))
565 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
566
567 if (stack_block_stack != 0)
568 {
569 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
570 p->next = stack_block_stack->data.block.label_chain;
571 stack_block_stack->data.block.label_chain = p;
572 p->label = label;
573 }
574}
575
576/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
577 from nested functions. */
578
579void
580declare_nonlocal_label (label)
581 tree label;
582{
583 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
584 LABEL_PRESERVE_P (label_rtx (label)) = 1;
585 if (nonlocal_goto_handler_slot == 0)
586 {
587 nonlocal_goto_handler_slot
588 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
589 nonlocal_goto_stack_level
590 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
591 emit_insn_before (gen_move_insn (nonlocal_goto_stack_level,
592 stack_pointer_rtx),
593 tail_recursion_reentry);
594 }
595}
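
/* For reference, a hedged sketch of the GNU C construct that uses this
   (whether the front end declares the label nonlocal exactly this way
   is an assumption here):

       void outer ()
       {
         void inner () { goto out; }
         inner ();
        out: ;
       }

   `out' is declared nonlocal so that the goto in INNER, expanded by
   expand_goto below, can restore OUTER's frame and stack pointers.  */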
596
597/* Generate RTL code for a `goto' statement with target label LABEL.
598 LABEL should be a LABEL_DECL tree node that was or will later be
599 defined with `expand_label'. */
600
601void
602expand_goto (label)
603 tree label;
604{
605 /* Check for a nonlocal goto to a containing function. */
606 tree context = decl_function_context (label);
607 if (context != 0 && context != current_function_decl)
608 {
609 struct function *p = find_function_data (context);
610 rtx temp;
611 p->has_nonlocal_label = 1;
612#if HAVE_nonlocal_goto
613 if (HAVE_nonlocal_goto)
614 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
615 p->nonlocal_goto_handler_slot,
616 p->nonlocal_goto_stack_level,
617 gen_rtx (LABEL_REF, Pmode,
618 label_rtx (label))));
619 else
620#endif
621 {
622 /* Restore frame pointer for containing function.
623 This sets the actual hard register used for the frame pointer
624 to the location of the function's incoming static chain info.
625 The non-local goto handler will then adjust it to contain the
626 proper value and reload the argument pointer, if needed. */
627 emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));
628 /* Get addr of containing function's current nonlocal goto handler,
629 which will do any cleanups and then jump to the label. */
630 temp = copy_to_reg (p->nonlocal_goto_handler_slot);
631 /* Restore the stack pointer. Note this uses fp just restored. */
632 emit_move_insn (stack_pointer_rtx, p->nonlocal_goto_stack_level);
633 /* Put in the static chain register the nonlocal label address. */
634 emit_move_insn (static_chain_rtx,
635 gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
636 /* USE of frame_pointer_rtx added for consistency; not clear if
637 really needed. */
638 emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
639 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
640 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
641 emit_indirect_jump (temp);
642 }
643 }
644 else
645 expand_goto_internal (label, label_rtx (label), 0);
646}
647
648/* Generate RTL code for a `goto' statement with target label BODY.
649 LABEL should be a LABEL_REF.
650 LAST_INSN, if non-0, is the rtx we should consider as the last
651 insn emitted (for the purposes of cleaning up a return). */
652
653static void
654expand_goto_internal (body, label, last_insn)
655 tree body;
656 rtx label;
657 rtx last_insn;
658{
659 struct nesting *block;
660 rtx stack_level = 0;
661
662 if (GET_CODE (label) != CODE_LABEL)
663 abort ();
664
665 /* If label has already been defined, we can tell now
666 whether and how we must alter the stack level. */
667
668 if (PREV_INSN (label) != 0)
669 {
670 /* Find the innermost pending block that contains the label.
671 (Check containment by comparing insn-uids.)
672 Then restore the outermost stack level within that block,
673 and do cleanups of all blocks contained in it. */
674 for (block = block_stack; block; block = block->next)
675 {
676 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
677 break;
678 if (block->data.block.stack_level != 0)
679 stack_level = block->data.block.stack_level;
680 /* Execute the cleanups for blocks we are exiting. */
681 if (block->data.block.cleanups != 0)
682 {
683 expand_cleanups (block->data.block.cleanups, 0);
684 do_pending_stack_adjust ();
685 }
686 }
687
688 if (stack_level)
689 {
690 /* Ensure stack adjust isn't done by emit_jump, as this would clobber
691 the stack pointer. This one should be deleted as dead by flow. */
692 clear_pending_stack_adjust ();
693 do_pending_stack_adjust ();
694 emit_move_insn (stack_pointer_rtx, stack_level);
695 }
696
697 if (body != 0 && DECL_TOO_LATE (body))
698 error ("jump to `%s' invalidly jumps into binding contour",
699 IDENTIFIER_POINTER (DECL_NAME (body)));
700 }
701 /* Label not yet defined: may need to put this goto
702 on the fixup list. */
703 else if (! expand_fixup (body, label, last_insn))
704 {
705 /* No fixup needed. Record that the label is the target
706 of at least one goto that has no fixup. */
707 if (body != 0)
708 TREE_ADDRESSABLE (body) = 1;
709 }
710
711 emit_jump (label);
712}
713\f
714/* Generate if necessary a fixup for a goto
715 whose target label in tree structure (if any) is TREE_LABEL
716 and whose target in rtl is RTL_LABEL.
717
718 If LAST_INSN is nonzero, we pretend that the jump appears
719 after insn LAST_INSN instead of at the current point in the insn stream.
720
721 The fixup will be used later to insert insns at this point
722 to restore the stack level as appropriate for the target label.
723
724 Value is nonzero if a fixup is made. */
725
726static int
727expand_fixup (tree_label, rtl_label, last_insn)
728 tree tree_label;
729 rtx rtl_label;
730 rtx last_insn;
731{
732 struct nesting *block, *end_block;
733
734 /* See if we can recognize which block the label will be output in.
735 This is possible in some very common cases.
736 If we succeed, set END_BLOCK to that block.
737 Otherwise, set it to 0. */
738
739 if (cond_stack
740 && (rtl_label == cond_stack->data.cond.endif_label
741 || rtl_label == cond_stack->data.cond.next_label))
742 end_block = cond_stack;
743 /* If we are in a loop, recognize certain labels which
744 are likely targets. This reduces the number of fixups
745 we need to create. */
746 else if (loop_stack
747 && (rtl_label == loop_stack->data.loop.start_label
748 || rtl_label == loop_stack->data.loop.end_label
749 || rtl_label == loop_stack->data.loop.continue_label))
750 end_block = loop_stack;
751 else
752 end_block = 0;
753
754 /* Now set END_BLOCK to the binding level to which we will return. */
755
756 if (end_block)
757 {
758 struct nesting *next_block = end_block->all;
759 block = block_stack;
760
761 /* First see if the END_BLOCK is inside the innermost binding level.
762 If so, then no cleanups or stack levels are relevant. */
763 while (next_block && next_block != block)
764 next_block = next_block->all;
765
766 if (next_block)
767 return 0;
768
769 /* Otherwise, set END_BLOCK to the innermost binding level
770 which is outside the relevant control-structure nesting. */
771 next_block = block_stack->next;
772 for (block = block_stack; block != end_block; block = block->all)
773 if (block == next_block)
774 next_block = next_block->next;
775 end_block = next_block;
776 }
777
778 /* Does any containing block have a stack level or cleanups?
779 If not, no fixup is needed, and that is the normal case
780 (the only case, for standard C). */
781 for (block = block_stack; block != end_block; block = block->next)
782 if (block->data.block.stack_level != 0
783 || block->data.block.cleanups != 0)
784 break;
785
786 if (block != end_block)
787 {
788 /* Ok, a fixup is needed. Add a fixup to the list of such. */
789 struct goto_fixup *fixup
790 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
791 /* In case an old stack level is restored, make sure that comes
792 after any pending stack adjust. */
793 /* ?? If the fixup isn't to come at the present position,
794 doing the stack adjust here isn't useful. Doing it with our
795 settings at that location isn't useful either. Let's hope
796 someone does it! */
797 if (last_insn == 0)
798 do_pending_stack_adjust ();
799 fixup->before_jump = last_insn ? last_insn : get_last_insn ();
800 fixup->target = tree_label;
801 fixup->target_rtl = rtl_label;
802 fixup->block_start_count = block_start_count;
803 fixup->stack_level = 0;
804 fixup->cleanup_list_list
805 = (((block->data.block.outer_cleanups
806#if 0
807 && block->data.block.outer_cleanups != empty_cleanup_list
808#endif
809 )
810 || block->data.block.cleanups)
811 ? tree_cons (0, block->data.block.cleanups,
812 block->data.block.outer_cleanups)
813 : 0);
814 fixup->next = goto_fixup_chain;
815 goto_fixup_chain = fixup;
816 }
817
818 return block != 0;
819}
820
821/* When exiting a binding contour, process all pending gotos requiring fixups.
822 THISBLOCK is the structure that describes the block being exited.
823 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
824 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
825 FIRST_INSN is the insn that began this contour.
826
827 Gotos that jump out of this contour must restore the
828 stack level and do the cleanups before actually jumping.
829
 830   DONT_JUMP_IN nonzero means report an error if there is a jump into this
831 contour from before the beginning of the contour.
832 This is also done if STACK_LEVEL is nonzero. */
833
834void
835fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
836 struct nesting *thisblock;
837 rtx stack_level;
838 tree cleanup_list;
839 rtx first_insn;
840 int dont_jump_in;
841{
842 register struct goto_fixup *f, *prev;
843
844 /* F is the fixup we are considering; PREV is the previous one. */
845 /* We run this loop in two passes so that cleanups of exited blocks
846 are run first, and blocks that are exited are marked so
847 afterwards. */
848
849 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
850 {
851 /* Test for a fixup that is inactive because it is already handled. */
852 if (f->before_jump == 0)
853 {
854 /* Delete inactive fixup from the chain, if that is easy to do. */
855 if (prev != 0)
856 prev->next = f->next;
857 }
858 /* Has this fixup's target label been defined?
859 If so, we can finalize it. */
860 else if (PREV_INSN (f->target_rtl) != 0)
861 {
862 /* Get the first non-label after the label
863 this goto jumps to. If that's before this scope begins,
864 we don't have a jump into the scope. */
865 rtx after_label = f->target_rtl;
866 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
867 after_label = NEXT_INSN (after_label);
868
869 /* If this fixup jumped into this contour from before the beginning
870 of this contour, report an error. */
871 /* ??? Bug: this does not detect jumping in through intermediate
872 blocks that have stack levels or cleanups.
873 It detects only a problem with the innermost block
874 around the label. */
875 if (f->target != 0
876 && (dont_jump_in || stack_level || cleanup_list)
877 /* If AFTER_LABEL is 0, it means the jump goes to the end
878 of the rtl, which means it jumps into this scope. */
879 && (after_label == 0
880 || INSN_UID (first_insn) < INSN_UID (after_label))
881 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
882 && ! TREE_REGDECL (f->target))
883 {
884 error_with_decl (f->target,
885 "label `%s' used before containing binding contour");
886 /* Prevent multiple errors for one label. */
887 TREE_REGDECL (f->target) = 1;
888 }
889
890 /* Execute cleanups for blocks this jump exits. */
891 if (f->cleanup_list_list)
892 {
893 tree lists;
894 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
895 /* Marked elements correspond to blocks that have been closed.
896 Do their cleanups. */
897 if (TREE_ADDRESSABLE (lists)
898 && TREE_VALUE (lists) != 0)
899 fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
900 }
901
902 /* Restore stack level for the biggest contour that this
903 jump jumps out of. */
904 if (f->stack_level)
905 emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level),
906 f->before_jump);
907 f->before_jump = 0;
908 }
909 }
910
911 /* Mark the cleanups of exited blocks so that they are executed
912 by the code above. */
913 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
914 if (f->before_jump != 0
915 && PREV_INSN (f->target_rtl) == 0
916 /* Label has still not appeared. If we are exiting a block with
917 a stack level to restore, that started before the fixup,
918 mark this stack level as needing restoration
919 when the fixup is later finalized.
920 Also mark the cleanup_list_list element for F
921 that corresponds to this block, so that ultimately
922 this block's cleanups will be executed by the code above. */
923 && thisblock != 0
924 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
925 it means the label is undefined. That's erroneous, but possible. */
926 && (thisblock->data.block.block_start_count
927 <= f->block_start_count))
928 {
929 tree lists = f->cleanup_list_list;
930 for (; lists; lists = TREE_CHAIN (lists))
931 /* If the following elt. corresponds to our containing block
932 then the elt. must be for this block. */
933 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
934 TREE_ADDRESSABLE (lists) = 1;
935
936 if (stack_level)
937 f->stack_level = stack_level;
938 }
939}
940\f
941/* Generate RTL for an asm statement (explicit assembler code).
942 BODY is a STRING_CST node containing the assembler code text,
943 or an ADDR_EXPR containing a STRING_CST. */
944
945void
946expand_asm (body)
947 tree body;
948{
949 if (TREE_CODE (body) == ADDR_EXPR)
950 body = TREE_OPERAND (body, 0);
951
952 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
953 TREE_STRING_POINTER (body)));
954 last_expr_type = 0;
955}
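
/* A hedged usage sketch: the C statement `asm ("nop");' reaches this
   function with BODY the STRING_CST "nop" and simply becomes an
   ASM_INPUT insn carrying that text.  */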
956
957/* Generate RTL for an asm statement with arguments.
958 STRING is the instruction template.
959 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
960 Each output or input has an expression in the TREE_VALUE and
961 a constraint-string in the TREE_PURPOSE.
962 CLOBBERS is a list of STRING_CST nodes each naming a hard register
963 that is clobbered by this insn.
964
965 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
966 Some elements of OUTPUTS may be replaced with trees representing temporary
967 values. The caller should copy those temporary values to the originally
968 specified lvalues.
969
970 VOL nonzero means the insn is volatile; don't optimize it. */
971
972void
973expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
974 tree string, outputs, inputs, clobbers;
975 int vol;
976 char *filename;
977 int line;
978{
979 rtvec argvec, constraints;
980 rtx body;
981 int ninputs = list_length (inputs);
982 int noutputs = list_length (outputs);
983 int nclobbers = list_length (clobbers);
984 tree tail;
985 register int i;
986 /* Vector of RTX's of evaluated output operands. */
987 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
988 /* The insn we have emitted. */
989 rtx insn;
990
991 last_expr_type = 0;
992
993 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
994 {
995 tree val = TREE_VALUE (tail);
996 tree val1;
997 int j;
998 int found_equal;
999
1000 /* If there's an erroneous arg, emit no insn. */
1001 if (TREE_TYPE (val) == error_mark_node)
1002 return;
1003
1004 /* Make sure constraint has `=' and does not have `+'. */
1005
1006 found_equal = 0;
1007 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1008 {
1009 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1010 {
1011 error ("output operand constraint contains `+'");
1012 return;
1013 }
1014 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
1015 found_equal = 1;
1016 }
1017 if (! found_equal)
1018 {
1019 error ("output operand constraint lacks `='");
1020 return;
1021 }
1022
1023 /* If an output operand is not a variable or indirect ref,
1024 or a part of one,
1025 create a SAVE_EXPR which is a pseudo-reg
1026 to act as an intermediate temporary.
1027 Make the asm insn write into that, then copy it to
1028 the real output operand. */
1029
1030 while (TREE_CODE (val) == COMPONENT_REF
1031 || TREE_CODE (val) == ARRAY_REF)
1032 val = TREE_OPERAND (val, 0);
1033
1034 if (TREE_CODE (val) != VAR_DECL
1035 && TREE_CODE (val) != PARM_DECL
1036 && TREE_CODE (val) != INDIRECT_REF)
1037 TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
1038
1039 output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
1040 }
1041
1042 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1043 {
1044 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1045 return;
1046 }
1047
1048 /* Make vectors for the expression-rtx and constraint strings. */
1049
1050 argvec = rtvec_alloc (ninputs);
1051 constraints = rtvec_alloc (ninputs);
1052
1053 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1054 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1055 filename, line);
1056 MEM_VOLATILE_P (body) = vol;
1057
1058 /* Eval the inputs and put them into ARGVEC.
1059 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1060
1061 i = 0;
1062 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1063 {
1064 int j;
1065
1066 /* If there's an erroneous arg, emit no insn,
1067 because the ASM_INPUT would get VOIDmode
1068 and that could cause a crash in reload. */
1069 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1070 return;
1071 if (TREE_PURPOSE (tail) == NULL_TREE)
1072 {
1073 error ("hard register `%s' listed as input operand to `asm'",
1074 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1075 return;
1076 }
1077
1078 /* Make sure constraint has neither `=' nor `+'. */
1079
1080 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1081 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
1082 || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1083 {
1084 error ("input operand constraint contains `%c'",
1085 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1086 return;
1087 }
1088
1089 XVECEXP (body, 3, i) /* argvec */
1090 = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
1091 XVECEXP (body, 4, i) /* constraints */
1092 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1093 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1094 i++;
1095 }
1096
1097 /* Protect all the operands from the queue,
1098 now that they have all been evaluated. */
1099
1100 for (i = 0; i < ninputs; i++)
1101 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1102
1103 for (i = 0; i < noutputs; i++)
1104 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1105
1106 /* Now, for each output, construct an rtx
1107 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1108 ARGVEC CONSTRAINTS))
1109 If there is more than one, put them inside a PARALLEL. */
1110
1111 if (noutputs == 1 && nclobbers == 0)
1112 {
1113 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1114 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1115 }
1116 else if (noutputs == 0 && nclobbers == 0)
1117 {
1118 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1119 insn = emit_insn (body);
1120 }
1121 else
1122 {
1123 rtx obody = body;
1124 int num = noutputs;
1125 if (num == 0) num = 1;
1126 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1127
1128 /* For each output operand, store a SET. */
1129
1130 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1131 {
1132 XVECEXP (body, 0, i)
1133 = gen_rtx (SET, VOIDmode,
1134 output_rtx[i],
1135 gen_rtx (ASM_OPERANDS, VOIDmode,
1136 TREE_STRING_POINTER (string),
1137 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1138 i, argvec, constraints,
1139 filename, line));
1140 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1141 }
1142
1143 /* If there are no outputs (but there are some clobbers)
1144 store the bare ASM_OPERANDS into the PARALLEL. */
1145
1146 if (i == 0)
1147 XVECEXP (body, 0, i++) = obody;
1148
1149 /* Store (clobber REG) for each clobbered register specified. */
1150
1151 for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
1152 {
 1153      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
 1154      int j = decode_reg_name (regname);
 1155
 1156      if (j < 0)
 1157        {
1158 if (j == -3)
1159 continue;
1160
1161 error ("unknown register name `%s' in `asm'", regname);
1162 return;
1163 }
1164
1165 /* Use QImode since that's guaranteed to clobber just one reg. */
1166 XVECEXP (body, 0, i)
1167 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1168 }
1169
1170 insn = emit_insn (body);
1171 }
1172
1173 free_temp_slots ();
1174}
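
/* A hedged usage sketch for the function above: the GNU C statement

       asm volatile ("movl %1,%0" : "=r" (dst) : "g" (src));

   arrives with STRING the template, OUTPUTS a one-element list whose
   TREE_PURPOSE is the constraint string "=r" and TREE_VALUE the lvalue
   DST, INPUTS likewise for "g" and SRC, CLOBBERS empty, and VOL == 1.
   Since noutputs == 1 and nclobbers == 0, it is emitted as a single
   (set DST (asm_operands ...)) insn.  */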
1175\f
1176/* Generate RTL to evaluate the expression EXP
1177 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1178
1179void
1180expand_expr_stmt (exp)
1181 tree exp;
1182{
1183 /* If -W, warn about statements with no side effects,
1184 except for an explicit cast to void (e.g. for assert()), and
1185 except inside a ({...}) where they may be useful. */
1186 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1187 {
1188 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1189 && !(TREE_CODE (exp) == CONVERT_EXPR
1190 && TREE_TYPE (exp) == void_type_node))
1191 warning_with_file_and_line (emit_filename, emit_lineno,
1192 "statement with no effect");
1193 else if (warn_unused)
1194 warn_if_unused_value (exp);
1195 }
1196 last_expr_type = TREE_TYPE (exp);
1197 if (! flag_syntax_only)
1198 last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
1199 VOIDmode, 0);
1200
1201 /* If all we do is reference a volatile value in memory,
1202 copy it to a register to be sure it is actually touched. */
1203 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1204 && TREE_THIS_VOLATILE (exp))
1205 {
1206 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1207 copy_to_reg (last_expr_value);
1208 else
1209 {
1210 rtx lab = gen_label_rtx ();
1211
1212 /* Compare the value with itself to reference it. */
1213 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1214 expand_expr (TYPE_SIZE (last_expr_type),
1215 0, VOIDmode, 0),
1216 BLKmode, 0,
1217 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1218 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1219 emit_label (lab);
1220 }
1221 }
1222
1223 /* If this expression is part of a ({...}) and is in memory, we may have
1224 to preserve temporaries. */
1225 preserve_temp_slots (last_expr_value);
1226
1227 /* Free any temporaries used to evaluate this expression. Any temporary
1228 used as a result of this expression will already have been preserved
1229 above. */
1230 free_temp_slots ();
1231
1232 emit_queue ();
1233}
1234
1235/* Warn if EXP contains any computations whose results are not used.
1236 Return 1 if a warning is printed; 0 otherwise. */
1237
1238static int
1239warn_if_unused_value (exp)
1240 tree exp;
1241{
1242 if (TREE_USED (exp))
1243 return 0;
1244
1245 switch (TREE_CODE (exp))
1246 {
1247 case PREINCREMENT_EXPR:
1248 case POSTINCREMENT_EXPR:
1249 case PREDECREMENT_EXPR:
1250 case POSTDECREMENT_EXPR:
1251 case MODIFY_EXPR:
1252 case INIT_EXPR:
1253 case TARGET_EXPR:
1254 case CALL_EXPR:
1255 case METHOD_CALL_EXPR:
1256 case RTL_EXPR:
1257 case WRAPPER_EXPR:
1258 case ANTI_WRAPPER_EXPR:
1259 case WITH_CLEANUP_EXPR:
1260 case EXIT_EXPR:
1261 /* We don't warn about COND_EXPR because it may be a useful
1262 construct if either arm contains a side effect. */
1263 case COND_EXPR:
1264 return 0;
1265
1266 case BIND_EXPR:
1267 /* For a binding, warn if no side effect within it. */
1268 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1269
1270 case TRUTH_ORIF_EXPR:
1271 case TRUTH_ANDIF_EXPR:
1272 /* In && or ||, warn if 2nd operand has no side effect. */
1273 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1274
1275 case COMPOUND_EXPR:
1276 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1277 return 1;
1278 /* Let people do `(foo (), 0)' without a warning. */
1279 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1280 return 0;
1281 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1282
1283 case NOP_EXPR:
1284 case CONVERT_EXPR:
 1285    case NON_LVALUE_EXPR:
1286 /* Don't warn about values cast to void. */
1287 if (TREE_TYPE (exp) == void_type_node)
1288 return 0;
1289 /* Don't warn about conversions not explicit in the user's program. */
1290 if (TREE_NO_UNUSED_WARNING (exp))
1291 return 0;
1292 /* Assignment to a cast usually results in a cast of a modify.
1293 Don't complain about that. */
1294 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
1295 return 0;
1296 /* Sometimes it results in a cast of a cast of a modify.
1297 Don't complain about that. */
1298 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
1299 || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
1300 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
1301 return 0;
1302
1303 default:
1304 /* Referencing a volatile value is a side effect, so don't warn. */
1305 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1306 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1307 && TREE_THIS_VOLATILE (exp))
1308 return 0;
1309 warning_with_file_and_line (emit_filename, emit_lineno,
1310 "value computed is not used");
1311 return 1;
1312 }
1313}
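
/* Illustrative cases for the logic above (a sketch): in `f (), x;' the
   trailing X draws "value computed is not used"; `f (), 0;' is let
   through by the COMPOUND_EXPR case; and a side-effect-free statement
   like `x + 1;' never reaches here, since expand_expr_stmt already
   issues "statement with no effect" for it.  */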
1314
1315/* Clear out the memory of the last expression evaluated. */
1316
1317void
1318clear_last_expr ()
1319{
1320 last_expr_type = 0;
1321}
1322
1323/* Begin a statement which will return a value.
1324 Return the RTL_EXPR for this statement expr.
1325 The caller must save that value and pass it to expand_end_stmt_expr. */
1326
1327tree
1328expand_start_stmt_expr ()
1329{
1330 /* Make the RTL_EXPR node temporary, not momentary,
1331 so that rtl_expr_chain doesn't become garbage. */
1332 int momentary = suspend_momentary ();
1333 tree t = make_node (RTL_EXPR);
1334 resume_momentary (momentary);
1335 start_sequence ();
1336 NO_DEFER_POP;
1337 expr_stmts_for_value++;
1338 return t;
1339}
1340
1341/* Restore the previous state at the end of a statement that returns a value.
1342 Returns a tree node representing the statement's value and the
1343 insns to compute the value.
1344
1345 The nodes of that expression have been freed by now, so we cannot use them.
1346 But we don't want to do that anyway; the expression has already been
1347 evaluated and now we just want to use the value. So generate a RTL_EXPR
1348 with the proper type and RTL value.
1349
1350 If the last substatement was not an expression,
1351 return something with type `void'. */
1352
1353tree
1354expand_end_stmt_expr (t)
1355 tree t;
1356{
1357 OK_DEFER_POP;
1358
1359 if (last_expr_type == 0)
1360 {
1361 last_expr_type = void_type_node;
1362 last_expr_value = const0_rtx;
1363 }
1364 else if (last_expr_value == 0)
1365 /* There are some cases where this can happen, such as when the
1366 statement is void type. */
1367 last_expr_value = const0_rtx;
1368 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1369 /* Remove any possible QUEUED. */
1370 last_expr_value = protect_from_queue (last_expr_value, 0);
1371
1372 emit_queue ();
1373
1374 TREE_TYPE (t) = last_expr_type;
1375 RTL_EXPR_RTL (t) = last_expr_value;
1376 RTL_EXPR_SEQUENCE (t) = get_insns ();
1377
1378 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1379
1380 end_sequence ();
1381
1382 /* Don't consider deleting this expr or containing exprs at tree level. */
1383 TREE_SIDE_EFFECTS (t) = 1;
1384 /* Propagate volatility of the actual RTL expr. */
1385 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1386
1387 last_expr_type = 0;
1388 expr_stmts_for_value--;
1389
1390 return t;
1391}
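
/* A hedged sketch of the expected calling sequence (owned by the
   parser): for the GNU C expression `({ int i = f (); i + 1; })' the
   parser calls expand_start_stmt_expr, expands the enclosed statements
   (the final `i + 1;' goes through expand_expr_stmt, which records its
   type and value), then calls expand_end_stmt_expr to get an RTL_EXPR
   representing the value of the whole construct.  */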
1392\f
1393/* The exception handling nesting looks like this:
1394
1395 <-- Level N-1
1396 { <-- exception handler block
1397 <-- Level N
1398 <-- in an exception handler
1399 { <-- try block
1400 : <-- in a TRY block
1401 : <-- in an exception handler
1402 :
1403 }
1404
1405 { <-- except block
1406 : <-- in an except block
1407 : <-- in an exception handler
1408 :
1409 }
1410
1411 }
 1412*/
1413/* Return nonzero iff in a try block at level LEVEL. */
1414
1415int
1416in_try_block (level)
1417 int level;
1418{
1419 struct nesting *n = except_stack;
1420 while (1)
1421 {
1422 while (n && n->data.except_stmt.after_label != 0)
1423 n = n->next;
1424 if (n == 0)
1425 return 0;
1426 if (level == 0)
1427 return n != 0;
1428 level--;
1429 n = n->next;
1430 }
1431}
1432
1433/* Return nonzero iff in an except block at level LEVEL. */
1434
1435int
1436in_except_block (level)
1437 int level;
1438{
1439 struct nesting *n = except_stack;
1440 while (1)
1441 {
1442 while (n && n->data.except_stmt.after_label == 0)
1443 n = n->next;
1444 if (n == 0)
1445 return 0;
1446 if (level == 0)
1447 return n != 0;
1448 level--;
1449 n = n->next;
1450 }
1451}
1452
1453/* Return nonzero iff in an exception handler at level LEVEL. */
1454
1455int
1456in_exception_handler (level)
1457 int level;
1458{
1459 struct nesting *n = except_stack;
1460 while (n && level--)
1461 n = n->next;
1462 return n != 0;
1463}
1464
1465/* Record the fact that the current exception nesting raises
1466 exception EX. If not in an exception handler, return 0. */
1467int
1468expand_raise (ex)
1469 tree ex;
1470{
1471 tree *raises_ptr;
1472
1473 if (except_stack == 0)
1474 return 0;
1475 raises_ptr = &except_stack->data.except_stmt.raised;
1476 if (! value_member (ex, *raises_ptr))
1477 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1478 return 1;
1479}
1480
1481/* Generate RTL for the start of a try block.
1482
1483 TRY_CLAUSE is the condition to test to enter the try block. */
1484
1485void
1486expand_start_try (try_clause, exitflag, escapeflag)
1487 tree try_clause;
1488 int exitflag;
1489 int escapeflag;
1490{
1491 struct nesting *thishandler = ALLOC_NESTING ();
1492
1493 /* Make an entry on cond_stack for the cond we are entering. */
1494
1495 thishandler->next = except_stack;
1496 thishandler->all = nesting_stack;
1497 thishandler->depth = ++nesting_depth;
1498 thishandler->data.except_stmt.raised = 0;
1499 thishandler->data.except_stmt.handled = 0;
1500 thishandler->data.except_stmt.first_insn = get_insns ();
1501 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1502 thishandler->data.except_stmt.unhandled_label = 0;
1503 thishandler->data.except_stmt.after_label = 0;
1504 thishandler->data.except_stmt.escape_label
1505 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
1506 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
1507 except_stack = thishandler;
1508 nesting_stack = thishandler;
1509
1510 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
1511}
1512
1513/* End of a TRY block. Nothing to do for now. */
1514
1515void
1516expand_end_try ()
1517{
1518 except_stack->data.except_stmt.after_label = gen_label_rtx ();
1519 expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
1520}
1521
1522/* Start an `except' nesting contour.
1523 EXITFLAG says whether this contour should be able to `exit' something.
1524 ESCAPEFLAG says whether this contour should be escapable. */
1525
1526void
1527expand_start_except (exitflag, escapeflag)
1528 int exitflag;
1529 int escapeflag;
1530{
1531 if (exitflag)
1532 {
1533 struct nesting *n;
1534 /* An `exit' from catch clauses goes out to next exit level,
1535 if there is one. Otherwise, it just goes to the end
1536 of the construct. */
1537 for (n = except_stack->next; n; n = n->next)
1538 if (n->exit_label != 0)
1539 {
1540 except_stack->exit_label = n->exit_label;
1541 break;
1542 }
1543 if (n == 0)
1544 except_stack->exit_label = except_stack->data.except_stmt.after_label;
1545 }
1546 if (escapeflag)
1547 {
1548 struct nesting *n;
1549 /* An `escape' from catch clauses goes out to next escape level,
1550 if there is one. Otherwise, it just goes to the end
1551 of the construct. */
1552 for (n = except_stack->next; n; n = n->next)
1553 if (n->data.except_stmt.escape_label != 0)
1554 {
1555 except_stack->data.except_stmt.escape_label
1556 = n->data.except_stmt.escape_label;
1557 break;
1558 }
1559 if (n == 0)
1560 except_stack->data.except_stmt.escape_label
1561 = except_stack->data.except_stmt.after_label;
1562 }
1563 do_pending_stack_adjust ();
1564 emit_label (except_stack->data.except_stmt.except_label);
1565}
1566
1567/* Generate code to `escape' from an exception contour. This
1568 is like `exiting', but does not conflict with constructs which
1569 use `exit_label'.
1570
1571 Return nonzero if this contour is escapable, otherwise
1572 return zero, and language-specific code will emit the
1573 appropriate error message. */
1574int
1575expand_escape_except ()
1576{
1577 struct nesting *n;
1578 last_expr_type = 0;
1579 for (n = except_stack; n; n = n->next)
1580 if (n->data.except_stmt.escape_label != 0)
1581 {
1582 expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
1583 return 1;
1584 }
1585
1586 return 0;
1587}
1588
 1589/* Finish processing an `except' contour.
 1590   Culls out all exceptions which might be raised but not
1591 handled, and returns the list to the caller.
1592 Language-specific code is responsible for dealing with these
1593 exceptions. */
1594
1595tree
1596expand_end_except ()
1597{
1598 struct nesting *n;
1599 tree raised = NULL_TREE;
1600
1601 do_pending_stack_adjust ();
1602 emit_label (except_stack->data.except_stmt.after_label);
1603
1604 n = except_stack->next;
1605 if (n)
1606 {
1607 /* Propagate exceptions raised but not handled to next
1608 highest level. */
1609 tree handled = except_stack->data.except_stmt.raised;
1610 if (handled != void_type_node)
1611 {
1612 tree prev = NULL_TREE;
1613 raised = except_stack->data.except_stmt.raised;
1614 while (handled)
1615 {
1616 tree this_raise;
1617 for (this_raise = raised, prev = 0; this_raise;
1618 this_raise = TREE_CHAIN (this_raise))
1619 {
1620 if (value_member (TREE_VALUE (this_raise), handled))
1621 {
1622 if (prev)
1623 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
1624 else
1625 {
1626 raised = TREE_CHAIN (raised);
1627 if (raised == NULL_TREE)
1628 goto nada;
1629 }
1630 }
1631 else
1632 prev = this_raise;
1633 }
1634 handled = TREE_CHAIN (handled);
1635 }
1636 if (prev == NULL_TREE)
1637 prev = raised;
1638 if (prev)
1639 TREE_CHAIN (prev) = n->data.except_stmt.raised;
1640 nada:
1641 n->data.except_stmt.raised = raised;
1642 }
1643 }
1644
1645 POPSTACK (except_stack);
1646 last_expr_type = 0;
1647 return raised;
1648}
1649
1650/* Record that exception EX is caught by this exception handler.
1651 Return nonzero if in exception handling construct, otherwise return 0. */
1652int
1653expand_catch (ex)
1654 tree ex;
1655{
1656 tree *raises_ptr;
1657
1658 if (except_stack == 0)
1659 return 0;
1660 raises_ptr = &except_stack->data.except_stmt.handled;
1661 if (*raises_ptr != void_type_node
1662 && ex != NULL_TREE
1663 && ! value_member (ex, *raises_ptr))
1664 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1665 return 1;
1666}
1667
1668/* Record that this exception handler catches all exceptions.
1669 Return nonzero if in exception handling construct, otherwise return 0. */
1670
1671int
1672expand_catch_default ()
1673{
1674 if (except_stack == 0)
1675 return 0;
1676 except_stack->data.except_stmt.handled = void_type_node;
1677 return 1;
1678}
1679
1680int
1681expand_end_catch ()
1682{
1683 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
1684 return 0;
1685 expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
1686 return 1;
1687}
1688\f
1689/* Generate RTL for the start of an if-then. COND is the expression
1690 whose truth should be tested.
1691
1692 If EXITFLAG is nonzero, this conditional is visible to
1693 `exit_something'. */
1694
1695void
1696expand_start_cond (cond, exitflag)
1697 tree cond;
1698 int exitflag;
1699{
1700 struct nesting *thiscond = ALLOC_NESTING ();
1701
1702 /* Make an entry on cond_stack for the cond we are entering. */
1703
1704 thiscond->next = cond_stack;
1705 thiscond->all = nesting_stack;
1706 thiscond->depth = ++nesting_depth;
1707 thiscond->data.cond.next_label = gen_label_rtx ();
1708 /* Before we encounter an `else', we don't need a separate exit label
1709 unless there are supposed to be exit statements
1710 to exit this conditional. */
1711 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1712 thiscond->data.cond.endif_label = thiscond->exit_label;
1713 cond_stack = thiscond;
1714 nesting_stack = thiscond;
1715
1716 do_jump (cond, thiscond->data.cond.next_label, NULL);
1717}
1718
1719/* Generate RTL between then-clause and the elseif-clause
1720 of an if-then-elseif-.... */
1721
1722void
1723expand_start_elseif (cond)
1724 tree cond;
1725{
1726 if (cond_stack->data.cond.endif_label == 0)
1727 cond_stack->data.cond.endif_label = gen_label_rtx ();
1728 emit_jump (cond_stack->data.cond.endif_label);
1729 emit_label (cond_stack->data.cond.next_label);
1730 cond_stack->data.cond.next_label = gen_label_rtx ();
1731 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1732}
1733
1734/* Generate RTL between the then-clause and the else-clause
1735 of an if-then-else. */
1736
1737void
1738expand_start_else ()
1739{
1740 if (cond_stack->data.cond.endif_label == 0)
1741 cond_stack->data.cond.endif_label = gen_label_rtx ();
1742 emit_jump (cond_stack->data.cond.endif_label);
1743 emit_label (cond_stack->data.cond.next_label);
1744 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1745}
1746
1747/* Generate RTL for the end of an if-then.
1748 Pop the record for it off of cond_stack. */
1749
1750void
1751expand_end_cond ()
1752{
1753 struct nesting *thiscond = cond_stack;
1754
1755 do_pending_stack_adjust ();
1756 if (thiscond->data.cond.next_label)
1757 emit_label (thiscond->data.cond.next_label);
1758 if (thiscond->data.cond.endif_label)
1759 emit_label (thiscond->data.cond.endif_label);
1760
1761 POPSTACK (cond_stack);
1762 last_expr_type = 0;
1763}
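/* Illustration (not part of the original comments): a front end
   expanding `if (a) f (); else if (b) g (); else h ();' would call

	expand_start_cond (a, 0);
	... expand the statement f (); ...
	expand_start_elseif (b);
	... expand the statement g (); ...
	expand_start_else ();
	... expand the statement h (); ...
	expand_end_cond ();

   The labels generated above stitch the clauses together.  */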
1764\f
1765/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1766 loop should be exited by `exit_something'. This is a loop for which
1767 `expand_continue' will jump to the top of the loop.
1768
1769 Make an entry on loop_stack to record the labels associated with
1770 this loop. */
1771
1772struct nesting *
1773expand_start_loop (exit_flag)
1774 int exit_flag;
1775{
1776 register struct nesting *thisloop = ALLOC_NESTING ();
1777
1778 /* Make an entry on loop_stack for the loop we are entering. */
1779
1780 thisloop->next = loop_stack;
1781 thisloop->all = nesting_stack;
1782 thisloop->depth = ++nesting_depth;
1783 thisloop->data.loop.start_label = gen_label_rtx ();
1784 thisloop->data.loop.end_label = gen_label_rtx ();
1785 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1786 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1787 loop_stack = thisloop;
1788 nesting_stack = thisloop;
1789
1790 do_pending_stack_adjust ();
1791 emit_queue ();
1792 emit_note (0, NOTE_INSN_LOOP_BEG);
1793 emit_label (thisloop->data.loop.start_label);
1794
1795 return thisloop;
1796}
1797
1798/* Like expand_start_loop but for a loop where the continuation point
1799 (for expand_continue_loop) will be specified explicitly. */
1800
1801struct nesting *
1802expand_start_loop_continue_elsewhere (exit_flag)
1803 int exit_flag;
1804{
1805 struct nesting *thisloop = expand_start_loop (exit_flag);
1806 loop_stack->data.loop.continue_label = gen_label_rtx ();
1807 return thisloop;
1808}
1809
1810/* Specify the continuation point for a loop started with
1811 expand_start_loop_continue_elsewhere.
1812 Use this at the point in the code to which a continue statement
1813 should jump. */
1814
1815void
1816expand_loop_continue_here ()
1817{
1818 do_pending_stack_adjust ();
1819 emit_note (0, NOTE_INSN_LOOP_CONT);
1820 emit_label (loop_stack->data.loop.continue_label);
1821}
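/* Illustration (not part of the original comments): a front end
   expanding `for (init; cond; incr) body' might call

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_loop_continue_here ();
	... expand incr ...
	expand_end_loop ();

   A `continue' statement in the body calls expand_continue_loop,
   which jumps to the label emitted by expand_loop_continue_here;
   a `break' calls expand_exit_loop.  */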
1822
1823/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1824 Pop the block off of loop_stack. */
1825
1826void
1827expand_end_loop ()
1828{
1829 register rtx insn = get_last_insn ();
1830 register rtx start_label = loop_stack->data.loop.start_label;
1831 rtx last_test_insn = 0;
1832 int num_insns = 0;
1833
1834 /* Mark the continue-point at the top of the loop if none elsewhere. */
1835 if (start_label == loop_stack->data.loop.continue_label)
1836 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1837
1838 do_pending_stack_adjust ();
1839
1840 /* If optimizing, perhaps reorder the loop. If the loop
1841 starts with a conditional exit, roll that to the end
1842 where it will optimize together with the jump back.
1843
1844 We look for the last conditional branch to the exit that we encounter
1845 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1846 branch to the exit first, use it.
1847
1848 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1849 because moving them is not valid. */
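  /* An illustrative sketch (not in the original comments): a loop
     emitted as

	start: if (!cond) goto end;  <body>  goto start;  end:

     is rearranged into

	goto start;  new: <body>  start: if (!cond) goto end;  goto new;  end:

     so the exit test ends up next to the backward jump, where jump
     optimization can combine the two.  */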
1850
1851 if (optimize
1852 &&
1853 ! (GET_CODE (insn) == JUMP_INSN
1854 && GET_CODE (PATTERN (insn)) == SET
1855 && SET_DEST (PATTERN (insn)) == pc_rtx
1856 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1857 {
1858 /* Scan insns from the top of the loop looking for a qualified
1859 conditional exit. */
1860 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1861 insn = NEXT_INSN (insn))
1862 {
1863 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1864 break;
1865
1866 if (GET_CODE (insn) == NOTE
1867 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1868 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1869 break;
1870
1871 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1872 num_insns++;
1873
1874 if (last_test_insn && num_insns > 30)
1875 break;
1876
1877 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1878 && SET_DEST (PATTERN (insn)) == pc_rtx
1879 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1880 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1881 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1882 == loop_stack->data.loop.end_label))
1883 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1884 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1885 == loop_stack->data.loop.end_label))))
1886 last_test_insn = insn;
1887
1888 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1889 && GET_CODE (PATTERN (insn)) == SET
1890 && SET_DEST (PATTERN (insn)) == pc_rtx
1891 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1892 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1893 == loop_stack->data.loop.end_label))
1894 /* Include BARRIER. */
1895 last_test_insn = NEXT_INSN (insn);
1896 }
1897
1898 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1899 {
1900 /* We found one. Move everything from there up
1901 to the end of the loop, and add a jump into the loop
1902 to jump to there. */
1903 register rtx newstart_label = gen_label_rtx ();
1904 register rtx start_move = start_label;
1905
1906 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1907 then we want to move this note also. */
1908 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1909 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1910 == NOTE_INSN_LOOP_CONT))
1911 start_move = PREV_INSN (start_move);
1912
1913 emit_label_after (newstart_label, PREV_INSN (start_move));
1914 reorder_insns (start_move, last_test_insn, get_last_insn ());
1915 emit_jump_insn_after (gen_jump (start_label),
1916 PREV_INSN (newstart_label));
1917 emit_barrier_after (PREV_INSN (newstart_label));
1918 start_label = newstart_label;
1919 }
1920 }
1921
1922 emit_jump (start_label);
1923 emit_note (0, NOTE_INSN_LOOP_END);
1924 emit_label (loop_stack->data.loop.end_label);
1925
1926 POPSTACK (loop_stack);
1927
1928 last_expr_type = 0;
1929}
1930
1931/* Generate a jump to the current loop's continue-point.
1932 This is usually the top of the loop, but may be specified
1933 explicitly elsewhere. If not currently inside a loop,
1934 return 0 and do nothing; caller will print an error message. */
1935
1936int
1937expand_continue_loop (whichloop)
1938 struct nesting *whichloop;
1939{
1940 last_expr_type = 0;
1941 if (whichloop == 0)
1942 whichloop = loop_stack;
1943 if (whichloop == 0)
1944 return 0;
1945 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1946 return 1;
1947}
1948
1949/* Generate a jump to exit the current loop. If not currently inside a loop,
1950 return 0 and do nothing; caller will print an error message. */
1951
1952int
1953expand_exit_loop (whichloop)
1954 struct nesting *whichloop;
1955{
1956 last_expr_type = 0;
1957 if (whichloop == 0)
1958 whichloop = loop_stack;
1959 if (whichloop == 0)
1960 return 0;
1961 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1962 return 1;
1963}
1964
1965/* Generate a conditional jump to exit the current loop if COND
1966 evaluates to zero. If not currently inside a loop,
1967 return 0 and do nothing; caller will print an error message. */
1968
1969int
1970expand_exit_loop_if_false (whichloop, cond)
1971 struct nesting *whichloop;
1972 tree cond;
1973{
1974 last_expr_type = 0;
1975 if (whichloop == 0)
1976 whichloop = loop_stack;
1977 if (whichloop == 0)
1978 return 0;
1979 do_jump (cond, whichloop->data.loop.end_label, NULL);
1980 return 1;
1981}
1982
1983/* Return non-zero if we should preserve sub-expressions as separate
1984 pseudos. We never do so if we aren't optimizing. We always do so
1985 if -fexpensive-optimizations.
1986
1987 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
1988 the loop may still be a small one. */
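/* Illustratively (numbers are an example only): the UID test below
   accepts an insn only while fewer than n_non_fixed_regs * 3 insn
   UIDs have been created since the loop's start label, so with 30
   allocatable registers roughly the first 90 insns of a loop count
   as "early".  */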
1989
1990int
1991preserve_subexpressions_p ()
1992{
1993 rtx insn;
1994
1995 if (flag_expensive_optimizations)
1996 return 1;
1997
1998 if (optimize == 0 || loop_stack == 0)
1999 return 0;
2000
2001 insn = get_last_insn_anywhere ();
2002
2003 return (insn
2004 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2005 < n_non_fixed_regs * 3));
2006
2007}
2008
2009/* Generate a jump to exit the current loop, conditional, binding contour
2010 or case statement. Not all such constructs are visible to this function,
2011 only those started with EXIT_FLAG nonzero. Individual languages use
2012 the EXIT_FLAG parameter to control which kinds of constructs you can
2013 exit this way.
2014
2015 If not currently inside anything that can be exited,
2016 return 0 and do nothing; caller will print an error message. */
2017
2018int
2019expand_exit_something ()
2020{
2021 struct nesting *n;
2022 last_expr_type = 0;
2023 for (n = nesting_stack; n; n = n->all)
2024 if (n->exit_label != 0)
2025 {
2026 expand_goto_internal (0, n->exit_label, 0);
2027 return 1;
2028 }
2029
2030 return 0;
2031}
2032\f
2033/* Generate RTL to return from the current function, with no value.
2034 (That is, we do not do anything about returning any value.) */
2035
2036void
2037expand_null_return ()
2038{
2039 struct nesting *block = block_stack;
2040 rtx last_insn = 0;
2041
2042 /* Does any pending block have cleanups? */
2043
2044 while (block && block->data.block.cleanups == 0)
2045 block = block->next;
2046
2047 /* If yes, use a goto to return, since that runs cleanups. */
2048
2049 expand_null_return_1 (last_insn, block != 0);
2050}
2051
2052/* Generate RTL to return from the current function, with value VAL. */
2053
2054void
2055expand_value_return (val)
2056 rtx val;
2057{
2058 struct nesting *block = block_stack;
2059 rtx last_insn = get_last_insn ();
2060 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2061
2062 /* Copy the value to the return location
2063 unless it's already there. */
2064
2065 if (return_reg != val)
2066 emit_move_insn (return_reg, val);
2067 if (GET_CODE (return_reg) == REG
2068 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2069 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2070
2071 /* Does any pending block have cleanups? */
2072
2073 while (block && block->data.block.cleanups == 0)
2074 block = block->next;
2075
2076 /* If yes, use a goto to return, since that runs cleanups.
2077 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2078
2079 expand_null_return_1 (last_insn, block != 0);
2080}
2081
2082/* Output a return with no value. If LAST_INSN is nonzero,
2083 pretend that the return takes place after LAST_INSN.
2084 If USE_GOTO is nonzero then don't use a return instruction;
2085 go to the return label instead. This causes any cleanups
2086 of pending blocks to be executed normally. */
2087
2088static void
2089expand_null_return_1 (last_insn, use_goto)
2090 rtx last_insn;
2091 int use_goto;
2092{
2093 rtx end_label = cleanup_label ? cleanup_label : return_label;
2094
2095 clear_pending_stack_adjust ();
2096 do_pending_stack_adjust ();
2097 last_expr_type = 0;
2098
2099 /* PCC-struct return always uses an epilogue. */
2100 if (current_function_returns_pcc_struct || use_goto)
2101 {
2102 if (end_label == 0)
2103 end_label = return_label = gen_label_rtx ();
2104 expand_goto_internal (0, end_label, last_insn);
2105 return;
2106 }
2107
2108 /* Otherwise output a simple return-insn if one is available,
2109 unless it won't do the job. */
2110#ifdef HAVE_return
2111 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2112 {
2113 emit_jump_insn (gen_return ());
2114 emit_barrier ();
2115 return;
2116 }
2117#endif
2118
2119 /* Otherwise jump to the epilogue. */
2120 expand_goto_internal (0, end_label, last_insn);
2121}
2122\f
2123/* Generate RTL to evaluate the expression RETVAL and return it
2124 from the current function. */
2125
2126void
2127expand_return (retval)
2128 tree retval;
2129{
2130 /* If there are any cleanups to be performed, then they will
2131 be inserted following LAST_INSN. It is desirable
2132 that the last_insn, for such purposes, should be the
2133 last insn before computing the return value. Otherwise, cleanups
2134 which call functions can clobber the return value. */
2135 /* ??? rms: I think that is erroneous, because in C++ it would
2136 run destructors on variables that might be used in the subsequent
2137 computation of the return value. */
2138 rtx last_insn = 0;
2139 register rtx val = 0;
2140 register rtx op0;
2141 tree retval_rhs;
2142 int cleanups;
2143 struct nesting *block;
2144
2145 /* If function wants no value, give it none. */
2146 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2147 {
2148 expand_expr (retval, 0, VOIDmode, 0);
2149 expand_null_return ();
2150 return;
2151 }
2152
2153 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2154 cleanups = any_pending_cleanups (1);
2155
2156 if (TREE_CODE (retval) == RESULT_DECL)
2157 retval_rhs = retval;
2158 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2159 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2160 retval_rhs = TREE_OPERAND (retval, 1);
2161 else if (TREE_TYPE (retval) == void_type_node)
2162 /* Recognize tail-recursive call to void function. */
2163 retval_rhs = retval;
2164 else
2165 retval_rhs = NULL_TREE;
2166
2167 /* Only use `last_insn' if there are cleanups which must be run. */
2168 if (cleanups || cleanup_label != 0)
2169 last_insn = get_last_insn ();
2170
2171 /* Distribute return down conditional expr if either of the sides
2172 may involve tail recursion (see test below). This enhances the number
2173 of tail recursions we see. Don't do this always since it can produce
2174 sub-optimal code in some cases and we distribute assignments into
2175 conditional expressions when it would help. */
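  /* For example (illustrative): `return p ? f (x) : g (x);' is
     treated here as if it had been written

	if (p) return f (x); else return g (x);

     so that each arm becomes a direct `return' of a CALL_EXPR and is
     eligible for the tail-recursion optimization below.  */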
2176
2177 if (optimize && retval_rhs != 0
2178 && frame_offset == 0
2179 && TREE_CODE (retval_rhs) == COND_EXPR
2180 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2181 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2182 {
2183 rtx label = gen_label_rtx ();
2184 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2185 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2186 DECL_RESULT (current_function_decl),
2187 TREE_OPERAND (retval_rhs, 1)));
2188 emit_label (label);
2189 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2190 DECL_RESULT (current_function_decl),
2191 TREE_OPERAND (retval_rhs, 2)));
2192 return;
2193 }
2194
2195 /* For tail-recursive call to current function,
2196 just jump back to the beginning.
2197 It's unsafe if any auto variable in this function
2198 has its address taken; for simplicity,
2199 require stack frame to be empty. */
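  /* Illustrative example (not in the original comments):

	int f (n, acc)
	     int n, acc;
	{
	  if (n == 0)
	    return acc;
	  return f (n - 1, n * acc);
	}

     With optimization on, the recursive call can compile into moves
     that load the new argument values followed by a jump back to
     tail_recursion_label, provided tail_recursion_args accepts the
     argument list.  */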
2200 if (optimize && retval_rhs != 0
2201 && frame_offset == 0
2202 && TREE_CODE (retval_rhs) == CALL_EXPR
2203 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2204 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2205 /* Finish checking validity, and if valid emit code
2206 to set the argument variables for the new call. */
2207 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2208 DECL_ARGUMENTS (current_function_decl)))
2209 {
2210 if (tail_recursion_label == 0)
2211 {
2212 tail_recursion_label = gen_label_rtx ();
2213 emit_label_after (tail_recursion_label,
2214 tail_recursion_reentry);
2215 }
2216 expand_goto_internal (0, tail_recursion_label, last_insn);
2217 emit_barrier ();
2218 return;
2219 }
2220#ifdef HAVE_return
2221 /* This optimization is safe if there are local cleanups
2222 because expand_null_return takes care of them.
2223 ??? I think it should also be safe when there is a cleanup label,
2224 because expand_null_return takes care of them, too.
2225 Any reason why not? */
2226 if (HAVE_return && cleanup_label == 0
2227 && ! current_function_returns_pcc_struct)
2228 {
2229 /* If this is return x == y; then generate
2230 if (x == y) return 1; else return 0;
2231 if we can do it with explicit return insns. */
2232 if (retval_rhs)
2233 switch (TREE_CODE (retval_rhs))
2234 {
2235 case EQ_EXPR:
2236 case NE_EXPR:
2237 case GT_EXPR:
2238 case GE_EXPR:
2239 case LT_EXPR:
2240 case LE_EXPR:
2241 case TRUTH_ANDIF_EXPR:
2242 case TRUTH_ORIF_EXPR:
2243 case TRUTH_AND_EXPR:
2244 case TRUTH_OR_EXPR:
2245 case TRUTH_NOT_EXPR:
2246 op0 = gen_label_rtx ();
2247 jumpifnot (retval_rhs, op0);
2248 expand_value_return (const1_rtx);
2249 emit_label (op0);
2250 expand_value_return (const0_rtx);
2251 return;
2252 }
2253 }
2254#endif /* HAVE_return */
2255
2256 if (cleanups
2257 && retval_rhs != 0
2258 && TREE_TYPE (retval_rhs) != void_type_node
2259 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2260 {
2261 /* Calculate the return value into a pseudo reg. */
2262 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2263 emit_queue ();
2264 /* All temporaries have now been used. */
2265 free_temp_slots ();
2266 /* Return the calculated value, doing cleanups first. */
2267 expand_value_return (val);
2268 }
2269 else
2270 {
2271 /* No cleanups or no hard reg used;
2272 calculate value into hard return reg. */
2273 expand_expr (retval, 0, VOIDmode, 0);
2274 emit_queue ();
2275 free_temp_slots ();
2276 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2277 }
2278}
2279
2280/* Return 1 if the end of the generated RTX is not a barrier.
2281 This means code already compiled can drop through. */
2282
2283int
2284drop_through_at_end_p ()
2285{
2286 rtx insn = get_last_insn ();
2287 while (insn && GET_CODE (insn) == NOTE)
2288 insn = PREV_INSN (insn);
2289 return insn && GET_CODE (insn) != BARRIER;
2290}
2291\f
2292/* Emit code to alter this function's formal parms for a tail-recursive call.
2293 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2294 FORMALS is the chain of decls of formals.
2295 Return 1 if this can be done;
2296 otherwise return 0 and do not emit any code. */
2297
2298static int
2299tail_recursion_args (actuals, formals)
2300 tree actuals, formals;
2301{
2302 register tree a = actuals, f = formals;
2303 register int i;
2304 register rtx *argvec;
2305
2306 /* Check that number and types of actuals are compatible
2307 with the formals. This is not always true in valid C code.
2308 Also check that no formal needs to be addressable
2309 and that all formals are scalars. */
2310
2311 /* Also count the args. */
2312
2313 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2314 {
2315 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2316 return 0;
2317 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2318 return 0;
2319 }
2320 if (a != 0 || f != 0)
2321 return 0;
2322
2323 /* Compute all the actuals. */
2324
2325 argvec = (rtx *) alloca (i * sizeof (rtx));
2326
2327 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2328 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2329
2330 /* Find which actual values refer to current values of previous formals.
2331 Copy each of them now, before any formal is changed. */
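  /* E.g. (illustrative): in `f (a, b)' a tail call `return f (b, a);'
     must not store b into a before a's old value has been read for
     the second argument; any such conflicting actual is copied to a
     fresh pseudo first.  */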
2332
2333 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2334 {
2335 int copy = 0;
2336 register int j;
2337 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2338 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2339 { copy = 1; break; }
2340 if (copy)
2341 argvec[i] = copy_to_reg (argvec[i]);
2342 }
2343
2344 /* Store the values of the actuals into the formals. */
2345
2346 for (f = formals, a = actuals, i = 0; f;
2347 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2348 {
2349 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2350 emit_move_insn (DECL_RTL (f), argvec[i]);
2351 else
2352 convert_move (DECL_RTL (f), argvec[i],
2353 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2354 }
2355
2356 free_temp_slots ();
2357 return 1;
2358}
2359\f
2360/* Generate the RTL code for entering a binding contour.
2361 The variables are declared one by one, by calls to `expand_decl'.
2362
2363 EXIT_FLAG is nonzero if this construct should be visible to
2364 `exit_something'. */
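/* Illustrative call sequence (not part of the original comments) for
   a brace-level `{ int v = 0; ... }':

	expand_start_bindings (0);
	expand_decl (v);
	expand_decl_init (v);
	... expand the statements ...
	expand_end_bindings (decls, 1, 0);  */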
2365
2366void
2367expand_start_bindings (exit_flag)
2368 int exit_flag;
2369{
2370 struct nesting *thisblock = ALLOC_NESTING ();
2371
2372 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2373
2374 /* Make an entry on block_stack for the block we are entering. */
2375
2376 thisblock->next = block_stack;
2377 thisblock->all = nesting_stack;
2378 thisblock->depth = ++nesting_depth;
2379 thisblock->data.block.stack_level = 0;
2380 thisblock->data.block.cleanups = 0;
2381 thisblock->data.block.function_call_count = 0;
2382#if 0
2383 if (block_stack)
2384 {
2385 if (block_stack->data.block.cleanups == NULL_TREE
2386 && (block_stack->data.block.outer_cleanups == NULL_TREE
2387 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2388 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2389 else
2390 thisblock->data.block.outer_cleanups
2391 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2392 block_stack->data.block.outer_cleanups);
2393 }
2394 else
2395 thisblock->data.block.outer_cleanups = 0;
2396#endif
2397#if 1
2398 if (block_stack
2399 && !(block_stack->data.block.cleanups == NULL_TREE
2400 && block_stack->data.block.outer_cleanups == NULL_TREE))
2401 thisblock->data.block.outer_cleanups
2402 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2403 block_stack->data.block.outer_cleanups);
2404 else
2405 thisblock->data.block.outer_cleanups = 0;
2406#endif
2407 thisblock->data.block.label_chain = 0;
2408 thisblock->data.block.innermost_stack_block = stack_block_stack;
2409 thisblock->data.block.first_insn = note;
2410 thisblock->data.block.block_start_count = ++block_start_count;
2411 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2412 block_stack = thisblock;
2413 nesting_stack = thisblock;
2414
2415 /* Make a new level for allocating stack slots. */
2416 push_temp_slots ();
2417}
2418
2419/* Generate RTL code to terminate a binding contour.
2420 VARS is the chain of VAR_DECL nodes
2421 for the variables bound in this contour.
2422 MARK_ENDS is nonzero if we should put a note at the beginning
2423 and end of this binding contour.
2424
2425 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2426 (That is true automatically if the contour has a saved stack level.) */
2427
2428void
2429expand_end_bindings (vars, mark_ends, dont_jump_in)
2430 tree vars;
2431 int mark_ends;
2432 int dont_jump_in;
2433{
2434 register struct nesting *thisblock = block_stack;
2435 register tree decl;
2436
2437 if (warn_unused)
2438 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2439 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2440 warning_with_decl (decl, "unused variable `%s'");
2441
2442 /* Mark the beginning and end of the scope if requested. */
2443
2444 if (mark_ends)
2445 emit_note (0, NOTE_INSN_BLOCK_END);
2446 else
2447 /* Get rid of the beginning-mark if we don't make an end-mark. */
2448 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2449
2450 if (thisblock->exit_label)
2451 {
2452 do_pending_stack_adjust ();
2453 emit_label (thisblock->exit_label);
2454 }
2455
2456 /* If necessary, make a handler for nonlocal gotos taking
2457 place in the function calls in this block. */
2458 if (function_call_count != thisblock->data.block.function_call_count
2459 && nonlocal_labels
2460 /* Make handler for outermost block
2461 if there were any nonlocal gotos to this function. */
2462 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2463 /* Make handler for inner block if it has something
2464 special to do when you jump out of it. */
2465 : (thisblock->data.block.cleanups != 0
2466 || thisblock->data.block.stack_level != 0)))
2467 {
2468 tree link;
2469 rtx afterward = gen_label_rtx ();
2470 rtx handler_label = gen_label_rtx ();
2471 rtx save_receiver = gen_reg_rtx (Pmode);
2472
2473 /* Don't let jump_optimize delete the handler. */
2474 LABEL_PRESERVE_P (handler_label) = 1;
2475
2476 /* Record the handler address in the stack slot for that purpose,
2477 during this block, saving and restoring the outer value. */
2478 if (thisblock->next != 0)
2479 {
2480 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2481 emit_insn_before (gen_move_insn (save_receiver,
2482 nonlocal_goto_handler_slot),
2483 thisblock->data.block.first_insn);
2484 }
2485 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2486 gen_rtx (LABEL_REF, Pmode,
2487 handler_label)),
2488 thisblock->data.block.first_insn);
2489
2490 /* Jump around the handler; it runs only when specially invoked. */
2491 emit_jump (afterward);
2492 emit_label (handler_label);
2493
2494#ifdef HAVE_nonlocal_goto
2495 if (! HAVE_nonlocal_goto)
2496#endif
2497 /* First adjust our frame pointer to its actual value. It was
2498 previously set to the start of the virtual area corresponding to
2499 the stacked variables when we branched here and now needs to be
2500 adjusted to the actual hardware fp value.
2501
2502 Assignments to virtual registers are converted by
2503 instantiate_virtual_regs into the corresponding assignment
2504 to the underlying register (fp in this case) that makes
2505 the original assignment true.
2506 So the following insn will actually be
2507 decrementing fp by STARTING_FRAME_OFFSET. */
2508 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2509
2510#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2511 if (fixed_regs[ARG_POINTER_REGNUM])
2512 {
2513 /* Now restore our arg pointer from the address at which it was saved
2514 in our stack frame.
2515 If space has not been allocated for it yet, make some now. */
2516 if (arg_pointer_save_area == 0)
2517 arg_pointer_save_area
2518 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2519 emit_move_insn (virtual_incoming_args_rtx,
2520 /* We need a pseudo here,
2521 or else instantiate_virtual_regs_1 complains. */
2522 copy_to_reg (arg_pointer_save_area));
2523 }
2524#endif
2525
2526 /* The handler expects the desired label address in the static chain
2527 register. It tests the address and does an appropriate jump
2528 to whatever label is desired. */
2529 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2530 /* Skip any labels we shouldn't be able to jump to from here. */
2531 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2532 {
2533 rtx not_this = gen_label_rtx ();
2534 rtx this = gen_label_rtx ();
2535 do_jump_if_equal (static_chain_rtx,
2536 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2537 this, 0);
2538 emit_jump (not_this);
2539 emit_label (this);
2540 expand_goto (TREE_VALUE (link));
2541 emit_label (not_this);
2542 }
2543 /* If label is not recognized, abort. */
2544 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2545 VOIDmode, 0);
2546 emit_label (afterward);
2547 }
2548
2549 /* Don't allow jumping into a block that has cleanups or a stack level. */
2550 if (dont_jump_in
2551 || thisblock->data.block.stack_level != 0
2552 || thisblock->data.block.cleanups != 0)
2553 {
2554 struct label_chain *chain;
2555
2556 /* Any labels in this block are no longer valid to go to.
2557 Mark them to cause an error message. */
2558 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2559 {
2560 DECL_TOO_LATE (chain->label) = 1;
2561 /* If any goto without a fixup came to this label,
2562 that must be an error, because gotos without fixups
2563 come from outside all saved stack-levels and all cleanups. */
2564 if (TREE_ADDRESSABLE (chain->label))
2565 error_with_decl (chain->label,
2566 "label `%s' used before containing binding contour");
2567 }
2568 }
2569
2570 /* Restore stack level in effect before the block
2571 (only if variable-size objects allocated). */
2572 /* Perform any cleanups associated with the block. */
2573
2574 if (thisblock->data.block.stack_level != 0
2575 || thisblock->data.block.cleanups != 0)
2576 {
2577 /* Don't let cleanups affect ({...}) constructs. */
2578 int old_expr_stmts_for_value = expr_stmts_for_value;
2579 rtx old_last_expr_value = last_expr_value;
2580 tree old_last_expr_type = last_expr_type;
2581 expr_stmts_for_value = 0;
2582
2583 /* Do the cleanups. */
2584 expand_cleanups (thisblock->data.block.cleanups, 0);
2585 do_pending_stack_adjust ();
2586
2587 expr_stmts_for_value = old_expr_stmts_for_value;
2588 last_expr_value = old_last_expr_value;
2589 last_expr_type = old_last_expr_type;
2590
2591 /* Restore the stack level. */
2592
2593 if (thisblock->data.block.stack_level != 0)
2594 {
2595 emit_move_insn (stack_pointer_rtx,
2596 thisblock->data.block.stack_level);
2597 if (nonlocal_goto_stack_level != 0)
2598 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2599 }
2600
2601 /* Any gotos out of this block must also do these things.
2602 Also report any gotos with fixups that came to labels in this level. */
2603 fixup_gotos (thisblock,
2604 thisblock->data.block.stack_level,
2605 thisblock->data.block.cleanups,
2606 thisblock->data.block.first_insn,
2607 dont_jump_in);
2608 }
2609
2610 /* If doing stupid register allocation, make sure lives of all
2611 register variables declared here extend thru end of scope. */
2612
2613 if (obey_regdecls)
2614 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2615 {
2616 rtx rtl = DECL_RTL (decl);
2617 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2618 use_variable (rtl);
2619 }
2620
2621 /* Restore block_stack level for containing block. */
2622
2623 stack_block_stack = thisblock->data.block.innermost_stack_block;
2624 POPSTACK (block_stack);
2625
2626 /* Pop the stack slot nesting and free any slots at this level. */
2627 pop_temp_slots ();
2628}
2629\f
2630/* Generate RTL for the automatic variable declaration DECL.
2631 (Other kinds of declarations are simply ignored if seen here.)
2632 CLEANUP is an expression to be executed at exit from this binding contour,
2633 recorded by `expand_decl_cleanup' below; in C++, it might call this variable's destructor.
2634
2635 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2636 either before or after calling `expand_decl' but before compiling
2637 any subsequent expressions. This is because CLEANUP may be expanded
2638 more than once, on different branches of execution.
2639 For the same reason, CLEANUP may not contain a CALL_EXPR
2640 except as its topmost node--else `preexpand_calls' would get confused.
2641
2642 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2643 that is not associated with any particular variable.
2644
2645 There is no special support here for C++ constructors.
2646 They should be handled by the proper code in DECL_INITIAL. */
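/* Illustrative outcomes (assuming no -ffloat-store and register
   allocation not obeying regdecls):

	int i;           gets a pseudo REG, with REG_USERVAR_P set;
	volatile int v;  gets a stack slot (a MEM with MEM_VOLATILE_P);
	char buf[n];     gets stack space pushed at run time, its
			 DECL_RTL a MEM whose address is in a register.  */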
2647
2648void
2649expand_decl (decl)
2650 register tree decl;
2651{
2652 struct nesting *thisblock = block_stack;
2653 tree type = TREE_TYPE (decl);
2654
2655 /* Only automatic variables need any expansion done.
2656 Static and external variables, and external functions,
2657 will be handled by `assemble_variable' (called from finish_decl).
2658 TYPE_DECL and CONST_DECL require nothing.
2659 PARM_DECLs are handled in `assign_parms'. */
2660
2661 if (TREE_CODE (decl) != VAR_DECL)
2662 return;
2663 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2664 return;
2665
2666 /* Create the RTL representation for the variable. */
2667
2668 if (type == error_mark_node)
2669 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2670 else if (DECL_SIZE (decl) == 0)
2671 /* Variable with incomplete type. */
2672 {
2673 if (DECL_INITIAL (decl) == 0)
2674 /* Error message was already done; now avoid a crash. */
2675 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2676 else
2677 /* An initializer is going to decide the size of this array.
2678 Until we know the size, represent its address with a reg. */
2679 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2680 }
2681 else if (DECL_MODE (decl) != BLKmode
2682 /* If -ffloat-store, don't put explicit float vars
2683 into regs. */
2684 && !(flag_float_store
2685 && TREE_CODE (type) == REAL_TYPE)
2686 && ! TREE_THIS_VOLATILE (decl)
2687 && ! TREE_ADDRESSABLE (decl)
2688 && (TREE_REGDECL (decl) || ! obey_regdecls))
2689 {
2690 /* Automatic variable that can go in a register. */
2691 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2692 if (TREE_CODE (type) == POINTER_TYPE)
2693 mark_reg_pointer (DECL_RTL (decl));
2694 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2695 }
2696 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2697 {
2698 /* Variable of fixed size that goes on the stack. */
2699 rtx oldaddr = 0;
2700 rtx addr;
2701
2702 /* If we previously made RTL for this decl, it must be an array
2703 whose size was determined by the initializer.
2704 The old address was a register; set that register now
2705 to the proper address. */
2706 if (DECL_RTL (decl) != 0)
2707 {
2708 if (GET_CODE (DECL_RTL (decl)) != MEM
2709 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2710 abort ();
2711 oldaddr = XEXP (DECL_RTL (decl), 0);
2712 }
2713
2714 DECL_RTL (decl)
2715 = assign_stack_temp (DECL_MODE (decl),
2716 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2717 + BITS_PER_UNIT - 1)
2718 / BITS_PER_UNIT),
2719 1);
2720
2721 /* Set alignment we actually gave this decl. */
2722 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2723 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2724
2725 if (oldaddr)
2726 {
2727 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2728 if (addr != oldaddr)
2729 emit_move_insn (oldaddr, addr);
2730 }
2731
2732 /* If this is a memory ref that contains aggregate components,
2733 mark it as such for cse and loop optimize. */
2734 MEM_IN_STRUCT_P (DECL_RTL (decl))
2735 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2736 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2737 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2738#if 0
2739 /* If this is in memory because of -ffloat-store,
2740 set the volatile bit, to prevent optimizations from
2741 undoing the effects. */
2742 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2743 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2744#endif
2745 }
2746 else
2747 /* Dynamic-size object: must push space on the stack. */
2748 {
2749 rtx address, size;
2750
2751 /* Record the stack pointer on entry to block, if we have
2752 not already done so. */
2753 if (thisblock->data.block.stack_level == 0)
2754 {
2755 do_pending_stack_adjust ();
2756 thisblock->data.block.stack_level
2757 = copy_to_reg (stack_pointer_rtx);
2758 stack_block_stack = thisblock;
2759 }
2760
2761 /* Compute the variable's size, in bytes. */
2762 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2763 DECL_SIZE (decl),
2764 size_int (BITS_PER_UNIT)),
2765 0, VOIDmode, 0);
2766 free_temp_slots ();
2767
2768 /* Allocate space on the stack for the variable. */
2769 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2770
2771 if (nonlocal_goto_stack_level != 0)
2772 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2773
2774 /* Reference the variable indirect through that rtx. */
2775 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2776
2777 /* Indicate the alignment we actually gave this variable. */
2778#ifdef STACK_BOUNDARY
2779 DECL_ALIGN (decl) = STACK_BOUNDARY;
2780#else
2781 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2782#endif
2783 }
2784
2785 if (TREE_THIS_VOLATILE (decl))
2786 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2787 if (TREE_READONLY (decl))
2788 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2789
2790 /* If doing stupid register allocation, make sure life of any
2791 register variable starts here, at the start of its scope. */
2792
2793 if (obey_regdecls)
2794 use_variable (DECL_RTL (decl));
2795}
2796\f
2797/* Emit code to perform the initialization of a declaration DECL. */
2798
2799void
2800expand_decl_init (decl)
2801 tree decl;
2802{
2803 int was_used = TREE_USED (decl);
2804
2805 if (TREE_STATIC (decl))
2806 return;
2807
2808 /* Compute and store the initial value now. */
2809
2810 if (DECL_INITIAL (decl) == error_mark_node)
2811 {
2812 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2813 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2814 || code == POINTER_TYPE)
2815 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2816 0, 0);
2817 emit_queue ();
2818 }
2819 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2820 {
2821 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2822 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2823 emit_queue ();
2824 }
2825
2826 /* Don't let the initialization count as "using" the variable. */
2827 TREE_USED (decl) = was_used;
2828
2829 /* Free any temporaries we made while initializing the decl. */
2830 free_temp_slots ();
2831}
2832
2833/* CLEANUP is an expression to be executed at exit from this binding contour;
2834 for example, in C++, it might call the destructor for this variable.
2835
2836 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2837 either before or after calling `expand_decl' but before compiling
2838 any subsequent expressions. This is because CLEANUP may be expanded
2839 more than once, on different branches of execution.
2840 For the same reason, CLEANUP may not contain a CALL_EXPR
2841 except as its topmost node--else `preexpand_calls' would get confused.
2842
2843 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2844 that is not associated with any particular variable. */
2845
2846int
2847expand_decl_cleanup (decl, cleanup)
2848 tree decl, cleanup;
2849{
2850 struct nesting *thisblock = block_stack;
2851
2852 /* Error if we are not in any block. */
2853 if (thisblock == 0)
2854 return 0;
2855
2856 /* Record the cleanup if there is one. */
2857
2858 if (cleanup != 0)
2859 {
2860 thisblock->data.block.cleanups
2861 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2862 /* If this block has a cleanup, it belongs in stack_block_stack. */
2863 stack_block_stack = thisblock;
2864 }
2865 return 1;
2866}
2867\f
2868/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2869 DECL_ELTS is the list of elements that belong to DECL's type.
2870 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
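/* Illustrative example (C++): for an anonymous `union { int i; float f; };'
   the union decl itself gets one rtx, and each member is then given that
   same rtx re-expressed in the member's own machine mode: a retyped MEM
   if the union lives in memory, or a SUBREG if it lives in a register.  */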
2871
2872void
2873expand_anon_union_decl (decl, cleanup, decl_elts)
2874 tree decl, cleanup, decl_elts;
2875{
2876 struct nesting *thisblock = block_stack;
2877 rtx x;
2878
2879 /* `expand_decl' takes only the decl; record the cleanup separately.  */
 expand_decl (decl);
 expand_decl_cleanup (decl, cleanup);
2880 x = DECL_RTL (decl);
2881
2882 while (decl_elts)
2883 {
2884 tree decl_elt = TREE_VALUE (decl_elts);
2885 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2886 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2887
2888 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2889 instead create a new MEM rtx with the proper mode. */
2890 if (GET_CODE (x) == MEM)
2891 {
2892 if (mode == GET_MODE (x))
2893 DECL_RTL (decl_elt) = x;
2894 else
2895 {
2896 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2897 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2898 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2899 }
2900 }
2901 else if (GET_CODE (x) == REG)
2902 {
2903 if (mode == GET_MODE (x))
2904 DECL_RTL (decl_elt) = x;
2905 else
2906 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2907 }
2908 else
2909 abort ();
2910
2911 /* Record the cleanup if there is one. */
2912
2913 if (cleanup != 0)
2914 thisblock->data.block.cleanups
2915 = temp_tree_cons (decl_elt, cleanup_elt,
2916 thisblock->data.block.cleanups);
2917
2918 decl_elts = TREE_CHAIN (decl_elts);
2919 }
2920}
2921\f
2922/* Expand a list of cleanups LIST.
2923 Elements may be expressions or may be nested lists.
2924
2925 If DONT_DO is nonnull, then any list-element
2926 whose TREE_PURPOSE matches DONT_DO is omitted.
2927 This is sometimes used to avoid a cleanup associated with
2928 a value that is being returned out of the scope. */
2929
2930static void
2931expand_cleanups (list, dont_do)
2932 tree list;
2933 tree dont_do;
2934{
2935 tree tail;
2936 for (tail = list; tail; tail = TREE_CHAIN (tail))
2937 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2938 {
2939 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2940 expand_cleanups (TREE_VALUE (tail), dont_do);
2941 else
2942 {
2943 /* Cleanups may be run multiple times. For example,
2944 when exiting a binding contour, we expand the
2945 cleanups associated with that contour. When a goto
2946 within that binding contour has a target outside that
2947 contour, it will expand all cleanups from its scope to
2948 the target. Though the cleanups are expanded multiple
2949 times, the control paths are non-overlapping so the
2950 cleanups will not be executed twice. */
2951 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2952 free_temp_slots ();
2953 }
2954 }
2955}
2956
2957/* Expand a list of cleanups for a goto fixup.
2958 The expansion is put into the insn chain after the insn *BEFORE_JUMP
2959 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
2960
2961static void
2962fixup_cleanups (list, before_jump)
2963 tree list;
2964 rtx *before_jump;
2965{
2966 rtx beyond_jump = get_last_insn ();
2967 rtx new_before_jump;
2968
2969 expand_cleanups (list, 0);
2970 /* Pop any pushes done in the cleanups,
2971 in case function is about to return. */
2972 do_pending_stack_adjust ();
2973
2974 new_before_jump = get_last_insn ();
2975
2976 if (beyond_jump != new_before_jump)
2977 {
2978 /* If cleanups expand to nothing, don't reorder. */
2979 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
2980 *before_jump = new_before_jump;
2981 }
2982}
2983
2984/* Move all cleanups from the current block_stack
2985 to the containing block_stack, where they are assumed to
2986 have been created. If anything can cause a temporary to
2987 be created, but not expanded for more than one level of
2988 block_stacks, then this code will have to change. */
2989
2990void
2991move_cleanups_up ()
2992{
2993 struct nesting *block = block_stack;
2994 struct nesting *outer = block->next;
2995
2996 outer->data.block.cleanups
2997 = chainon (block->data.block.cleanups,
2998 outer->data.block.cleanups);
2999 block->data.block.cleanups = 0;
3000}
3001
3002tree
3003last_cleanup_this_contour ()
3004{
3005 if (block_stack == 0)
3006 return 0;
3007
3008 return block_stack->data.block.cleanups;
3009}
3010
3011/* Return 1 if there are any pending cleanups at this point.
3012 If THIS_CONTOUR is nonzero, check the current contour as well.
3013 Otherwise, look only at the contours that enclose this one. */
3014
3015int
3016any_pending_cleanups (this_contour)
3017 int this_contour;
3018{
3019 struct nesting *block;
3020
3021 if (block_stack == 0)
3022 return 0;
3023
3024 if (this_contour && block_stack->data.block.cleanups != NULL)
3025 return 1;
3026 if (block_stack->data.block.cleanups == 0
3027 && (block_stack->data.block.outer_cleanups == 0
3028#if 0
3029 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3030#endif
3031 ))
3032 return 0;
3033
3034 for (block = block_stack->next; block; block = block->next)
3035 if (block->data.block.cleanups != 0)
3036 return 1;
3037
3038 return 0;
3039}
3040\f
3041/* Enter a case (Pascal) or switch (C) statement.
3042 Push a block onto case_stack and nesting_stack
3043 to accumulate the case-labels that are seen
3044 and to record the labels generated for the statement.
3045
3046 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3047 Otherwise, this construct is transparent for `exit_something'.
3048
3049 EXPR is the index-expression to be dispatched on.
3050 TYPE is its nominal type. We could simply convert EXPR to this type,
3051 but instead we take short cuts. */
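/* Illustrative call sequence (not part of the original comments) for
   `switch (x) { case 1: ... default: ... }':

	expand_start_case (1, x, type, "switch statement");
	pushcase (build_int_2 (1, 0), lab1, &duplicate);
	... expand the case's statements ...
	pushcase (NULL_TREE, deflab, &duplicate);
	... expand the default's statements ...
	expand_end_case (x);

   where lab1 and deflab are LABEL_DECLs made by the front end.  */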
3052
3053void
3054expand_start_case (exit_flag, expr, type, printname)
3055 int exit_flag;
3056 tree expr;
3057 tree type;
3058 char *printname;
3059{
3060 register struct nesting *thiscase = ALLOC_NESTING ();
3061
3062 /* Make an entry on case_stack for the case we are entering. */
3063
3064 thiscase->next = case_stack;
3065 thiscase->all = nesting_stack;
3066 thiscase->depth = ++nesting_depth;
3067 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3068 thiscase->data.case_stmt.case_list = 0;
3069 thiscase->data.case_stmt.index_expr = expr;
3070 thiscase->data.case_stmt.nominal_type = type;
3071 thiscase->data.case_stmt.default_label = 0;
3072 thiscase->data.case_stmt.num_ranges = 0;
3073 thiscase->data.case_stmt.printname = printname;
3074 thiscase->data.case_stmt.seenlabel = 0;
3075 case_stack = thiscase;
3076 nesting_stack = thiscase;
3077
3078 do_pending_stack_adjust ();
3079
3080 /* Make sure case_stmt.start points to something that won't
3081 need any transformation before expand_end_case. */
3082 if (GET_CODE (get_last_insn ()) != NOTE)
3083 emit_note (0, NOTE_INSN_DELETED);
3084
3085 thiscase->data.case_stmt.start = get_last_insn ();
3086}
3087
3088/* Start a "dummy case statement" within which case labels are invalid
3089 and are not connected to any larger real case statement.
3090 This can be used if you don't want to let a case statement jump
3091 into the middle of certain kinds of constructs. */
3092
3093void
3094expand_start_case_dummy ()
3095{
3096 register struct nesting *thiscase = ALLOC_NESTING ();
3097
3098 /* Make an entry on case_stack for the dummy. */
3099
3100 thiscase->next = case_stack;
3101 thiscase->all = nesting_stack;
3102 thiscase->depth = ++nesting_depth;
3103 thiscase->exit_label = 0;
3104 thiscase->data.case_stmt.case_list = 0;
3105 thiscase->data.case_stmt.start = 0;
3106 thiscase->data.case_stmt.nominal_type = 0;
3107 thiscase->data.case_stmt.default_label = 0;
3108 thiscase->data.case_stmt.num_ranges = 0;
3109 case_stack = thiscase;
3110 nesting_stack = thiscase;
3111}
3112
3113/* End a dummy case statement. */
3114
3115void
3116expand_end_case_dummy ()
3117{
3118 POPSTACK (case_stack);
3119}
3120
3121/* Return the data type of the index-expression
3122 of the innermost case statement, or null if none. */
3123
3124tree
3125case_index_expr_type ()
3126{
3127 if (case_stack)
3128 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3129 return 0;
3130}
3131\f
3132/* Accumulate one case or default label inside a case or switch statement.
3133 VALUE is the value of the case (a null pointer, for a default label).
3134
3135 If not currently inside a case or switch statement, return 1 and do
3136 nothing. The caller will print a language-specific error message.
3137 If VALUE is a duplicate or overlaps, return 2 and do nothing
3138 except store the (first) duplicate node in *DUPLICATE.
3139 If VALUE is out of range, return 3 and do nothing.
3140 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3141 Return 0 on success.
3142
3143 Extended to handle range statements. */
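/* Sketch of a caller checking the return codes (illustrative only):

	tree duplicate;
	switch (pushcase (value, lab, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error_with_decl (duplicate, "duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }  */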
3144
3145int
3146pushcase (value, label, duplicate)
3147 register tree value;
3148 register tree label;
3149 tree *duplicate;
3150{
3151 register struct case_node **l;
3152 register struct case_node *n;
3153 tree index_type;
3154 tree nominal_type;
3155
3156 /* Fail if not inside a real case statement. */
3157 if (! (case_stack && case_stack->data.case_stmt.start))
3158 return 1;
3159
3160 if (stack_block_stack
3161 && stack_block_stack->depth > case_stack->depth)
3162 return 5;
3163
3164 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3165 nominal_type = case_stack->data.case_stmt.nominal_type;
3166
3167 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3168 if (index_type == error_mark_node)
3169 return 0;
3170
3171 /* Convert VALUE to the type in which the comparisons are nominally done. */
3172 if (value != 0)
3173 value = convert (nominal_type, value);
3174
3175 /* If this is the first label, warn if any insns have been emitted. */
3176 if (case_stack->data.case_stmt.seenlabel == 0)
3177 {
3178 rtx insn;
3179 for (insn = case_stack->data.case_stmt.start;
3180 insn;
3181 insn = NEXT_INSN (insn))
3182 {
3183 if (GET_CODE (insn) == CODE_LABEL)
3184 break;
3185 if (GET_CODE (insn) != NOTE
3186 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3187 {
3188 warning ("unreachable code at beginning of %s",
3189 case_stack->data.case_stmt.printname);
3190 break;
3191 }
3192 }
3193 }
3194 case_stack->data.case_stmt.seenlabel = 1;
3195
3196 /* Fail if this value is out of range for the actual type of the index
3197 (which may be narrower than NOMINAL_TYPE). */
3198 if (value != 0 && ! int_fits_type_p (value, index_type))
3199 return 3;
3200
3201 /* Fail if this is a duplicate or overlaps another entry. */
3202 if (value == 0)
3203 {
3204 if (case_stack->data.case_stmt.default_label != 0)
3205 {
3206 *duplicate = case_stack->data.case_stmt.default_label;
3207 return 2;
3208 }
3209 case_stack->data.case_stmt.default_label = label;
3210 }
3211 else
3212 {
3213 /* Find the elt in the chain before which to insert the new value,
3214 to keep the chain sorted in increasing order.
3215 But report an error if this element is a duplicate. */
3216 for (l = &case_stack->data.case_stmt.case_list;
3217 /* Keep going past elements distinctly less than VALUE. */
3218 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3219 l = &(*l)->right)
3220 ;
3221 if (*l)
3222 {
3223 /* Element we will insert before must be distinctly greater;
3224 overlap means error. */
3225 if (! tree_int_cst_lt (value, (*l)->low))
3226 {
3227 *duplicate = (*l)->code_label;
3228 return 2;
3229 }
3230 }
3231
3232 /* Add this label to the chain, and succeed.
3233 Copy VALUE so it is on temporary rather than momentary
3234 obstack and will thus survive till the end of the case statement. */
3235 n = (struct case_node *) oballoc (sizeof (struct case_node));
3236 n->left = 0;
3237 n->right = *l;
3238 n->high = n->low = copy_node (value);
3239 n->code_label = label;
3240 *l = n;
3241 }
3242
3243 expand_label (label);
3244 return 0;
3245}
3246
3247/* Like pushcase but this case applies to all values
3248 between VALUE1 and VALUE2 (inclusive).
3249 The return value is the same as that of pushcase
3250 but there is one additional error code:
3251 4 means the specified range was empty. */
3252
3253int
3254pushcase_range (value1, value2, label, duplicate)
3255 register tree value1, value2;
3256 register tree label;
3257 tree *duplicate;
3258{
3259 register struct case_node **l;
3260 register struct case_node *n;
3261 tree index_type;
3262 tree nominal_type;
3263
3264 /* Fail if not inside a real case statement. */
3265 if (! (case_stack && case_stack->data.case_stmt.start))
3266 return 1;
3267
3268 if (stack_block_stack
3269 && stack_block_stack->depth > case_stack->depth)
3270 return 5;
3271
3272 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3273 nominal_type = case_stack->data.case_stmt.nominal_type;
3274
3275 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3276 if (index_type == error_mark_node)
3277 return 0;
3278
3279 /* If this is the first label, warn if any insns have been emitted. */
3280 if (case_stack->data.case_stmt.seenlabel == 0)
3281 {
3282 rtx insn;
3283 for (insn = case_stack->data.case_stmt.start;
3284 insn;
3285 insn = NEXT_INSN (insn))
3286 {
3287 if (GET_CODE (insn) == CODE_LABEL)
3288 break;
3289 if (GET_CODE (insn) != NOTE
3290 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3291 {
3292 warning ("unreachable code at beginning of %s",
3293 case_stack->data.case_stmt.printname);
3294 break;
3295 }
3296 }
3297 }
3298 case_stack->data.case_stmt.seenlabel = 1;
3299
3300 /* Convert VALUEs to the type in which the comparisons are nominally done. */
3301 if (value1 == 0) /* Negative infinity. */
3302 value1 = TYPE_MIN_VALUE (index_type);
3303 value1 = convert (nominal_type, value1);
3304
3305 if (value2 == 0) /* Positive infinity. */
3306 value2 = TYPE_MAX_VALUE (index_type);
3307 value2 = convert (nominal_type, value2);
3308
3309 /* Fail if these values are out of range. */
3310 if (! int_fits_type_p (value1, index_type))
3311 return 3;
3312
3313 if (! int_fits_type_p (value2, index_type))
3314 return 3;
3315
3316 /* Fail if the range is empty. */
3317 if (tree_int_cst_lt (value2, value1))
3318 return 4;
3319
3320 /* If the bounds are equal, turn this into the one-value case. */
3321 if (tree_int_cst_equal (value1, value2))
3322 return pushcase (value1, label, duplicate);
3323
3324 /* Find the elt in the chain before which to insert the new value,
3325 to keep the chain sorted in increasing order.
3326 But report an error if this element is a duplicate. */
3327 for (l = &case_stack->data.case_stmt.case_list;
3328 /* Keep going past elements distinctly less than this range. */
3329 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3330 l = &(*l)->right)
3331 ;
3332 if (*l)
3333 {
3334 /* Element we will insert before must be distinctly greater;
3335 overlap means error. */
3336 if (! tree_int_cst_lt (value2, (*l)->low))
3337 {
3338 *duplicate = (*l)->code_label;
3339 return 2;
3340 }
3341 }
3342
3343 /* Add this label to the chain, and succeed.
3344 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3345 obstack and will thus survive till the end of the case statement. */
3346
3347 n = (struct case_node *) oballoc (sizeof (struct case_node));
3348 n->left = 0;
3349 n->right = *l;
3350 n->low = copy_node (value1);
3351 n->high = copy_node (value2);
3352 n->code_label = label;
3353 *l = n;
3354
3355 expand_label (label);
3356
3357 case_stack->data.case_stmt.num_ranges++;
3358
3359 return 0;
3360}
3361\f
3362/* Called when the index of a switch statement is an enumerated type
3363 and there is no default label.
3364
3365 Checks that all enumeration literals are covered by the case
3366 expressions of a switch. Also, warn if there are any extra
3367 switch cases that are *not* elements of the enumerated type.
3368
3369 If all enumeration literals were covered by the case expressions,
3370 turn one of the expressions into the default expression since it should
3371 not be possible to fall through such a switch. */
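/* For example (illustrative): given `enum e { A, B, C } x;' and a
   switch covering `case A:', `case B:' and `case C:' with no default,
   every literal is handled, so the last case becomes the default
   label and the switch can never fall through.  Omitting `case C:'
   instead draws a -Wswitch warning, as does a case value that is not
   a member of `enum e'.  */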
3372
3373void
3374check_for_full_enumeration_handling (type)
3375 tree type;
3376{
3377 register struct case_node *n;
3378 register struct case_node **l;
3379 register tree chain;
3380 int all_values = 1;
3381
3382 /* The time complexity of this loop is currently O(N * M), with
3383 N being the number of enumerals in the enumerated type, and
3384 M being the number of case expressions in the switch. */
3385
3386 for (chain = TYPE_VALUES (type);
3387 chain;
3388 chain = TREE_CHAIN (chain))
3389 {
3390 /* Find a match between enumeral and case expression, if possible.
3391 Quit looking when we've gone too far (since case expressions
3392 are kept sorted in ascending order). Warn about enumerals not
3393 handled in the switch statement case expression list. */
3394
3395 for (n = case_stack->data.case_stmt.case_list;
3396 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3397 n = n->right)
3398 ;
3399
3400 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3401 {
3402 if (warn_switch)
3403 warning ("enumerated value `%s' not handled in switch",
3404 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3405 all_values = 0;
3406 }
3407 }
3408
3409 /* Now we go the other way around; we warn if there are case
3410 expressions that don't correspond to enumerals. This can
3411 occur since C and C++ don't enforce type-checking of
3412 assignments to enumeration variables. */
3413
3414 if (warn_switch)
3415 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3416 {
3417 for (chain = TYPE_VALUES (type);
3418 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3419 chain = TREE_CHAIN (chain))
3420 ;
3421
3422 if (!chain)
3423 warning ("case value `%d' not in enumerated type `%s'",
3424 TREE_INT_CST_LOW (n->low),
3425 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3426 == IDENTIFIER_NODE)
3427 ? TYPE_NAME (type)
3428 : DECL_NAME (TYPE_NAME (type))));
3429 }
3430
3431 /* If all values were found as case labels, make one of them the default
3432 label. Thus, this switch will never fall through. We arbitrarily pick
3433 the last one to make the default since this is likely the most
3434 efficient choice. */
3435
3436 if (all_values)
3437 {
3438 for (l = &case_stack->data.case_stmt.case_list;
3439 (*l)->right != 0;
3440 l = &(*l)->right)
3441 ;
3442
3443 case_stack->data.case_stmt.default_label = (*l)->code_label;
3444 *l = 0;
3445 }
3446}
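/* A sketch of the coverage walk above on plain ints (hypothetical
   names; not part of the original file).  The original restarts its
   inner scan for every enumeral, hence O(N * M); under the extra
   assumption that the enumerals are also sorted ascending -- which C
   does not guarantee -- a single merge-style pass suffices:  */
#if 0
/* Return 1 if every value in ENUMS[0..N) appears in the ascending
   case list CASES[0..M), else 0.  ENUMS must be ascending too.  */
static int
full_coverage_p (const int *enums, int n, const int *cases, int m)
{
  int i, j = 0;

  for (i = 0; i < n; i++)
    {
      /* Quit looking when we've gone too far.  */
      while (j < m && cases[j] < enums[i])
	j++;
      if (j >= m || cases[j] != enums[i])
	return 0;		/* this enumeral is not handled */
    }
  return 1;
}
#endif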
3447\f
3448/* Terminate a case (Pascal) or switch (C) statement
3449 in which CASE_INDEX is the expression to be tested.
3450 Generate the code to test it and jump to the right place. */
3451
3452void
3453expand_end_case (orig_index)
3454 tree orig_index;
3455{
3456 tree minval, maxval, range;
3457 rtx default_label = 0;
3458 register struct case_node *n;
3459 int count;
3460 rtx index;
3461 rtx table_label = gen_label_rtx ();
3462 int ncases;
3463 rtx *labelvec;
3464 register int i;
3465 rtx before_case;
3466 register struct nesting *thiscase = case_stack;
3467 tree index_expr = thiscase->data.case_stmt.index_expr;
3468 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3469
3470 do_pending_stack_adjust ();
3471
3472 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3473 if (TREE_TYPE (index_expr) != error_mark_node)
3474 {
3475 /* If switch expression was an enumerated type, check that all
3476 enumeration literals are covered by the cases.
3477 No sense trying this if there's a default case, however. */
3478
3479 if (!thiscase->data.case_stmt.default_label
3480 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3481 && TREE_CODE (index_expr) != INTEGER_CST)
3482 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3483
3484 /* If this is the first label, warn if any insns have been emitted. */
3485 if (thiscase->data.case_stmt.seenlabel == 0)
3486 {
3487 rtx insn;
3488 for (insn = get_last_insn ();
3489 insn != case_stack->data.case_stmt.start;
3490 insn = PREV_INSN (insn))
3491 if (GET_CODE (insn) != NOTE
3492 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3493 {
3494 warning ("unreachable code at beginning of %s",
3495 case_stack->data.case_stmt.printname);
3496 break;
3497 }
3498 }
3499
3500 /* If we don't have a default-label, create one here,
3501 after the body of the switch. */
3502 if (thiscase->data.case_stmt.default_label == 0)
3503 {
3504 thiscase->data.case_stmt.default_label
3505 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3506 expand_label (thiscase->data.case_stmt.default_label);
3507 }
3508 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3509
3510 before_case = get_last_insn ();
3511
3512 /* Simplify the case-list before we count it. */
3513 group_case_nodes (thiscase->data.case_stmt.case_list);
3514
3515 /* Get upper and lower bounds of case values.
3516 Also convert all the case values to the index expr's data type. */
3517
3518 count = 0;
3519 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3520 {
3521 /* Check low and high label values are integers. */
3522 if (TREE_CODE (n->low) != INTEGER_CST)
3523 abort ();
3524 if (TREE_CODE (n->high) != INTEGER_CST)
3525 abort ();
3526
3527 n->low = convert (TREE_TYPE (index_expr), n->low);
3528 n->high = convert (TREE_TYPE (index_expr), n->high);
3529
3530 /* Count the elements and track the largest and smallest
3531 of them (treating them as signed even if they are not). */
3532 if (count++ == 0)
3533 {
3534 minval = n->low;
3535 maxval = n->high;
3536 }
3537 else
3538 {
3539 if (INT_CST_LT (n->low, minval))
3540 minval = n->low;
3541 if (INT_CST_LT (maxval, n->high))
3542 maxval = n->high;
3543 }
3544 /* A range counts double, since it requires two compares. */
3545 if (! tree_int_cst_equal (n->low, n->high))
3546 count++;
3547 }
3548
3549 /* Compute span of values. */
3550 if (count != 0)
3551 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3552 maxval, minval));
3553
3554 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3555 {
3556 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3557 emit_queue ();
3558 emit_jump (default_label);
3559 }
3560 /* If range of values is much bigger than number of values,
3561 make a sequence of conditional branches instead of a dispatch.
3562 If the switch-index is a constant, do it this way
3563 because we can optimize it. */
3564 else if (TREE_INT_CST_HIGH (range) != 0
3565#ifdef HAVE_casesi
3566 || (HAVE_casesi ? count < 4 : count < 5)
3567#else
3568 /* If machine does not have a case insn that compares the
3569 bounds, this means extra overhead for dispatch tables
3570 which raises the threshold for using them. */
3571 || count < 5
3572#endif
3573 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3574 || TREE_CODE (index_expr) == INTEGER_CST
3575 /* These will reduce to a constant. */
3576 || (TREE_CODE (index_expr) == CALL_EXPR
3577 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3578 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3579 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3580 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3581 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3582 {
3583 index = expand_expr (index_expr, 0, VOIDmode, 0);
3584
3585 /* If the index is a short or char for which we do not have
3586 an insn to handle comparisons directly, convert it to
3587 a full integer now, rather than letting each comparison
3588 generate the conversion. */
3589
3590 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3591 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3592 == CODE_FOR_nothing))
3593 {
3594 enum machine_mode wider_mode;
3595 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3596 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3597 if (cmp_optab->handlers[(int) wider_mode].insn_code
3598 != CODE_FOR_nothing)
3599 {
3600 index = convert_to_mode (wider_mode, index, unsignedp);
3601 break;
3602 }
3603 }
3604
3605 emit_queue ();
3606 do_pending_stack_adjust ();
3607
3608 index = protect_from_queue (index, 0);
3609 if (GET_CODE (index) == MEM)
3610 index = copy_to_reg (index);
3611 if (GET_CODE (index) == CONST_INT
3612 || TREE_CODE (index_expr) == INTEGER_CST)
3613 {
3614 /* Make a tree node with the proper constant value
3615 if we don't already have one. */
3616 if (TREE_CODE (index_expr) != INTEGER_CST)
3617 {
3618 index_expr
3619 = build_int_2 (INTVAL (index),
3620 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3621 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3622 }
3623
3624 /* For constant index expressions we need only
3625 issue an unconditional branch to the appropriate
3626 target code. The job of removing any unreachable
3627 code is left to the optimization phase if the
3628 "-O" option is specified. */
3629 for (n = thiscase->data.case_stmt.case_list;
3630 n;
3631 n = n->right)
3632 {
3633 if (! tree_int_cst_lt (index_expr, n->low)
3634 && ! tree_int_cst_lt (n->high, index_expr))
3635 break;
3636 }
3637 if (n)
3638 emit_jump (label_rtx (n->code_label));
3639 else
3640 emit_jump (default_label);
3641 }
3642 else
3643 {
3644 /* If the index expression is not constant we generate
3645 a binary decision tree to select the appropriate
3646 target code. This is done as follows:
3647
3648 The list of cases is rearranged into a binary tree,
3649 nearly optimal assuming equal probability for each case.
3650
3651 The tree is transformed into RTL, eliminating
3652 redundant test conditions at the same time.
3653
3654 If program flow could reach the end of the
3655 decision tree an unconditional jump to the
3656 default code is emitted. */
3657
3658 use_cost_table
3659 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3660 && default_label != 0
3661 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3662 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3663 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3664 default_label, TREE_TYPE (index_expr));
3665 emit_jump_if_reachable (default_label);
3666 }
3667 }
3668 else
3669 {
3670 int win = 0;
3671#ifdef HAVE_casesi
3672 if (HAVE_casesi)
3673 {
3674 enum machine_mode index_mode = SImode;
3675 int index_bits = GET_MODE_BITSIZE (index_mode);
3676
3677 /* Convert the index to SImode. */
3678 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3679 > GET_MODE_BITSIZE (index_mode))
3680 {
3681 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3682 index_expr, minval);
3683 minval = integer_zero_node;
3684 }
3685 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3686 index_expr = convert (type_for_size (index_bits, 0),
3687 index_expr);
3688 index = expand_expr (index_expr, 0, VOIDmode, 0);
3689 emit_queue ();
3690 index = protect_from_queue (index, 0);
3691 do_pending_stack_adjust ();
3692
3693 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3694 expand_expr (range, 0, VOIDmode, 0),
3695 table_label, default_label));
3696 win = 1;
3697 }
3698#endif
3699#ifdef HAVE_tablejump
3700 if (! win && HAVE_tablejump)
3701 {
3702 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3703 fold (build (MINUS_EXPR,
3704 TREE_TYPE (index_expr),
3705 index_expr, minval)));
3706 index = expand_expr (index_expr, 0, VOIDmode, 0);
3707 emit_queue ();
3708 /* convert_to_mode calls protect_from_queue. */
3709 index = convert_to_mode (Pmode, index, 1);
3710 do_pending_stack_adjust ();
3711
3712 do_tablejump (index, Pmode,
3713 gen_rtx (CONST_INT, VOIDmode,
3714 TREE_INT_CST_LOW (range)),
3715 table_label, default_label);
3716 win = 1;
3717 }
3718#endif
3719 if (! win)
3720 abort ();
3721
3722 /* Get table of labels to jump to, in order of case index. */
3723
3724 ncases = TREE_INT_CST_LOW (range) + 1;
3725 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3726 bzero (labelvec, ncases * sizeof (rtx));
3727
3728 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3729 {
3730 register int i
3731 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3732
3733 while (1)
3734 {
3735 labelvec[i]
3736 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3737 if (i + TREE_INT_CST_LOW (minval)
3738 == TREE_INT_CST_LOW (n->high))
3739 break;
3740 i++;
3741 }
3742 }
3743
3744 /* Fill in the gaps with the default. */
3745 for (i = 0; i < ncases; i++)
3746 if (labelvec[i] == 0)
3747 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3748
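/* A worked illustration of the resulting table (not in the original):
   for `case 1: case 2: case 4 ... 5:' with default D, minval is 1,
   range is 4 and ncases is 5, giving

	slot:    0   1   2   3   4
	value:   1   2   3   4   5
	label:  L1  L2   D  L45 L45

   where slot 2 is supplied by the gap-fill loop just above.  */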
3749 /* Output the table */
3750 emit_label (table_label);
3751
3752 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3753 were an expression, instead of an #ifdef/#ifndef. */
3754 if (
3755#ifdef CASE_VECTOR_PC_RELATIVE
3756 1 ||
3757#endif
3758 flag_pic)
3759 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3760 gen_rtx (LABEL_REF, Pmode, table_label),
3761 gen_rtvec_v (ncases, labelvec)));
3762 else
3763 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3764 gen_rtvec_v (ncases, labelvec)));
3765
3766 /* If the case insn drops through the table,
3767 after the table we must jump to the default-label.
3768 Otherwise record no drop-through after the table. */
3769#ifdef CASE_DROPS_THROUGH
3770 emit_jump (default_label);
3771#else
3772 emit_barrier ();
3773#endif
3774 }
3775
3776 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3777 reorder_insns (before_case, get_last_insn (),
3778 thiscase->data.case_stmt.start);
3779 }
3780 if (thiscase->exit_label)
3781 emit_label (thiscase->exit_label);
3782
3783 POPSTACK (case_stack);
3784
3785 free_temp_slots ();
3786}
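/* The choice above between a comparison tree and a dispatch table
   boils down to a simple predicate (constant and wider-than-a-word
   index values are excluded separately).  A sketch of that rule with
   a hypothetical helper, kept under #if 0:  */
#if 0
/* Return 1 if a jump table is worthwhile: enough case labels, and a
   value range no more than ten times the comparison count.  The
   threshold is lower when a casesi insn does the bounds check.  */
static int
use_jump_table_p (unsigned long range, int count, int have_casesi)
{
  int threshold = have_casesi ? 4 : 5;

  return count >= threshold && range <= 10 * (unsigned long) count;
}
#endif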
3787
3788/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3789
3790static void
3791do_jump_if_equal (op1, op2, label, unsignedp)
3792 rtx op1, op2, label;
3793 int unsignedp;
3794{
3795 if (GET_CODE (op1) == CONST_INT
3796 && GET_CODE (op2) == CONST_INT)
3797 {
3798 if (INTVAL (op1) == INTVAL (op2))
3799 emit_jump (label);
3800 }
3801 else
3802 {
3803 enum machine_mode mode = GET_MODE (op1);
3804 if (mode == VOIDmode)
3805 mode = GET_MODE (op2);
3806 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3807 emit_jump_insn (gen_beq (label));
3808 }
3809}
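/* When both operands are compile-time constants the branch above is
   resolved immediately: either a plain jump or no code at all.  The
   same idea in isolation (a sketch, not part of the original):  */
#if 0
static void
jump_if_equal_const (long op1, long op2, rtx label)
{
  if (op1 == op2)
    emit_jump (label);		/* branch is always taken */
  /* Otherwise the branch is never taken; emit nothing.  */
}
#endif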
3810\f
3811/* Not all case values are encountered equally. This function
3812 uses a heuristic to weight case labels, in cases where that
3813 looks like a reasonable thing to do.
3814
3815 Right now, all we try to guess is text, and we establish the
3816 following weights:
3817
3818 chars above space: 16
3819 digits: 16
3820 default: 12
3821 space, punct: 8
3822 tab: 4
3823 newline: 2
3824 other "\" chars: 1
3825 remaining chars: 0
3826
3827 If we find any cases in the switch that are not either -1 or in the range
3828 of valid ASCII characters, or are control characters other than those
3829 commonly used with "\", don't treat this switch as scanning text.
3830
3831 Return 1 if these nodes are suitable for cost estimation, otherwise
3832 return 0. */
3833
3834static int
3835estimate_case_costs (node)
3836 case_node_ptr node;
3837{
3838 tree min_ascii = build_int_2 (-1, -1);
3839 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3840 case_node_ptr n;
3841 int i;
3842
3843 /* If we haven't already made the cost table, make it now. Note that the
3844 lower bound of the table is -1, not zero. */
3845
3846 if (cost_table == NULL)
3847 {
3848 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3849 bzero (cost_table - 1, 129 * sizeof (short));
3850
3851 for (i = 0; i < 128; i++)
3852 {
3853 if (isalnum (i))
3854 cost_table[i] = 16;
3855 else if (ispunct (i))
3856 cost_table[i] = 8;
3857 else if (iscntrl (i))
3858 cost_table[i] = -1;
3859 }
3860
3861 cost_table[' '] = 8;
3862 cost_table['\t'] = 4;
3863 cost_table['\0'] = 4;
3864 cost_table['\n'] = 2;
3865 cost_table['\f'] = 1;
3866 cost_table['\v'] = 1;
3867 cost_table['\b'] = 1;
3868 }
3869
3870 /* See if all the case expressions look like text. It is text if the
3871 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3872 as signed arithmetic since we don't want to ever access cost_table with a
3873 value less than -1. Also check that none of the constants in a range
3874 are strange control characters. */
3875
3876 for (n = node; n; n = n->right)
3877 {
3878 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3879 return 0;
3880
3881 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3882 if (cost_table[i] < 0)
3883 return 0;
3884 }
3885
3886 /* All interesting values are within the range of interesting
3887 ASCII characters. */
3888 return 1;
3889}
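/* The cost table above uses a biased pointer: 129 slots are
   allocated and the base pointer advanced by one, so indices -1
   through 127 are all valid.  The trick in isolation (hypothetical
   name, <stdlib.h> assumed; kept under #if 0):  */
#if 0
static short *
make_biased_table ()
{
  short *base = (short *) calloc (129, sizeof (short));

  return base + 1;	/* caller may index table[-1] .. table[127] */
}
#endif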
3890
3891/* Scan an ordered list of case nodes
3892 combining those with consecutive values or ranges.
3893
3894 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
3895
3896static void
3897group_case_nodes (head)
3898 case_node_ptr head;
3899{
3900 case_node_ptr node = head;
3901
3902 while (node)
3903 {
3904 rtx lb = next_real_insn (label_rtx (node->code_label));
3905 case_node_ptr np = node;
3906
3907 /* Try to group the successors of NODE with NODE. */
3908 while (((np = np->right) != 0)
3909 /* Do they jump to the same place? */
3910 && next_real_insn (label_rtx (np->code_label)) == lb
3911 /* Are their ranges consecutive? */
3912 && tree_int_cst_equal (np->low,
3913 fold (build (PLUS_EXPR,
3914 TREE_TYPE (node->high),
3915 node->high,
3916 integer_one_node)))
3917 /* An overflow is not consecutive. */
3918 && tree_int_cst_lt (node->high,
3919 fold (build (PLUS_EXPR,
3920 TREE_TYPE (node->high),
3921 node->high,
3922 integer_one_node))))
3923 {
3924 node->high = np->high;
3925 }
3926 /* NP is the first node after NODE which can't be grouped with it.
3927 Delete the nodes in between, and move on to that node. */
3928 node->right = np;
3929 node = np;
3930 }
3931}
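/* A sketch of the same grouping pass on plain int ranges
   (hypothetical names, <limits.h> assumed; not part of the original
   file).  The INT_MAX test mirrors the "overflow is not consecutive"
   guard above:  */
#if 0
struct rnode
{
  int low, high;		/* inclusive bounds */
  int target;			/* where this range dispatches to */
  struct rnode *right;		/* next range in ascending order */
};

static void
group_ranges (struct rnode *node)
{
  while (node)
    {
      struct rnode *np = node;

      /* Absorb successors that dispatch to the same target and whose
	 range starts exactly one past this node's high bound.  */
      while ((np = np->right) != 0
	     && np->target == node->target
	     && node->high != INT_MAX	/* high + 1 would overflow */
	     && np->low == node->high + 1)
	node->high = np->high;

      /* NP is the first ungroupable node; unlink the absorbed ones.
	 (A real pass would also reclaim their storage.)  */
      node->right = np;
      node = np;
    }
}
#endif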
3932
3933/* Take an ordered list of case nodes
3934 and transform them into a near optimal binary tree,
3935 on the assumption that any target code selection value is as
3936 likely as any other.
3937
3938 The transformation is performed by splitting the ordered
3939 list into two equal sections plus a pivot. The parts are
3940 then attached to the pivot as left and right branches. Each
3941 branch is then transformed recursively. */
3942
3943static void
3944balance_case_nodes (head, parent)
3945 case_node_ptr *head;
3946 case_node_ptr parent;
3947{
3948 register case_node_ptr np;
3949
3950 np = *head;
3951 if (np)
3952 {
3953 int cost = 0;
3954 int i = 0;
3955 int ranges = 0;
3956 register case_node_ptr *npp;
3957 case_node_ptr left;
3958
3959 /* Count the number of entries on branch. Also count the ranges. */
3960
3961 while (np)
3962 {
3963 if (!tree_int_cst_equal (np->low, np->high))
3964 {
3965 ranges++;
3966 if (use_cost_table)
3967 cost += cost_table[TREE_INT_CST_LOW (np->high)];
3968 }
3969
3970 if (use_cost_table)
3971 cost += cost_table[TREE_INT_CST_LOW (np->low)];
3972
3973 i++;
3974 np = np->right;
3975 }
3976
3977 if (i > 2)
3978 {
3979 /* Split this list if it is long enough for that to help. */
3980 npp = head;
3981 left = *npp;
3982 if (use_cost_table)
3983 {
3984 /* Find the place in the list that bisects the list's total cost.
3985 Here I gets half the total cost. */
3986 int n_moved = 0;
3987 i = (cost + 1) / 2;
3988 while (1)
3989 {
3990 /* Skip nodes while their cost does not reach that amount. */
3991 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3992 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
3993 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
3994 if (i <= 0)
3995 break;
3996 npp = &(*npp)->right;
3997 n_moved += 1;
3998 }
3999 if (n_moved == 0)
4000 {
4001 /* Leave this branch lopsided, but optimize left-hand
4002 side and fill in `parent' fields for right-hand side. */
4003 np = *head;
4004 np->parent = parent;
4005 balance_case_nodes (&np->left, np);
4006 for (; np->right; np = np->right)
4007 np->right->parent = np;
4008 return;
4009 }
4010 }
4011 /* If there are just three nodes, split at the middle one. */
4012 else if (i == 3)
4013 npp = &(*npp)->right;
4014 else
4015 {
4016 /* Find the place in the list that bisects the list's total cost,
4017 where ranges count as 2.
4018 Here I gets half the total cost. */
4019 i = (i + ranges + 1) / 2;
4020 while (1)
4021 {
4022 /* Skip nodes while their cost does not reach that amount. */
4023 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4024 i--;
4025 i--;
4026 if (i <= 0)
4027 break;
4028 npp = &(*npp)->right;
4029 }
4030 }
4031 *head = np = *npp;
4032 *npp = 0;
4033 np->parent = parent;
4034 np->left = left;
4035
4036 /* Optimize each of the two split parts. */
4037 balance_case_nodes (&np->left, np);
4038 balance_case_nodes (&np->right, np);
4039 }
4040 else
4041 {
4042 /* Else leave this branch as one level,
4043 but fill in `parent' fields. */
4044 np = *head;
4045 np->parent = parent;
4046 for (; np->right; np = np->right)
4047 np->right->parent = np;
4048 }
4049 }
4050}
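/* The equal-probability split above is the textbook construction of
   a balanced search tree from a sorted sequence.  A sketch on a
   sorted array of node pointers (hypothetical types; kept under
   #if 0):  */
#if 0
struct tnode
{
  int val;
  struct tnode *left, *right;
};

/* Link NODES[LO..HI], sorted by val, into a balanced binary tree
   and return its root; the depth is about log2 of the node count.  */
static struct tnode *
build_balanced (struct tnode **nodes, int lo, int hi)
{
  int mid;

  if (lo > hi)
    return 0;
  mid = lo + (hi - lo) / 2;	/* the pivot */
  nodes[mid]->left = build_balanced (nodes, lo, mid - 1);
  nodes[mid]->right = build_balanced (nodes, mid + 1, hi);
  return nodes[mid];
}
#endif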
4051\f
4052/* Search the parent sections of the case node tree
4053 to see if a test for the lower bound of NODE would be redundant.
4054 INDEX_TYPE is the type of the index expression.
4055
4056 The instructions to generate the case decision tree are
4057 output in the same order as nodes are processed so it is
4058 known that if a parent node checks the range of the current
4059 node minus one that the current node is bounded at its lower
4060 span. Thus the test would be redundant. */
4061
4062static int
4063node_has_low_bound (node, index_type)
4064 case_node_ptr node;
4065 tree index_type;
4066{
4067 tree low_minus_one;
4068 case_node_ptr pnode;
4069
4070 /* If the lower bound of this node is the lowest value in the index type,
4071 we need not test it. */
4072
4073 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4074 return 1;
4075
4076 /* If this node has a left branch, the value at the left must be less
4077 than that at this node, so it cannot be bounded at the bottom and
4078 we need not bother testing any further. */
4079
4080 if (node->left)
4081 return 0;
4082
4083 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4084 node->low, integer_one_node));
4085
4086 /* If the subtraction above overflowed, we can't verify anything.
4087 Otherwise, look for a parent that tests our value - 1. */
4088
4089 if (! tree_int_cst_lt (low_minus_one, node->low))
4090 return 0;
4091
4092 for (pnode = node->parent; pnode; pnode = pnode->parent)
4093 if (tree_int_cst_equal (low_minus_one, pnode->high))
4094 return 1;
4095
4096 return 0;
4097}
4098
4099/* Search the parent sections of the case node tree
4100 to see if a test for the upper bound of NODE would be redundant.
4101 INDEX_TYPE is the type of the index expression.
4102
4103 The instructions to generate the case decision tree are
4104 output in the same order as nodes are processed so it is
4105 known that if a parent node checks the range of the current
4106 node plus one that the current node is bounded at its upper
4107 span. Thus the test would be redundant. */
4108
4109static int
4110node_has_high_bound (node, index_type)
4111 case_node_ptr node;
4112 tree index_type;
4113{
4114 tree high_plus_one;
4115 case_node_ptr pnode;
4116
4117 /* If the upper bound of this node is the highest value in the type
4118 of the index expression, we need not test against it. */
4119
4120 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4121 return 1;
4122
4123 /* If this node has a right branch, the value at the right must be greater
4124 than that at this node, so it cannot be bounded at the top and
4125 we need not bother testing any further. */
4126
4127 if (node->right)
4128 return 0;
4129
4130 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4131 node->high, integer_one_node));
4132
4133 /* If the addition above overflowed, we can't verify anything.
4134 Otherwise, look for a parent that tests our value + 1. */
4135
4136 if (! tree_int_cst_lt (node->high, high_plus_one))
4137 return 0;
4138
4139 for (pnode = node->parent; pnode; pnode = pnode->parent)
4140 if (tree_int_cst_equal (high_plus_one, pnode->low))
4141 return 1;
4142
4143 return 0;
4144}
4145
4146/* Search the parent sections of the
4147 case node tree to see if both tests for the upper and lower
4148 bounds of NODE would be redundant. */
4149
4150static int
4151node_is_bounded (node, index_type)
4152 case_node_ptr node;
4153 tree index_type;
4154{
4155 return (node_has_low_bound (node, index_type)
4156 && node_has_high_bound (node, index_type));
4157}
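/* The bound tests reduce to a walk up the parents.  A sketch of
   node_has_low_bound on plain ints (hypothetical names; TYPE_MIN
   plays the role of TYPE_MIN_VALUE, so low - 1 cannot wrap once the
   first test has passed; the original's node->left short-circuit is
   omitted for brevity):  */
#if 0
struct pnode
{
  int low, high;
  struct pnode *parent;
};

static int
has_low_bound (struct pnode *node, int type_min)
{
  struct pnode *p;

  if (node->low == type_min)
    return 1;			/* no smaller value exists to rule out */
  for (p = node->parent; p; p = p->parent)
    if (p->high == node->low - 1)
      return 1;			/* an ancestor ruled out values < low */
  return 0;
}
#endif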
4158
4159/* Emit an unconditional jump to LABEL unless it would be dead code. */
4160
4161static void
4162emit_jump_if_reachable (label)
4163 rtx label;
4164{
4165 if (GET_CODE (get_last_insn ()) != BARRIER)
4166 emit_jump (label);
4167}
4168\f
4169/* Emit step-by-step code to select a case for the value of INDEX.
4170 The thus generated decision tree follows the form of the
4171 case-node binary tree NODE, whose nodes represent test conditions.
4172 INDEX_TYPE is the type of the index of the switch.
4173
4174 Care is taken to prune redundant tests from the decision tree
4175 by detecting any boundary conditions already checked by
4176 emitted rtx. (See node_has_high_bound, node_has_low_bound
4177 and node_is_bounded, above.)
4178
4179 Where the test conditions can be shown to be redundant we emit
4180 an unconditional jump to the target code. As a further
4181 optimization, the subordinates of a tree node are examined to
4182 check for bounded nodes. In this case conditional and/or
4183 unconditional jumps as a result of the boundary check for the
4184 current node are arranged to target the subordinate's associated
4185 code for out-of-bound conditions on the current node.
4186
4187 We can assume that when control reaches the code generated here,
4188 the index value has already been compared with the parents
4189 of this node, and determined to be on the same side of each parent
4190 as this node is. Thus, if this node tests for the value 51,
4191 and a parent tested for 52, we don't need to consider
4192 the possibility of a value greater than 51. If another parent
4193 tests for the value 50, then this node need not test anything. */
4194
4195static void
4196emit_case_nodes (index, node, default_label, index_type)
4197 rtx index;
4198 case_node_ptr node;
4199 rtx default_label;
4200 tree index_type;
4201{
4202 /* If INDEX has an unsigned type, we must make unsigned branches. */
4203 int unsignedp = TREE_UNSIGNED (index_type);
4204 typedef rtx rtx_function ();
4205 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4206 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4207 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4208 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4209 enum machine_mode mode = GET_MODE (index);
4210
4211 /* See if our parents have already tested everything for us.
4212 If they have, emit an unconditional jump for this node. */
4213 if (node_is_bounded (node, index_type))
4214 emit_jump (label_rtx (node->code_label));
4215
4216 else if (tree_int_cst_equal (node->low, node->high))
4217 {
4218 /* Node is single valued. First see if the index expression matches
4219 this node and then check our children, if any. */
4220
4221 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4222 label_rtx (node->code_label), unsignedp);
4223
4224 if (node->right != 0 && node->left != 0)
4225 {
4226 /* This node has children on both sides.
4227 Dispatch to one side or the other
4228 by comparing the index value with this node's value.
4229 If one subtree is bounded, check that one first,
4230 so we can avoid real branches in the tree. */
4231
4232 if (node_is_bounded (node->right, index_type))
4233 {
4234 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4235 GT, 0, mode, unsignedp, 0);
4236
4237 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4238 emit_case_nodes (index, node->left, default_label, index_type);
4239 }
4240
4241 else if (node_is_bounded (node->left, index_type))
4242 {
4243 emit_cmp_insn (index, expand_expr (node->high, 0,
4244 VOIDmode, 0),
4245 LT, 0, mode, unsignedp, 0);
4246 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4247 emit_case_nodes (index, node->right, default_label, index_type);
4248 }
4249
4250 else
4251 {
4252 /* Neither node is bounded. First distinguish the two sides;
4253 then emit the code for one side at a time. */
4254
4255 tree test_label
4256 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4257
4258 /* See if the value is on the right. */
4259 emit_cmp_insn (index, expand_expr (node->high, 0,
4260 VOIDmode, 0),
4261 GT, 0, mode, unsignedp, 0);
4262 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4263
4264 /* Value must be on the left.
4265 Handle the left-hand subtree. */
4266 emit_case_nodes (index, node->left, default_label, index_type);
4267 /* If left-hand subtree does nothing,
4268 go to default. */
4269 emit_jump_if_reachable (default_label);
4270
4271 /* Code branches here for the right-hand subtree. */
4272 expand_label (test_label);
4273 emit_case_nodes (index, node->right, default_label, index_type);
4274 }
4275 }
4276
4277 else if (node->right != 0 && node->left == 0)
4278 {
4279 /* Here we have a right child but no left so we issue a conditional
4280 branch to default and process the right child.
4281
4282 Omit the conditional branch to default if the right child has no
4283 children and is single valued; it costs too much space to save so little time. */
4284
4285 if (node->right->right || node->right->left
4286 || !tree_int_cst_equal (node->right->low, node->right->high))
4287 {
4288 if (!node_has_low_bound (node, index_type))
4289 {
4290 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4291 LT, 0, mode, unsignedp, 0);
4292 emit_jump_insn ((*gen_blt_pat) (default_label));
4293 }
4294
4295 emit_case_nodes (index, node->right, default_label, index_type);
4296 }
4297 else
4298 /* We cannot process node->right normally
4299 since we haven't ruled out the numbers less than
4300 this node's value. So handle node->right explicitly. */
4301 do_jump_if_equal (index,
4302 expand_expr (node->right->low, 0, VOIDmode, 0),
4303 label_rtx (node->right->code_label), unsignedp);
4304 }
4305
4306 else if (node->right == 0 && node->left != 0)
4307 {
4308 /* Just one subtree, on the left. */
4309
4310#if 0 /* The following code and comment were formerly part
4311 of the condition here, but they didn't work
4312 and I don't understand what the idea was. -- rms. */
4313 /* If our "most probable entry" is less probable
4314 than the default label, emit a jump to
4315 the default label using condition codes
4316 already lying around. With no right branch,
4317 a branch-greater-than will get us to the default
4318 label correctly. */
4319 if (use_cost_table
4320 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4321 ;
4322#endif /* 0 */
4323 if (node->left->left || node->left->right
4324 || !tree_int_cst_equal (node->left->low, node->left->high))
4325 {
4326 if (!node_has_high_bound (node, index_type))
4327 {
4328 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4329 GT, 0, mode, unsignedp, 0);
4330 emit_jump_insn ((*gen_bgt_pat) (default_label));
4331 }
4332
4333 emit_case_nodes (index, node->left, default_label, index_type);
4334 }
4335 else
4336 /* We cannot process node->left normally
4337 since we haven't ruled out the numbers greater than
4338 this node's value. So handle node->left explicitly. */
4339 do_jump_if_equal (index,
4340 expand_expr (node->left->low, 0, VOIDmode, 0),
4341 label_rtx (node->left->code_label), unsignedp);
4342 }
4343 }
4344 else
4345 {
4346 /* Node is a range. These cases are very similar to those for a single
4347 value, except that we do not start by testing whether this node
4348 is the one to branch to. */
4349
4350 if (node->right != 0 && node->left != 0)
4351 {
4352 /* Node has subtrees on both sides.
4353 If the right-hand subtree is bounded,
4354 test for it first, since we can go straight there.
4355 Otherwise, we need to make a branch in the control structure,
4356 then handle the two subtrees. */
4357 tree test_label = 0;
4358
4359 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4360 GT, 0, mode, unsignedp, 0);
4361
4362 if (node_is_bounded (node->right, index_type))
4363 /* Right hand node is fully bounded so we can eliminate any
4364 testing and branch directly to the target code. */
4365 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4366 else
4367 {
4368 /* Right hand node requires testing.
4369 Branch to a label where we will handle it later. */
4370
4371 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4372 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4373 }
4374
4375 /* Value belongs to this node or to the left-hand subtree. */
4376
4377 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4378 GE, 0, mode, unsignedp, 0);
4379 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4380
4381 /* Handle the left-hand subtree. */
4382 emit_case_nodes (index, node->left, default_label, index_type);
4383
4384 /* If right node had to be handled later, do that now. */
4385
4386 if (test_label)
4387 {
4388 /* If the left-hand subtree fell through,
4389 don't let it fall into the right-hand subtree. */
4390 emit_jump_if_reachable (default_label);
4391
4392 expand_label (test_label);
4393 emit_case_nodes (index, node->right, default_label, index_type);
4394 }
4395 }
4396
4397 else if (node->right != 0 && node->left == 0)
4398 {
4399 /* Deal with values to the left of this node,
4400 if they are possible. */
4401 if (!node_has_low_bound (node, index_type))
4402 {
4403 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4404 LT, 0, mode, unsignedp, 0);
4405 emit_jump_insn ((*gen_blt_pat) (default_label));
4406 }
4407
4408 /* Value belongs to this node or to the right-hand subtree. */
4409
4410 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4411 LE, 0, mode, unsignedp, 0);
4412 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4413
4414 emit_case_nodes (index, node->right, default_label, index_type);
4415 }
4416
4417 else if (node->right == 0 && node->left != 0)
4418 {
4419 /* Deal with values to the right of this node,
4420 if they are possible. */
4421 if (!node_has_high_bound (node, index_type))
4422 {
4423 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4424 GT, 0, mode, unsignedp, 0);
4425 emit_jump_insn ((*gen_bgt_pat) (default_label));
4426 }
4427
4428 /* Value belongs to this node or to the left-hand subtree. */
4429
4430 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4431 GE, 0, mode, unsignedp, 0);
4432 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4433
4434 emit_case_nodes (index, node->left, default_label, index_type);
4435 }
4436
4437 else
4438 {
4439 /* Node has no children so we check low and high bounds to remove
4440 redundant tests. Only one of the bounds can exist,
4441 since otherwise this node is bounded--a case tested already. */
4442
4443 if (!node_has_high_bound (node, index_type))
4444 {
4445 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4446 GT, 0, mode, unsignedp, 0);
4447 emit_jump_insn ((*gen_bgt_pat) (default_label));
4448 }
4449
4450 if (!node_has_low_bound (node, index_type))
4451 {
4452 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4453 LT, 0, mode, unsignedp, 0);
4454 emit_jump_insn ((*gen_blt_pat) (default_label));
4455 }
4456
4457 emit_jump (label_rtx (node->code_label));
4458 }
4459 }
4460}
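/* A worked illustration (not in the original): for cases 1, 2, 3
   balanced with 2 at the root, the routine above emits

	if (x == 2) goto L2;
	if (x > 2) goto T;
	if (x == 1) goto L1;
	goto Ldefault;
     T:	if (x == 3) goto L3;
	goto Ldefault;

   (the trailing default jumps come from emit_jump_if_reachable).
   When node_is_bounded holds for a node, the parents' comparisons
   already pin its value down and the node collapses to a single
   unconditional jump.  */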
4461\f
4462/* These routines are used by the loop unrolling code. They copy BLOCK trees
4463 so that the debugging info will be correct for the unrolled loop. */
4464
4465/* Indexed by loop number, contains pointer to the first block in the loop,
4466 or zero if none. Only valid if doing loop unrolling and outputting debugger
4467 info. */
4468
4469tree *loop_number_first_block;
4470
4471/* Indexed by loop number, contains pointer to the last block in the loop,
4472 only valid if loop_number_first_block is nonzero. */
4473
4474tree *loop_number_last_block;
4475
4476/* Indexed by loop number, contains nesting level of first block in the
4477 loop, if any. Only valid if doing loop unrolling and outputting debugger
4478 info. */
4479
4480int *loop_number_block_level;
4481
4482/* Scan the function looking for loops, and walk the BLOCK tree at the
4483 same time. Record the first and last BLOCK tree corresponding to each
4484 loop. This function is similar to find_and_verify_loops in loop.c. */
4485
4486void
4487find_loop_tree_blocks (f)
4488 rtx f;
4489{
4490 rtx insn;
4491 int current_loop = -1;
4492 int next_loop = -1;
4493 int loop;
4494 int block_level, tree_level;
4495 tree tree_block, parent_tree_block;
4496
4497 tree_block = DECL_INITIAL (current_function_decl);
4498 parent_tree_block = 0;
4499 block_level = 0;
4500 tree_level = -1;
4501
4502 /* Find boundaries of loops, and save the first and last BLOCK tree
4503 corresponding to each loop. */
4504
4505 for (insn = f; insn; insn = NEXT_INSN (insn))
4506 {
4507 if (GET_CODE (insn) == NOTE)
4508 switch (NOTE_LINE_NUMBER (insn))
4509 {
4510 case NOTE_INSN_LOOP_BEG:
4511 loop_number_block_level[++next_loop] = block_level;
4512 loop_number_first_block[next_loop] = 0;
4513 current_loop = next_loop;
4514 break;
4515
4516 case NOTE_INSN_LOOP_END:
4517 if (current_loop == -1)
4518 abort ();
4519
4520 current_loop = loop_outer_loop[current_loop];
4521 break;
4522
4523 case NOTE_INSN_BLOCK_BEG:
4524 if (tree_level < block_level)
4525 {
4526 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4527 we must now visit the subtree of the current block. */
4528 parent_tree_block = tree_block;
4529 tree_block = BLOCK_SUBBLOCKS (tree_block);
4530 tree_level++;
4531 }
4532 else if (tree_level > block_level)
4533 abort ();
4534
4535 /* Save this block tree here for all nested loops for which
4536 this is the topmost block. */
4537 for (loop = current_loop;
4538 loop != -1 && block_level == loop_number_block_level[loop];
4539 loop = loop_outer_loop[loop])
4540 {
4541 if (loop_number_first_block[loop] == 0)
4542 loop_number_first_block[loop] = tree_block;
4543 loop_number_last_block[loop] = tree_block;
4544 }
4545
4546 block_level++;
4547 break;
4548
4549 case NOTE_INSN_BLOCK_END:
4550 block_level--;
4551 if (tree_level > block_level)
4552 {
4553 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4554 we must now visit the parent of the current tree. */
4555 if (tree_block != 0 || parent_tree_block == 0)
4556 abort ();
4557 tree_block = parent_tree_block;
4558 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4559 tree_level--;
4560 }
4561 tree_block = BLOCK_CHAIN (tree_block);
4562 break;
4563 }
4564 }
4565}
4566
4567/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4568 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4569
4570 Note that we only copy the topmost level of tree nodes; they will share
4571 pointers to the same subblocks. */
4572
4573void
4574unroll_block_trees (loop_number, copies)
4575 int loop_number;
4576 int copies;
4577{
4578 int i;
4579
4580 /* First check whether there are any blocks that need to be copied. */
4581 if (loop_number_first_block[loop_number])
4582 {
4583 tree first_block = loop_number_first_block[loop_number];
4584 tree last_block = loop_number_last_block[loop_number];
4585 tree last_block_created = 0;
4586
4587 for (i = 0; i < copies - 1; i++)
4588 {
4589 tree block = first_block;
4590 tree insert_after = last_block;
4591 tree copied_block;
4592
4593 /* Copy every block between first_block and last_block inclusive,
4594 inserting the new blocks after last_block. */
4595 do
4596 {
4597 tree new_block = make_node (BLOCK);
4598 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4599 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4600 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4601 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4602 TREE_USED (new_block) = TREE_USED (block);
4603
4604 /* Insert the new block after the insertion point, and move
4605 the insertion point to the new block. This ensures that
4606 the copies are inserted in the right order. */
4607 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4608 BLOCK_CHAIN (insert_after) = new_block;
4609 insert_after = new_block;
4610
4611 copied_block = block;
4612 block = BLOCK_CHAIN (block);
4613 }
4614 while (copied_block != last_block);
4615
4616 /* Remember the last block created, so that we can update the
4617 info in the tables. */
4618 if (last_block_created == 0)
4619 last_block_created = insert_after;
4620 }
4621
4622 /* For all nested loops for which LAST_BLOCK was originally the last
4623 block, update the tables to indicate that LAST_BLOCK_CREATED is
4624 now the last block in the loop. */
4625 for (i = loop_number; last_block == loop_number_last_block[i];
4626 i = loop_outer_loop[i])
4627 loop_number_last_block[i] = last_block_created;
4628 }
4629}
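/* A sketch of the copy-and-splice loop above on a plain chain
   (hypothetical names, <stdlib.h> assumed; kept under #if 0).
   Advancing the insertion point to each new copy keeps the copies
   in their original order:  */
#if 0
struct blk
{
  int payload;
  struct blk *chain;
};

/* Copy the nodes FIRST through LAST inclusive, splicing the copies,
   in order, right after LAST.  Return the last copy created.  */
static struct blk *
copy_blocks_after (struct blk *first, struct blk *last)
{
  struct blk *b = first, *insert_after = last, *nb, *copied;

  do
    {
      nb = (struct blk *) malloc (sizeof (struct blk));
      nb->payload = b->payload;

      /* Insert after the insertion point, then advance it.  */
      nb->chain = insert_after->chain;
      insert_after->chain = nb;
      insert_after = nb;

      copied = b;
      b = b->chain;
    }
  while (copied != last);

  return insert_after;
}
#endif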