/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
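
/* For example (an illustrative sketch, not code found in this file), a
   front end expanding the C statement `if (cond) { body }' makes roughly
   this sequence of calls, where COND is the tree for the condition:

	expand_start_cond (cond, 0);	-- jump past body if COND is false
	...expand_expr_stmt for each statement of the body...
	expand_end_cond ();		-- define the label that jump targets
*/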
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;
/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;
/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;
/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;
/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

extern rtx save_expr_regs;
/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */

extern int frame_offset;
/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */

extern rtx tail_recursion_label;
/* Place after which to insert the tail_recursion_label if we need one.  */

extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */

extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */
/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
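
/* To illustrate (hypothetical values): the GNU C range label
   `case 4 ... 7:' becomes one case_node with low = 4 and high = 7, while
   the ordinary `case 5:' becomes a node with low = high = 5.  Both are
   first chained through the right field in ascending order of value.  */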
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
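
/* A concrete sketch of the above: while expanding
   `while (c) { if (p) break; }', the `struct nesting' for the loop is on
   both loop_stack and nesting_stack; the one for the `if' is on cond_stack
   and, being innermost, ahead of the loop on nesting_stack.  The `break'
   is expanded against the innermost construct that has a non-null
   exit_label, here the loop.  */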
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  int seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;
	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
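
/* Usage sketch: each expand_end_WHATEVER below finishes with a call such
   as POPSTACK (cond_stack).  That pops the construct's own entry, and
   since obstack_free releases everything allocated after that entry as
   well, nesting_stack is simultaneously unwound to the matching depth.  */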
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
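
/* A source fragment (illustrative only) whose expansion needs a fixup:

	{
	  char buf[n];		-- variable-size array: this binding
	  ...			   contour records a stack level
	  goto done;		-- label not defined yet, so expand_fixup
	}			   queues a goto_fixup for the jump
	done: ;

   Each time a contour exits, fixup_gotos below either finalizes the fixup
   (once `done' has been emitted) or records the stack level and cleanup
   lists that the eventual jump must honor.  */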
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
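
/* The nonlocal case above arises for GNU C nested functions; an
   illustrative source fragment (hypothetical names):

	void outer ()
	{
	  __label__ done;
	  void inner () { goto done; }	-- the label's context is `outer',
	  ...				   not the current_function_decl of
	done: ;				   `inner', so the nonlocal path
	}				   is taken
*/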
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	nclobbers++;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;
      int found_equal = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
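
/* For orientation (an illustrative example, not emitted verbatim by this
   file): the GNU C statement

	asm volatile ("addl %1,%0" : "=r" (x) : "g" (y));

   reaches expand_asm_operands with STRING "addl %1,%0"; OUTPUTS a
   one-element list whose TREE_PURPOSE is the constraint "=r" and whose
   TREE_VALUE is the lvalue `x'; INPUTS an analogous list for "g" and `y';
   an empty CLOBBERS list; and VOL nonzero because of `volatile'.  */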
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
1499 { <-- exception handler block
1501 <-- in an exception handler
1503 : <-- in a TRY block
1504 : <-- in an exception handler
1509 : <-- in an except block
1510 : <-- in an exception handler
1517 /* Return nonzero iff in a try block at level LEVEL. */
1520 in_try_block (level
)
1523 struct nesting
*n
= except_stack
;
1526 while (n
&& n
->data
.except_stmt
.after_label
!= 0)
1537 /* Return nonzero iff in an except block at level LEVEL. */
1540 in_except_block (level
)
1543 struct nesting
*n
= except_stack
;
1546 while (n
&& n
->data
.except_stmt
.after_label
== 0)
1557 /* Return nonzero iff in an exception handler at level LEVEL. */
1560 in_exception_handler (level
)
1563 struct nesting
*n
= except_stack
;
1564 while (n
&& level
--)
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}
/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
}
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;

      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;

      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}
/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */

int
expand_escape_except ()
{
  struct nesting *n;

  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (NULL_TREE,
			      n->data.except_stmt.escape_label, NULL_RTX);
	return 1;
      }

  return 0;
}
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  while (TREE_CHAIN (prev) != NULL_TREE)
	    prev = TREE_CHAIN (prev);
	  TREE_CHAIN (prev) = n->data.except_stmt.raised;
	  n->data.except_stmt.raised = raised;
	nada:
	  ;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}
/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}
int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
  return 1;
}
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
  last_expr_type = 0;
}
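
/* Putting the cond functions together, an if-then-elseif-else statement
   is expanded by the call sequence (illustrative sketch):

	expand_start_cond (a, 0);	-- if (a) {
	...then-clause...
	expand_start_elseif (b);	-- } else if (b) {
	...elseif-clause...
	expand_start_else ();		-- } else {
	...else-clause...
	expand_end_cond ();		-- }
*/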
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}
/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  register rtx insn = get_last_insn ();
  register rtx start_label = loop_stack->data.loop.start_label;
  rtx last_test_insn = 0;
  int num_insns = 0;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

  if (optimize
      &&
      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
	    break;

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    break;

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
	    num_insns++;

	  if (last_test_insn && num_insns > 30)
	    break;

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
		       == loop_stack->data.loop.end_label))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			  == loop_stack->data.loop.end_label))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && (XEXP (SET_SRC (PATTERN (insn)), 0)
		  == loop_stack->data.loop.end_label))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);

  last_expr_type = 0;
}
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  last_expr_type = 0;
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}

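/* Usage sketch (illustrative only): a front end typically expands a C
   `continue' statement through the function above with a null argument
   and diagnoses a zero return, along the lines of:

	if (! expand_continue_loop (NULL_PTR))
	  error ("continue statement not within a loop");  */
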
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop (whichloop)
     struct nesting *whichloop;
{
  last_expr_type = 0;
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}

/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  last_expr_type = 0;
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}

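/* Usage sketch (illustrative only, assuming the body is a single
   expression-statement): a `while (COND) BODY' can be expanded with
   the loop entry points above as follows.  */

#if 0 /* example only */
static void
example_expand_while_stmt (cond, body)
     tree cond, body;
{
  expand_start_loop (1);	/* continue-point is the loop top */
  expand_exit_loop_if_false (NULL_PTR, cond);	/* exit when COND is 0 */
  expand_expr_stmt (body);	/* loop body */
  expand_end_loop ();		/* jump back; emit the exit label */
}
#endif
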
/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

int
preserve_subexpressions_p ()
{
  rtx insn;

  if (flag_expensive_optimizations)
    return 1;

  if (optimize == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
}

/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
	return 1;
      }

  return 0;
}

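/* Usage sketch (illustrative only): the C front end expands `break'
   through this entry point, diagnosing a zero return, roughly:

	if (! expand_exit_something ())
	  error ("break statement not within loop or switch");  */
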
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  struct nesting *block = block_stack;
  rtx last_insn = 0;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (val)
     rtx val;
{
  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    emit_move_insn (return_reg, val);
  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx (USE, VOIDmode, return_reg));

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);
}

/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

static void
expand_null_return_1 (last_insn, use_goto)
     rtx last_insn;
     int use_goto;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  last_expr_type = 0;

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
    {
      if (end_label == 0)
	end_label = return_label = gen_label_rtx ();
      expand_goto_internal (NULL_TREE, end_label, last_insn);
      return;
    }

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
#ifdef HAVE_return
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
      return;
    }
#endif

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (retval)
     tree retval;
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  register rtx val = 0;
  register rtx op0;
  tree retval_rhs;
  int cleanups;
  struct nesting *block;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  cleanups = any_pending_cleanups (1);

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			    DECL_RESULT (current_function_decl),
			    TREE_OPERAND (retval_rhs, 1)));
      emit_label (label);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			    DECL_RESULT (current_function_decl),
			    TREE_OPERAND (retval_rhs, 2)));
      return;
    }

  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
      /* Finish checking validity, and if valid emit code
	 to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
			      DECL_ARGUMENTS (current_function_decl)))
    {
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      emit_queue ();
      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
      emit_barrier ();
      return;
    }

#ifdef HAVE_return
  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct)
    {
      /* If this is  return x == y;  then generate
	 if (x == y) return 1; else return 0;
	 if we can do it with explicit return insns.  */
      if (retval_rhs)
	switch (TREE_CODE (retval_rhs))
	  {
	  case EQ_EXPR:
	  case NE_EXPR:
	  case GT_EXPR:
	  case GE_EXPR:
	  case LT_EXPR:
	  case LE_EXPR:
	  case TRUTH_ANDIF_EXPR:
	  case TRUTH_ORIF_EXPR:
	  case TRUTH_AND_EXPR:
	  case TRUTH_OR_EXPR:
	  case TRUTH_NOT_EXPR:
	    op0 = gen_label_rtx ();
	    jumpifnot (retval_rhs, op0);
	    expand_value_return (const1_rtx);
	    emit_label (op0);
	    expand_value_return (const0_rtx);
	    return;
	  }
    }
#endif /* HAVE_return */

  if (cleanups
      && TREE_TYPE (retval_rhs) != void_type_node
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      /* Calculate the return value into a pseudo reg.  */
      val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      /* All temporaries have now been used.  */
      free_temp_slots ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      free_temp_slots ();
      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
    }
}

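/* Usage sketch (illustrative only): a front end expands `return;' by
   calling expand_null_return, and `return EXPR;' by assigning EXPR to
   the RESULT_DECL and handing the assignment to expand_return.  The
   helper below is hypothetical, mirroring the MODIFY_EXPR shape that
   expand_return itself builds in its recursive calls above.  */

#if 0 /* example only */
static void
example_expand_return_stmt (retval)
     tree retval;
{
  if (retval == 0)
    expand_null_return ();
  else
    expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			  DECL_RESULT (current_function_decl), retval));
}
#endif
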
/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

int
drop_through_at_end_p ()
{
  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;
}

/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */

static int
tail_recursion_args (actuals, formals)
     tree actuals, formals;
{
  register tree a = actuals, f = formals;
  register int i;
  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f;
       a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      int copy = 0;
      register int j;
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  { copy = 1; break; }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (DECL_MODE (f) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	convert_move (DECL_RTL (f), argvec[i],
		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
    }

  return 1;
}

/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   EXIT_FLAG is nonzero if this construct should be visible to
   `exit_something'.  */

void
expand_start_bindings (exit_flag)
     int exit_flag;
{
  struct nesting *thisblock = ALLOC_NESTING ();

  rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.function_call_count = 0;
#if 0
  if (block_stack)
    {
      if (block_stack->data.block.cleanups == NULL_TREE
	  && (block_stack->data.block.outer_cleanups == NULL_TREE
	      || block_stack->data.block.outer_cleanups == empty_cleanup_list))
	thisblock->data.block.outer_cleanups = empty_cleanup_list;
      else
	thisblock->data.block.outer_cleanups
	  = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		       block_stack->data.block.outer_cleanups);
    }
  else
    thisblock->data.block.outer_cleanups = 0;
#endif
#if 1
  if (block_stack
      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
  else
    thisblock->data.block.outer_cleanups = 0;
#endif
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
  push_temp_slots ();
}

/* Given a pointer to a BLOCK node, save a pointer to the most recently
   generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
   BLOCK node.  */

void
remember_end_note (block)
     register tree block;
{
  BLOCK_END_NOTE (block) = last_block_end_note;
  last_block_end_note = NULL_RTX;
}

/* Generate RTL code to terminate a binding contour.
   VARS is the chain of VAR_DECL nodes
   for the variables bound in this contour.
   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  register struct nesting *thisblock = block_stack;
  register tree decl;

  if (warn_unused)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
	  && ! DECL_IN_SYSTEM_HEADER (decl))
	warning_with_decl (decl, "unused variable `%s'");

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make a handler for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      && nonlocal_labels
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    {
      tree link;
      rtx afterward = gen_label_rtx ();
      rtx handler_label = gen_label_rtx ();
      rtx save_receiver = gen_reg_rtx (Pmode);

      /* Don't let jump_optimize delete the handler.  */
      LABEL_PRESERVE_P (handler_label) = 1;

      /* Record the handler address in the stack slot for that purpose,
	 during this block, saving and restoring the outer value.  */
      if (thisblock->next != 0)
	{
	  emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
	  emit_insn_before (gen_move_insn (save_receiver,
					   nonlocal_goto_handler_slot),
			    thisblock->data.block.first_insn);
	}
      emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
				       gen_rtx (LABEL_REF, Pmode,
						handler_label)),
			thisblock->data.block.first_insn);

      /* Jump around the handler; it runs only when specially invoked.  */
      emit_jump (afterward);
      emit_label (handler_label);

#ifdef HAVE_nonlocal_goto
      if (! HAVE_nonlocal_goto)
#endif
	/* First adjust our frame pointer to its actual value.  It was
	   previously set to the start of the virtual area corresponding to
	   the stacked variables when we branched here and now needs to be
	   adjusted to the actual hardware fp value.

	   Assignments to virtual registers are converted by
	   instantiate_virtual_regs into the corresponding assignment
	   to the underlying register (fp in this case) that makes
	   the original assignment true.
	   So the following insn will actually be
	   decrementing fp by STARTING_FRAME_OFFSET.  */
	emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
	{
#ifdef ELIMINABLE_REGS
	  /* If the argument pointer can be eliminated in favor of the
	     frame pointer, we don't need to restore it.  We assume here
	     that if such an elimination is present, it can always be used.
	     This is the case on all known machines; if we don't make this
	     assumption, we do unnecessary saving on many machines.  */
	  static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
	  int i;

	  for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	    if (elim_regs[i].from == ARG_POINTER_REGNUM
		&& elim_regs[i].to == FRAME_POINTER_REGNUM)
	      break;

	  if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	    {
	      /* Now restore our arg pointer from the address at which it
		 was saved in our stack frame.
		 If there hasn't been space allocated for it yet, make
		 some now.  */
	      if (arg_pointer_save_area == 0)
		arg_pointer_save_area
		  = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	      emit_move_insn (virtual_incoming_args_rtx,
			      /* We need a pseudo here, or else
				 instantiate_virtual_regs_1 complains.  */
			      copy_to_reg (arg_pointer_save_area));
	    }
	}
#endif

      /* The handler expects the desired label address in the static chain
	 register.  It tests the address and does an appropriate jump
	 to whatever label is desired.  */
      for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
	/* Skip any labels we shouldn't be able to jump to from here.  */
	if (! DECL_TOO_LATE (TREE_VALUE (link)))
	  {
	    rtx not_this = gen_label_rtx ();
	    rtx this = gen_label_rtx ();
	    do_jump_if_equal (static_chain_rtx,
			      gen_rtx (LABEL_REF, Pmode,
				       DECL_RTL (TREE_VALUE (link))),
			      this, 0);
	    emit_jump (not_this);
	    emit_label (this);
	    expand_goto (TREE_VALUE (link));
	    emit_label (not_this);
	  }

      /* If label is not recognized, abort.  */
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
			 VOIDmode, 0);
      emit_label (afterward);
    }

  /* Don't allow jumping into a block that has cleanups or a stack level.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels and all cleanups.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
      do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  if (nonlocal_goto_handler_slot != 0)
	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
			     NULL_RTX);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  if (obey_regdecls)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      {
	rtx rtl = DECL_RTL (decl);
	if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
	  use_variable (rtl);
      }

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}

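/* Usage sketch (illustrative only): a front end expands a brace-level
   `{ ... }' by wrapping the declarations and statements in a binding
   contour, roughly:

	expand_start_bindings (0);
	... expand_decl/expand_decl_init for each local, then the body ...
	expand_end_bindings (DECLS, 1, 0);

   where DECLS is the chain of VAR_DECLs for the level just closed.  */
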
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   There is no special support here for C++ constructors.
   They should be handled by the proper code in DECL_INITIAL.  */

void
expand_decl (decl)
     register tree decl;
{
  struct nesting *thisblock = block_stack;
  tree type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (DECL_REGISTER (decl) || ! obey_regdecls))
    {
      /* Automatic variable that can go in a register.  */
      DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
      if (TREE_CODE (type) == POINTER_TYPE)
	mark_reg_pointer (DECL_RTL (decl));
      REG_USERVAR_P (DECL_RTL (decl)) = 1;
    }
  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      DECL_RTL (decl)
	= assign_stack_temp (DECL_MODE (decl),
			     ((TREE_INT_CST_LOW (DECL_SIZE (decl))
			       + BITS_PER_UNIT - 1)
			      / BITS_PER_UNIT),
			     1);

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
	= (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			   &thisblock->data.block.stack_level,
			   thisblock->data.block.first_insn);
	  stack_block_stack = thisblock;
	}

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      DECL_SIZE (decl),
				      size_int (BITS_PER_UNIT)),
			  NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* This is equivalent to calling alloca.  */
      current_function_calls_alloca = 1;

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      DECL_ALIGN (decl));

      if (nonlocal_goto_handler_slot != 0)
	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
	= (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}

/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  free_temp_slots ();
}

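/* Usage sketch (illustrative only, assuming X_DECL came from the
   parser): a declaration `int x = 5;' is expanded in two steps--first
   the RTL for the variable itself, then its initialization:

	expand_decl (x_decl);
	expand_decl_init (x_decl);

   where DECL_INITIAL (x_decl) holds the INTEGER_CST 5.  */
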
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      thisblock->data.block.cleanups
	= temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;
    }
  return 1;
}

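/* Usage sketch (illustrative only): a C++ front end declaring a local
   object with a destructor might record the destructor call as a
   cleanup for the enclosing contour, roughly:

	expand_decl (obj);
	expand_decl_init (obj);
	if (! expand_decl_cleanup (obj, destructor_call))
	  ... report that no binding contour is open ...

   where destructor_call is a hypothetical tree for the destructor
   invocation; it is re-expanded on every path that leaves the
   contour.  */
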
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    {
	      DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
	      MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
	    }
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
	}
      else
	abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup_elt != 0)
	{
	  thisblock->data.block.cleanups
	    = temp_tree_cons (decl_elt, cleanup_elt,
			      thisblock->data.block.cleanups);
	  /* If this block has a cleanup, it belongs in stack_block_stack.  */
	  stack_block_stack = thisblock;
	}

      decl_elts = TREE_CHAIN (decl_elts);
    }
}

/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.  */

static void
expand_cleanups (list, dont_do)
     tree list;
     tree dont_do;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do);
	else
	  {
	    /* Cleanups may be run multiple times.  For example,
	       when exiting a binding contour, we expand the
	       cleanups associated with that contour.  When a goto
	       within that binding contour has a target outside that
	       contour, it will expand all cleanups from its scope to
	       the target.  Though the cleanups are expanded multiple
	       times, the control paths are non-overlapping so the
	       cleanups will not be executed twice.  */
	    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
	    free_temp_slots ();
	  }
      }
}

/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}

/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;
  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list))
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}

/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}

3219 /* Start a "dummy case statement" within which case labels are invalid
3220 and are not connected to any larger real case statement.
3221 This can be used if you don't want to let a case statement jump
3222 into the middle of certain kinds of constructs. */
3225 expand_start_case_dummy ()
3227 register struct nesting
*thiscase
= ALLOC_NESTING ();
3229 /* Make an entry on case_stack for the dummy. */
3231 thiscase
->next
= case_stack
;
3232 thiscase
->all
= nesting_stack
;
3233 thiscase
->depth
= ++nesting_depth
;
3234 thiscase
->exit_label
= 0;
3235 thiscase
->data
.case_stmt
.case_list
= 0;
3236 thiscase
->data
.case_stmt
.start
= 0;
3237 thiscase
->data
.case_stmt
.nominal_type
= 0;
3238 thiscase
->data
.case_stmt
.default_label
= 0;
3239 thiscase
->data
.case_stmt
.num_ranges
= 0;
3240 case_stack
= thiscase
;
3241 nesting_stack
= thiscase
;
3244 /* End a dummy case statement. */
3247 expand_end_case_dummy ()
3249 POPSTACK (case_stack
);
/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}

/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (value, label, duplicate)
     register tree value;
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = convert (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
	 to keep the chain sorted in increasing order.
	 But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
	   /* Keep going past elements distinctly less than VALUE.  */
	   *l != 0 && tree_int_cst_lt ((*l)->high, value);
	   l = &(*l)->right)
	;
      if (*l)
	{
	  /* Element we will insert before must be distinctly greater;
	     overlap means error.  */
	  if (! tree_int_cst_lt (value, (*l)->low))
	    {
	      *duplicate = (*l)->code_label;
	      return 2;
	    }
	}

      /* Add this label to the chain, and succeed.
	 Copy VALUE so it is on temporary rather than momentary
	 obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);
  return 0;
}

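/* Usage sketch (illustrative only): a front end seeing `case VALUE:'
   distinguishes the error returns documented above, roughly:

	tree duplicate;
	int success;
	...
	success = pushcase (value, label, &duplicate);
	if (success == 1)
	  ... "case label not within a switch statement" ...
	else if (success == 2)
	  ... "duplicate case value" ...
	else if (success == 3)
	  ... "case value out of range" ...  */
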
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

int
pushcase_range (value1, value2, label, duplicate)
     register tree value1, value2;
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = convert (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = convert (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}

/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

static void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of enumerals in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerals not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
	{
	  all_values = 0;
	  warning ("enumeration value `%s' not handled in switch",
		   IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
	}
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerals.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (n->low),
		 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
				      == IDENTIFIER_NODE)
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
      if (!tree_int_cst_equal (n->low, n->high))
	{
	  for (chain = TYPE_VALUES (type);
	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
	       chain = TREE_CHAIN (chain))
	    ;

	  if (!chain)
	    warning ("case value `%d' not in enumerated type `%s'",
		     TREE_INT_CST_LOW (n->high),
		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					  == IDENTIFIER_NODE)
					 ? TYPE_NAME (type)
					 : DECL_NAME (TYPE_NAME (type))));
	}
    }

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
}

3594 /* Terminate a case (Pascal) or switch (C) statement
3595 in which ORIG_INDEX is the expression to be tested.
3596 Generate the code to test it and jump to the right place. */
3599 expand_end_case (orig_index
)
3602 tree minval
, maxval
, range
;
3603 rtx default_label
= 0;
3604 register struct case_node
*n
;
3607 rtx table_label
= gen_label_rtx ();
3612 register struct nesting
*thiscase
= case_stack
;
3613 tree index_expr
= thiscase
->data
.case_stmt
.index_expr
;
3614 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (index_expr
));
3616 do_pending_stack_adjust ();
3618 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3619 if (TREE_TYPE (index_expr
) != error_mark_node
)
3621 /* If switch expression was an enumerated type, check that all
3622 enumeration literals are covered by the cases.
3623 No sense trying this if there's a default case, however. */
3625 if (!thiscase
->data
.case_stmt
.default_label
3626 && TREE_CODE (TREE_TYPE (orig_index
)) == ENUMERAL_TYPE
3627 && TREE_CODE (index_expr
) != INTEGER_CST
)
3628 check_for_full_enumeration_handling (TREE_TYPE (orig_index
));
3630 /* If this is the first label, warn if any insns have been emitted. */
3631 if (thiscase
->data
.case_stmt
.seenlabel
== 0)
3634 for (insn
= get_last_insn ();
3635 insn
!= case_stack
->data
.case_stmt
.start
;
3636 insn
= PREV_INSN (insn
))
3637 if (GET_CODE (insn
) != NOTE
3638 && (GET_CODE (insn
) != INSN
|| GET_CODE (PATTERN (insn
))!= USE
))
3640 warning ("unreachable code at beginning of %s",
3641 case_stack
->data
.case_stmt
.printname
);
3646 /* If we don't have a default-label, create one here,
3647 after the body of the switch. */
3648 if (thiscase
->data
.case_stmt
.default_label
== 0)
3650 thiscase
->data
.case_stmt
.default_label
3651 = build_decl (LABEL_DECL
, NULL_TREE
, NULL_TREE
);
3652 expand_label (thiscase
->data
.case_stmt
.default_label
);
3654 default_label
= label_rtx (thiscase
->data
.case_stmt
.default_label
);
3656 before_case
= get_last_insn ();
3658 /* Simplify the case-list before we count it. */
3659 group_case_nodes (thiscase
->data
.case_stmt
.case_list
);
3661 /* Get upper and lower bounds of case values.
3662 Also convert all the case values to the index expr's data type. */
3665 for (n
= thiscase
->data
.case_stmt
.case_list
; n
; n
= n
->right
)
3667 /* Check low and high label values are integers. */
3668 if (TREE_CODE (n
->low
) != INTEGER_CST
)
3670 if (TREE_CODE (n
->high
) != INTEGER_CST
)
3673 n
->low
= convert (TREE_TYPE (index_expr
), n
->low
);
3674 n
->high
= convert (TREE_TYPE (index_expr
), n
->high
);
3676 /* Count the elements and track the largest and smallest
3677 of them (treating them as signed even if they are not). */
3685 if (INT_CST_LT (n
->low
, minval
))
3687 if (INT_CST_LT (maxval
, n
->high
))
3690 /* A range counts double, since it requires two compares. */
3691 if (! tree_int_cst_equal (n
->low
, n
->high
))
3695 /* Compute span of values. */
3697 range
= fold (build (MINUS_EXPR
, TREE_TYPE (index_expr
),
3700 if (count
== 0 || TREE_CODE (TREE_TYPE (index_expr
)) == ERROR_MARK
)
3702 expand_expr (index_expr
, const0_rtx
, VOIDmode
, 0);
3704 emit_jump (default_label
);
3706 /* If range of values is much bigger than number of values,
3707 make a sequence of conditional branches instead of a dispatch.
3708 If the switch-index is a constant, do it this way
3709 because we can optimize it. */
3711 #ifndef CASE_VALUES_THRESHOLD
3713 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
3715 /* If machine does not have a case insn that compares the
3716 bounds, this means extra overhead for dispatch tables
3717 which raises the threshold for using them. */
3718 #define CASE_VALUES_THRESHOLD 5
3719 #endif /* HAVE_casesi */
3720 #endif /* CASE_VALUES_THRESHOLD */
3722 else if (TREE_INT_CST_HIGH (range
) != 0
3723 || count
< CASE_VALUES_THRESHOLD
3724 || ((unsigned HOST_WIDE_INT
) (TREE_INT_CST_LOW (range
))
3726 || TREE_CODE (index_expr
) == INTEGER_CST
3727 /* These will reduce to a constant. */
3728 || (TREE_CODE (index_expr
) == CALL_EXPR
3729 && TREE_CODE (TREE_OPERAND (index_expr
, 0)) == ADDR_EXPR
3730 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr
, 0), 0)) == FUNCTION_DECL
3731 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr
, 0), 0)) == BUILT_IN_CLASSIFY_TYPE
)
3732 || (TREE_CODE (index_expr
) == COMPOUND_EXPR
3733 && TREE_CODE (TREE_OPERAND (index_expr
, 1)) == INTEGER_CST
))
3735 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
3737 /* If the index is a short or char that we do not have
3738 an insn to handle comparisons directly, convert it to
3739 a full integer now, rather than letting each comparison
3740 generate the conversion. */
3742 if (GET_MODE_CLASS (GET_MODE (index
)) == MODE_INT
3743 && (cmp_optab
->handlers
[(int) GET_MODE(index
)].insn_code
3744 == CODE_FOR_nothing
))
3746 enum machine_mode wider_mode
;
3747 for (wider_mode
= GET_MODE (index
); wider_mode
!= VOIDmode
;
3748 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
3749 if (cmp_optab
->handlers
[(int) wider_mode
].insn_code
3750 != CODE_FOR_nothing
)
3752 index
= convert_to_mode (wider_mode
, index
, unsignedp
);
3758 do_pending_stack_adjust ();
3760 index
= protect_from_queue (index
, 0);
3761 if (GET_CODE (index
) == MEM
)
3762 index
= copy_to_reg (index
);
3763 if (GET_CODE (index
) == CONST_INT
3764 || TREE_CODE (index_expr
) == INTEGER_CST
)
3766 /* Make a tree node with the proper constant value
3767 if we don't already have one. */
3768 if (TREE_CODE (index_expr
) != INTEGER_CST
)
3771 = build_int_2 (INTVAL (index
),
3772 !unsignedp
&& INTVAL (index
) >= 0 ? 0 : -1);
3773 index_expr
= convert (TREE_TYPE (index_expr
), index_expr
);
3776 /* For constant index expressions we need only
3777 issue a unconditional branch to the appropriate
3778 target code. The job of removing any unreachable
3779 code is left to the optimisation phase if the
3780 "-O" option is specified. */
3781 for (n
= thiscase
->data
.case_stmt
.case_list
;
3785 if (! tree_int_cst_lt (index_expr
, n
->low
)
3786 && ! tree_int_cst_lt (n
->high
, index_expr
))
3790 emit_jump (label_rtx (n
->code_label
));
3792 emit_jump (default_label
);
3796 /* If the index expression is not constant we generate
3797 a binary decision tree to select the appropriate
3798 target code. This is done as follows:
3800 The list of cases is rearranged into a binary tree,
3801 nearly optimal assuming equal probability for each case.
3803 The tree is transformed into RTL, eliminating
3804 redundant test conditions at the same time.
3806 If program flow could reach the end of the
3807 decision tree an unconditional jump to the
3808 default code is emitted. */
3811 = (TREE_CODE (TREE_TYPE (orig_index
)) != ENUMERAL_TYPE
3812 && estimate_case_costs (thiscase
->data
.case_stmt
.case_list
));
3813 balance_case_nodes (&thiscase
->data
.case_stmt
.case_list
,
3815 emit_case_nodes (index
, thiscase
->data
.case_stmt
.case_list
,
3816 default_label
, TREE_TYPE (index_expr
));
3817 emit_jump_if_reachable (default_label
);
3826 enum machine_mode index_mode
= SImode
;
3827 int index_bits
= GET_MODE_BITSIZE (index_mode
);
3829 /* Convert the index to SImode. */
3830 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr
)))
3831 > GET_MODE_BITSIZE (index_mode
))
3833 enum machine_mode omode
= TYPE_MODE (TREE_TYPE (index_expr
));
3834 rtx rangertx
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
3836 /* We must handle the endpoints in the original mode. */
3837 index_expr
= build (MINUS_EXPR
, TREE_TYPE (index_expr
),
3838 index_expr
, minval
);
3839 minval
= integer_zero_node
;
3840 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
3841 emit_cmp_insn (rangertx
, index
, LTU
, NULL_RTX
, omode
, 0, 0);
3842 emit_jump_insn (gen_bltu (default_label
));
3843 /* Now we can safely truncate. */
3844 index
= convert_to_mode (index_mode
, index
, 0);
3848 if (TYPE_MODE (TREE_TYPE (index_expr
)) != index_mode
)
3849 index_expr
= convert (type_for_size (index_bits
, 0),
3851 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
3854 index
= protect_from_queue (index
, 0);
3855 do_pending_stack_adjust ();
3857 emit_jump_insn (gen_casesi (index
, expand_expr (minval
, NULL_RTX
,
3859 expand_expr (range
, NULL_RTX
,
3861 table_label
, default_label
));
#ifdef HAVE_tablejump
      if (! win && HAVE_tablejump)
        {
          index_expr = convert (thiscase->data.case_stmt.nominal_type,
                                fold (build (MINUS_EXPR,
                                             TREE_TYPE (index_expr),
                                             index_expr, minval)));
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
          emit_queue ();
          index = protect_from_queue (index, 0);
          do_pending_stack_adjust ();

          do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
                        expand_expr (range, NULL_RTX, VOIDmode, 0),
                        table_label, default_label);
          win = 1;
        }
#endif
      if (! win)
        abort ();
      /* Get table of labels to jump to, in order of case index.  */

      ncases = TREE_INT_CST_LOW (range) + 1;
      labelvec = (rtx *) alloca (ncases * sizeof (rtx));
      bzero (labelvec, ncases * sizeof (rtx));
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
        {
          register HOST_WIDE_INT i
            = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

          while (1)
            {
              labelvec[i]
                = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
              if (i + TREE_INT_CST_LOW (minval)
                  == TREE_INT_CST_LOW (n->high))
                break;
              i++;
            }
        }
      /* Fill in the gaps with the default.  */
      for (i = 0; i < ncases; i++)
        if (labelvec[i] == 0)
          labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
      /* Output the table.  */
      emit_label (table_label);

      /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
         were an expression, instead of an #ifdef/#ifndef.  */
      if (
#ifdef CASE_VECTOR_PC_RELATIVE
          1 ||
#endif
          flag_pic)
        emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
                                 gen_rtx (LABEL_REF, Pmode, table_label),
                                 gen_rtvec_v (ncases, labelvec)));
      else
        emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
                                 gen_rtvec_v (ncases, labelvec)));
      /* If the case insn drops through the table,
         after the table we must jump to the default-label.
         Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
      emit_jump (default_label);
#else
      emit_barrier ();
#endif
    }

  before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
  reorder_insns (before_case, get_last_insn (),
                 thiscase->data.case_stmt.start);

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
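/* As an illustration of the tablejump path above (schematic, not literal
   output): a dense switch such as

        switch (i)
          {
          case 1: ...  case 2: ...  case 4: ...
          default: ...
          }

   is expanded as a range check followed by an indexed jump through a
   table of label references,

        j = i - 1;                      (subtract minval)
        if ((unsigned) j > 3)           (compare against range)
          goto Ldefault;
        goto *table[j];                 (ADDR_VEC or ADDR_DIFF_VEC)

   where the table holds L1, L2, Ldefault, L4: the hole at case 3 is
   filled with the default label.  */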
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
        emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

        chars above space:      16
        digits:                 16
        default:                12
        space, punct:           8
        tab:                    4
        newline:                2
        other "\" chars:        1
        remaining chars:        0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  register int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
        {
          if (isalnum (i))
            cost_table[i] = 16;
          else if (ispunct (i))
            cost_table[i] = 8;
          else if (iscntrl (i))
            cost_table[i] = -1;
        }

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
        if (cost_table[i] < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
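/* As a concrete illustration (hypothetical input): in a switch over the
   characters of English text, cases such as 'e' or '0' get weight 16,
   ' ' gets 8, '\t' gets 4 and '\n' gets 2, so balance_case_nodes places
   the letter cases nearer the root of the decision tree than the rarer
   control characters.  A case value such as 200 or '\001' makes this
   function return 0, and the cost table is not used at all.  */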
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && next_real_insn (label_rtx (np->code_label)) == lb
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }

      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
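/* As an illustration (schematic): in

        switch (c)
          {
          case 'a':  case 'b':  case 'c':
            ...
          case 'x':
            ...
          }

   the three nodes for 'a', 'b' and 'c' jump to the same first real insn
   and have consecutive values, so they are merged into the single range
   node 'a'..'c' before the decision tree is built.  */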
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;

          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost.
                 Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
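/* As an illustration (equal case weights assumed): the ordered list

        1  3  5  9  12  20  30

   is split at its middle node 9, and each half is split again, giving

                  9
                /   \
               3     20
              / \   /  \
             1   5 12   30

   so selecting a case takes at most three comparisons instead of up
   to seven.  */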
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
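/* For example (schematic): if a parent node covers the range 10..20 and
   the current node's lower bound is 21, the parent's test against 20,
   which is 21 - 1, already guarantees INDEX >= 21 whenever control
   reaches the current node, so no separate lower-bound comparison needs
   to be emitted for it.  */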
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);
  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */
          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }
      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
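/* To illustrate the pruning (schematic, with hypothetical labels): for
   case values 50, 51 and 52 balanced with 51 at the root, the emitted
   code behaves roughly like

        if (i == 51) goto L51;
        if (i > 51)
          {
            if (i == 52) goto L52;
            goto Ldefault;
          }
        if (i == 50) goto L50;
        goto Ldefault;

   and in the i > 51 arm no lower-bound test against 52 is emitted:
   node_has_low_bound sees that the parent already tested 51 == 52 - 1.  */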
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
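/* The intended calling sequence, from the loop unrolling pass (a sketch;
   the actual calls live outside this file):

        find_loop_tree_blocks ();
        ... unroll loops, duplicating insns and their block notes ...
        unroll_block_trees ();

   find_loop_tree_blocks must run before any insns are duplicated, while
   the block notes still match the BLOCK tree one-for-one;
   unroll_block_trees then rebuilds the tree to cover the copies.  */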