/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
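/* Purely as an illustration (not part of the compiler proper), a front end
   handling `if (cond) stmt;' might make this call sequence, assuming COND
   has already been parsed into a tree:

	expand_start_cond (cond, 0);	-- emit the test; jump taken if false
	  (parse and expand the then-clause here)
	expand_end_cond ();		-- emit the label that jump targets

   An if-then-else would also call expand_start_else between the clauses.  */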
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
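/* For illustration only: for a C switch such as

	switch (c) { case 'a': ...  case '0': ...  default: ... }

   each `case' gets one case_node whose LOW and HIGH are both that case
   value; a range label (for example the GNU C extension
   `case '0' ... '9':', assuming the front end supports it) would set
   LOW to '0' and HIGH to '9' in a single node.  */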
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
  struct nesting *next;

      /* For conds (if-then and if-then-else statements).  */

	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */

	  /* Label at the top of the loop; place to loop back to.  */
	  /* Label at the end of the whole construct.  */
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */

      /* For variable binding contours.  */

	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;

      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */

	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  /* The expression to be dispatched on.  */
	  /* Type that INDEX_EXPR should be converted to.  */
	  /* Number of range exprs in case statement.  */
	  /* Name of this kind of statement, for warnings.  */
	  /* Nonzero if a case label has been seen in this case stmt.  */

      /* For exception contours.  */

	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  /* First insn of TRY block, in case resumptive model is needed.  */
	  /* Label for the catch clauses.  */
	  /* Label for unhandled exceptions.  */
	  /* Label at the end of whole construct.  */
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;
/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
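/* Illustrative use (a sketch; the real calls appear in the expand_end_*
   functions below): exiting an if-construct pops its entry with

	POPSTACK (cond_stack);

   which also frees any deeper entries still sitting on nesting_stack.  */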
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  /* The CODE_LABEL rtx that this is jumping to.  */
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

  /* Points to following fixup.  */
  struct label_chain *next;

  gcc_obstack_init (&stmt_obstack);

  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
init_stmt_for_function ()

  /* We are not currently within any block, conditional, loop or case.  */

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
restore_stmt_status (p)

  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
/* Emit a no-op instruction.  */

  rtx last_insn = get_last_insn ();

      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

  if (TREE_CODE (label) != LABEL_DECL)

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

expand_computed_goto (exp)

  rtx x = expand_expr (exp, 0, VOIDmode, 0);

  emit_indirect_jump (x);
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)

      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

declare_nonlocal_label (label)

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)

      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      nonlocal_goto_stack_level
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_insn_before (gen_move_insn (nonlocal_goto_stack_level,
				       stack_pointer_rtx),
			tail_recursion_reentry);
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)

      struct function *p = find_function_data (context);

      p->has_nonlocal_label = 1;
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      p->nonlocal_goto_handler_slot,
				      p->nonlocal_goto_stack_level,
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));
      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      temp = copy_to_reg (p->nonlocal_goto_handler_slot);
      /* Restore the stack pointer.  Note this uses fp just restored.  */
      emit_move_insn (stack_pointer_rtx, p->nonlocal_goto_stack_level);
      /* Put in the static chain register the nonlocal label address.  */
      emit_move_insn (static_chain_rtx,
		      gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
      /* USE of frame_pointer_rtx added for consistency; not clear if
	 really needed.  */
      emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
      emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
      emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
      emit_indirect_jump (temp);

  expand_goto_internal (label, label_rtx (label), 0);
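/* Illustration (not compiled): for ordinary C source such as

	  goto done;
	...
	done: ;

   the front end calls expand_goto (label) at the `goto' and
   expand_label (label) at the definition; only a goto that must restore
   a stack level or run cleanups ever needs the fixup machinery below.  */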
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

expand_goto_internal (body, label, last_insn)

  struct nesting *block;

  if (GET_CODE (label) != CODE_LABEL)

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)

      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)

	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();

	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_move_insn (stack_pointer_rtx, stack_level);

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));

  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))

      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
	TREE_ADDRESSABLE (body) = 1;
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

expand_fixup (tree_label, rtl_label, last_insn)

  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;

  /* Now set END_BLOCK to the binding level to which we will return.  */

      struct nesting *next_block = end_block->all;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)

  if (block != end_block)
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
	     && block->data.block.outer_cleanups != empty_cleanup_list)
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;

  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)

      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)

	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	    prev->next = f->next;

      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))

	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)

	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	    emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level),
			     f->before_jump);

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))

	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	f->stack_level = stack_level;
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
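/* Illustration (not compiled): a C statement such as

	asm ("nop");

   reaches expand_asm with BODY being the STRING_CST "nop", and simply
   becomes an ASM_INPUT insn carrying that text.  */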
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;

  rtvec argvec, constraints;

  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);

  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)

      tree val = TREE_VALUE (tail);

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)

	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	      error ("output operand constraint contains `+'");

	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')

	  error ("output operand constraint lacks `='");

      /* If an output operand is not a variable or indirect ref,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (tail = inputs; tail; tail = TREE_CHAIN (tail))

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)

      if (TREE_PURPOSE (tail) == NULL_TREE)
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)

      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));

  else if (noutputs == 0 && nclobbers == 0)

      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);

      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)

	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)

	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	      error ("unknown register name `%s' in `asm'", regname);

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));

      insn = emit_insn (body);
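/* Illustration (not compiled): a GNU C extended asm such as

	asm volatile ("addl %2,%0" : "=r" (x) : "0" (x), "g" (y));

   arrives here with one output (constraint "=r"), two inputs, no
   clobbers, and VOL nonzero; it is emitted as a single SET whose source
   is an ASM_OPERANDS rtx.  (The particular operands are only an example.)  */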
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

expand_expr_stmt (exp)

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)

      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))

      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);

	/* This case needs to be written.  */

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);
  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved.  */
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

warn_if_unused_value (exp)

  if (TREE_USED (exp))

  switch (TREE_CODE (exp))

    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:

    case METHOD_CALL_EXPR:

    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:

      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */

      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)

      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))

      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)

      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)

      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
/* Clear out the memory of the last expression evaluated.  */

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

expand_start_stmt_expr ()

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);

  expr_stmts_for_value++;

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

expand_end_stmt_expr (t)

  if (last_expr_type == 0)

      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;

  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  expr_stmts_for_value--;
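/* Illustration (not compiled): a GNU C statement expression such as

	z = ({ int t = f (); t + 1; });

   is bracketed by expand_start_stmt_expr () / expand_end_stmt_expr (t);
   the value of the last expression statement, recorded above in
   last_expr_value, becomes the value of the whole construct.  */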
/* The exception handling nesting looks like this:

     {	<-- exception handler block
		<-- in an exception handler
	:	<-- in a TRY block
	:	<-- in an exception handler
	:	<-- in an except block
	:	<-- in an exception handler
   */

/* Return nonzero iff in a try block at level LEVEL.  */

in_try_block (level)

  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label != 0)

/* Return nonzero iff in an except block at level LEVEL.  */

in_except_block (level)

  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label == 0)

/* Return nonzero iff in an exception handler at level LEVEL.  */

in_exception_handler (level)

  struct nesting *n = except_stack;
  while (n && level--)

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

  if (except_stack == 0)

  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

expand_start_try (try_clause, exitflag, escapeflag)

  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);

/* End of a TRY block.  Nothing to do for now.  */

  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

expand_start_except (exitflag, escapeflag)

  /* An `exit' from catch clauses goes out to next exit level,
     if there is one.  Otherwise, it just goes to the end
     of the construct.  */
  for (n = except_stack->next; n; n = n->next)
    if (n->exit_label != 0)
	except_stack->exit_label = n->exit_label;

    except_stack->exit_label = except_stack->data.except_stmt.after_label;

  /* An `escape' from catch clauses goes out to next escape level,
     if there is one.  Otherwise, it just goes to the end
     of the construct.  */
  for (n = except_stack->next; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
	except_stack->data.except_stmt.escape_label
	  = n->data.except_stmt.escape_label;

    except_stack->data.except_stmt.escape_label
      = except_stack->data.except_stmt.after_label;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */

expand_escape_except ()

  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

expand_end_except ()

  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;

      /* Propagate exceptions raised but not handled to next
	 outer level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)

	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;

	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))

		  if (value_member (TREE_VALUE (this_raise), handled))

			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);

			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)

	      handled = TREE_CHAIN (handled);

	  if (prev == NULL_TREE)

	    TREE_CHAIN (prev) = n->data.except_stmt.raised;

	  n->data.except_stmt.raised = raised;

  POPSTACK (except_stack);
/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */

  if (except_stack == 0)

  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

expand_catch_default ()

  if (except_stack == 0)

  except_stack->data.except_stmt.handled = void_type_node;

  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)

  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

expand_start_cond (cond, exitflag)

  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL);

/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

expand_start_elseif (cond)

  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL);

/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

expand_start_else ()

  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */

/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
1748 loop should be exited by `exit_something'. This is a loop for which
1749 `expand_continue' will jump to the top of the loop.
1751 Make an entry on loop_stack to record the labels associated with
1755 expand_start_loop (exit_flag
)
1758 register struct nesting
*thisloop
= ALLOC_NESTING ();
1760 /* Make an entry on loop_stack for the loop we are entering. */
1762 thisloop
->next
= loop_stack
;
1763 thisloop
->all
= nesting_stack
;
1764 thisloop
->depth
= ++nesting_depth
;
1765 thisloop
->data
.loop
.start_label
= gen_label_rtx ();
1766 thisloop
->data
.loop
.end_label
= gen_label_rtx ();
1767 thisloop
->data
.loop
.continue_label
= thisloop
->data
.loop
.start_label
;
1768 thisloop
->exit_label
= exit_flag
? thisloop
->data
.loop
.end_label
: 0;
1769 loop_stack
= thisloop
;
1770 nesting_stack
= thisloop
;
1772 do_pending_stack_adjust ();
1774 emit_note (0, NOTE_INSN_LOOP_BEG
);
1775 emit_label (thisloop
->data
.loop
.start_label
);
1780 /* Like expand_start_loop but for a loop where the continuation point
1781 (for expand_continue_loop) will be specified explicitly. */
1784 expand_start_loop_continue_elsewhere (exit_flag
)
1787 struct nesting
*thisloop
= expand_start_loop (exit_flag
);
1788 loop_stack
->data
.loop
.continue_label
= gen_label_rtx ();
1792 /* Specify the continuation point for a loop started with
1793 expand_start_loop_continue_elsewhere.
1794 Use this at the point in the code to which a continue statement
1798 expand_loop_continue_here ()
1800 do_pending_stack_adjust ();
1801 emit_note (0, NOTE_INSN_LOOP_CONT
);
1802 emit_label (loop_stack
->data
.loop
.continue_label
);
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

  register rtx insn = get_last_insn ();
  register rtx start_label = loop_stack->data.loop.start_label;
  rtx last_test_insn = 0;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))

      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))

	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)

	  if (last_test_insn && num_insns > 30)

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
		       == loop_stack->data.loop.end_label))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			  == loop_stack->data.loop.end_label))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && (XEXP (SET_SRC (PATTERN (insn)), 0)
		  == loop_stack->data.loop.end_label))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())

	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;

  emit_jump (start_label);
  emit_note (0, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
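/* Illustration of the reordering above (a sketch, not literal RTL): for

	while (cond) body;

   the straightforward layout

	start:	if (!cond) goto end;  body;  goto start;  end:

   is rotated, when the conditional exit is found near the top, into

		goto start;
	top:	body;
	start:	if (cond) goto top;  end:

   so the loop runs with one conditional branch per iteration.  */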
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

expand_continue_loop (whichloop)
     struct nesting *whichloop;

    whichloop = loop_stack;

  expand_goto_internal (0, whichloop->data.loop.continue_label, 0);

/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_loop (whichloop)
     struct nesting *whichloop;

    whichloop = loop_stack;

  expand_goto_internal (0, whichloop->data.loop.end_label, 0);

/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;

    whichloop = loop_stack;

  do_jump (cond, whichloop->data.loop.end_label, NULL);

/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

preserve_subexpressions_p ()

  if (flag_expensive_optimizations)

  if (optimize == 0 || loop_stack == 0)

  insn = get_last_insn_anywhere ();

	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_something ()

  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
	expand_goto_internal (0, n->exit_label, 0);
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

expand_null_return ()
{
  struct nesting *block = block_stack;
  rtx last_insn = 0;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Generate RTL to return from the current function, with value VAL.  */

expand_value_return (val)
{
  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    emit_move_insn (return_reg, val);
  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx (USE, VOIDmode, return_reg));

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

expand_null_return_1 (last_insn, use_goto)
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
      end_label = return_label = gen_label_rtx ();
      expand_goto_internal (0, end_label, last_insn);

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
      emit_jump_insn (gen_return ());

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (0, end_label, last_insn);
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

expand_return (retval)
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  register rtx val = 0;
  struct nesting *block;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, 0, VOIDmode, 0);
      expand_null_return ();
    }

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  cleanups = any_pending_cleanups (1);

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			    DECL_RESULT (current_function_decl),
			    TREE_OPERAND (retval_rhs, 1)));
      emit_label (label);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			    DECL_RESULT (current_function_decl),
			    TREE_OPERAND (retval_rhs, 2)));
    }
  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
      /* Finish checking validity, and if valid emit code
	 to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
			      DECL_ARGUMENTS (current_function_decl)))
    {
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      expand_goto_internal (0, tail_recursion_label, last_insn);
    }

  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
#ifdef HAVE_return
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct)
    {
      /* If this is  return x == y;  then generate
	 if (x == y) return 1; else return 0;
	 if we can do it with explicit return insns.  */
      switch (TREE_CODE (retval_rhs))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_NOT_EXPR:
	  op0 = gen_label_rtx ();
	  jumpifnot (retval_rhs, op0);
	  expand_value_return (const1_rtx);
	  emit_label (op0);
	  expand_value_return (const0_rtx);
	}
    }
#endif /* HAVE_return */

      && TREE_TYPE (retval_rhs) != void_type_node
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      /* Calculate the return value into a pseudo reg.  */
      val = expand_expr (retval_rhs, 0, VOIDmode, 0);

      /* All temporaries have now been used.  */

      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, 0, VOIDmode, 0);
      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
    }
}
/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

drop_through_at_end_p ()
{
  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;
}
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */

tail_recursion_args (actuals, formals)
     tree actuals, formals;
{
  register tree a = actuals, f = formals;
  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f;
       a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  { copy = 1; break; }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (DECL_MODE (f) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	convert_move (DECL_RTL (f), argvec[i],
		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
    }

  return 1;
}
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   EXIT_FLAG is nonzero if this construct should be visible to
   `exit_something'.  */

expand_start_bindings (exit_flag)
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.function_call_count = 0;

  if (block_stack->data.block.cleanups == NULL_TREE
      && (block_stack->data.block.outer_cleanups == NULL_TREE
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list))
    thisblock->data.block.outer_cleanups = empty_cleanup_list;
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
    thisblock->data.block.outer_cleanups = 0;

      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
    thisblock->data.block.outer_cleanups = 0;

  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
}
/* Generate RTL code to terminate a binding contour.
   VARS is the chain of VAR_DECL nodes
   for the variables bound in this contour.
   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

expand_end_bindings (vars, mark_ends, dont_jump_in)
{
  register struct nesting *thisblock = block_stack;

  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
      warning_with_decl (decl, "unused variable `%s'");

  /* Mark the beginning and end of the scope if requested.  */
  if (mark_ends)
    emit_note (0, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }
  /* If necessary, make a handler for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    {
      rtx afterward = gen_label_rtx ();
      rtx handler_label = gen_label_rtx ();
      rtx save_receiver = gen_reg_rtx (Pmode);

      /* Don't let jump_optimize delete the handler.  */
      LABEL_PRESERVE_P (handler_label) = 1;

      /* Record the handler address in the stack slot for that purpose,
	 during this block, saving and restoring the outer value.  */
      if (thisblock->next != 0)
	{
	  emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
	  emit_insn_before (gen_move_insn (save_receiver,
					   nonlocal_goto_handler_slot),
			    thisblock->data.block.first_insn);
	}
      emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
				       gen_rtx (LABEL_REF, Pmode,
						handler_label)),
			thisblock->data.block.first_insn);

      /* Jump around the handler; it runs only when specially invoked.  */
      emit_jump (afterward);
      emit_label (handler_label);

#ifdef HAVE_nonlocal_goto
      if (! HAVE_nonlocal_goto)
#endif
	/* First adjust our frame pointer to its actual value.  It was
	   previously set to the start of the virtual area corresponding to
	   the stacked variables when we branched here and now needs to be
	   adjusted to the actual hardware fp value.

	   Assignments to virtual registers are converted by
	   instantiate_virtual_regs into the corresponding assignment
	   to the underlying register (fp in this case) that makes
	   the original assignment true.
	   So the following insn will actually be
	   decrementing fp by STARTING_FRAME_OFFSET.  */
	emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
	{
	  /* Now restore our arg pointer from the address at which it was saved
	     in our stack frame.
	     If there hasn't been space allocated for it yet, make some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  /* We need a pseudo here,
			     or else instantiate_virtual_regs_1 complains.  */
			  copy_to_reg (arg_pointer_save_area));
	}
#endif

      /* The handler expects the desired label address in the static chain
	 register.  It tests the address and does an appropriate jump
	 to whatever label is desired.  */
      for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
	/* Skip any labels we shouldn't be able to jump to from here.  */
	if (! DECL_TOO_LATE (TREE_VALUE (link)))
	  {
	    rtx not_this = gen_label_rtx ();
	    rtx this = gen_label_rtx ();
	    do_jump_if_equal (static_chain_rtx,
			      gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
			      this, 0);
	    emit_jump (not_this);
	    emit_label (this);
	    expand_goto (TREE_VALUE (link));
	    emit_label (not_this);
	  }

      /* If label is not recognized, abort.  */
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
			 VOIDmode, 0);
      emit_label (afterward);
    }
  /* Don't allow jumping into a block that has cleanups or a stack level.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels and all cleanups.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, 0);
      do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (thisblock->data.block.stack_level != 0)
	{
	  emit_move_insn (stack_pointer_rtx,
			  thisblock->data.block.stack_level);
	  if (nonlocal_goto_stack_level != 0)
	    emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    {
      rtx rtl = DECL_RTL (decl);
      if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
	use_variable (rtl);
    }

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
}
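
/* Illustrative sketch (not part of the compiler): a front end expands a
   brace-enclosed block roughly like this, using the entry points above and
   below.  `cleanup_expr' is a hypothetical tree for code to run on exit
   (e.g. a C++ destructor call); pass NULL_TREE if there is none.

	expand_start_bindings (0);		-- NOTE_INSN_BLOCK_BEG, push block_stack
	expand_decl (decl, cleanup_expr);	-- allocate a reg or stack slot for DECL
	expand_decl_init (decl);		-- store its initial value, if any
	... expand the statements of the block ...
	expand_end_bindings (decls, 1, 0);	-- run cleanups, restore stack level

   Any goto out of the block goes through fixup_gotos, so the same cleanups
   and stack-level restoration happen on that path too.  */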
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   There is no special support here for C++ constructors.
   They should be handled by the proper code in DECL_INITIAL.  */

expand_decl (decl, cleanup)
{
  struct nesting *thisblock = block_stack;
  tree type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into registers.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (TREE_REGDECL (decl) || ! obey_regdecls))
    {
      /* Automatic variable that can go in a register.  */
      DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
      if (TREE_CODE (type) == POINTER_TYPE)
	mark_reg_pointer (DECL_RTL (decl));
      REG_USERVAR_P (DECL_RTL (decl)) = 1;
    }
  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
    {
      /* Variable of fixed size that goes on the stack.  */

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      DECL_RTL (decl)
	= assign_stack_temp (DECL_MODE (decl),
			     ((TREE_INT_CST_LOW (DECL_SIZE (decl))
			       + BITS_PER_UNIT - 1)
			      / BITS_PER_UNIT), 1);

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
	= (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  thisblock->data.block.stack_level
	    = copy_to_reg (stack_pointer_rtx);
	  stack_block_stack = thisblock;
	}

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      DECL_SIZE (decl),
				      size_int (BITS_PER_UNIT)),
			  0, VOIDmode, 0);

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));

      if (nonlocal_goto_stack_level != 0)
	emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

    use_variable (DECL_RTL (decl));
}
/* Emit code to perform the initialization of a declaration DECL.  */

expand_decl_init (decl)
{
  int was_used = TREE_USED (decl);

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
}
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

expand_decl_cleanup (decl, cleanup)
{
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */

  /* Record the cleanup if there is one.  */

  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
}
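
/* Illustrative sketch (not part of the compiler): recording a cleanup.
   `dtor_call' is a hypothetical CALL_EXPR built by a C++ front end to
   invoke a destructor; per the comment above it must be the topmost node
   of the cleanup expression.

	expand_decl (var, NULL_TREE);
	expand_decl_cleanup (var, dtor_call);	-- chain the cleanup on this contour
	...
	expand_end_bindings (vars, 1, 0);	-- expand_cleanups runs dtor_call here

   The same cleanup is also expanded on every goto or return that leaves
   the contour, which is why it may be expanded more than once.  */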
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;

  expand_decl (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    {
	      DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
	      MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
	    }
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
	}

      /* Record the cleanup if there is one.  */

      thisblock->data.block.cleanups
	= temp_tree_cons (decl_elt, cleanup_elt,
			  thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
    }
}
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.  */

expand_cleanups (list, dont_do)
{
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do);
	else
	  /* Cleanups may be run multiple times.  For example,
	     when exiting a binding contour, we expand the
	     cleanups associated with that contour.  When a goto
	     within that binding contour has a target outside that
	     contour, it will expand all cleanups from its scope to
	     the target.  Though the cleanups are expanded multiple
	     times, the control paths are non-overlapping so the
	     cleanups will not be executed twice.  */
	  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
      }
}
/* Expand a list of cleanups for a goto fixup.
   The expansion is put into the insn chain after the insn *BEFORE_JUMP
   and *BEFORE_JUMP is set to the insn that now comes before the jump.  */

fixup_cleanups (list, before_jump)
{
  rtx beyond_jump = get_last_insn ();
  rtx new_before_jump;

  expand_cleanups (list, 0);
  /* Pop any pushes done in the cleanups,
     in case function is about to return.  */
  do_pending_stack_adjust ();

  new_before_jump = get_last_insn ();

  if (beyond_jump != new_before_jump)
    {
      /* If cleanups expand to nothing, don't reorder.  */
      reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
      *before_jump = new_before_jump;
    }
}
/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}
/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

any_pending_cleanups (this_contour)
{
  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;
  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list))
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

expand_start_case (exit_flag, expr, type, printname)
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (0, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}
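
/* Illustrative sketch (not part of the compiler): a C front end expands
   `switch (expr) { case 1: ... default: ... }' roughly as

	expand_start_case (1, expr, type, "switch statement");
	...
	pushcase (build_int_2 (1, 0), lab1, &dup);	-- one call per case label
	pushcase (NULL_TREE, deflab, &dup);		-- null VALUE means `default'
	...
	expand_end_case (expr);				-- emit compares or a table

   where `lab1' and `deflab' are hypothetical LABEL_DECLs made by the front
   end.  Nothing is emitted for the dispatch until expand_end_case; until
   then the labels just accumulate in case_stack->data.case_stmt.case_list.  */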
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
}

/* End a dummy case statement.  */

expand_end_case_dummy ()
{
  POPSTACK (case_stack);
}
/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

pushcase (value, label, duplicate)
     register tree value;
     register tree label;
{
  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */

  value = convert (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */

  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
      expand_label (label);
      return 0;
    }

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than VALUE.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE so it is on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */
  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->high = n->low = copy_node (value);
  n->code_label = label;

  expand_label (label);
  return 0;
}
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

pushcase_range (value1, value2, label, duplicate)
     register tree value1, value2;
     register tree label;
{
  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = convert (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = convert (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;
  return 0;
}
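
/* Illustrative sketch (not part of the compiler): how a front end is
   expected to act on the return codes documented above for pushcase and
   pushcase_range (the identifiers `value' and `label' and the error
   routines stand for whatever the language front end uses).

	tree duplicate;
	int code = pushcase (value, label, &duplicate);

	if (code == 1)
	  error ("case label not within a switch statement");
	else if (code == 2)
	  error_with_decl (duplicate, "duplicate case value");
	else if (code == 3)
	  error ("case value out of range");
	else if (code == 5)
	  error ("case label within scope of cleanup or variable array");

   pushcase_range can additionally return 4 for an empty range.  */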
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

check_for_full_enumeration_handling (type)
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of enumerals in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerals not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
	warning ("enumerated value `%s' not handled in switch",
		 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerals.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;
      if (!chain)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (n->low),
		 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
				      == IDENTIFIER_NODE)
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
    }

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  for (l = &case_stack->data.case_stmt.case_list;
       (*l)->right != 0;
       l = &(*l)->right)
    ;
  case_stack->data.case_stmt.default_label = (*l)->code_label;
}
/* Terminate a case (Pascal) or switch (C) statement
   in which CASE_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

expand_end_case (orig_index)
{
  tree minval, maxval, range;
  rtx default_label = 0;
  register struct case_node *n;
  rtx table_label = gen_label_rtx ();
  register struct nesting *thiscase = case_stack;
  tree index_expr = thiscase->data.case_stmt.index_expr;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();
      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (TREE_TYPE (index_expr), n->low);
	  n->high = convert (TREE_TYPE (index_expr), n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */

	  if (INT_CST_LT (n->low, minval))
	    minval = n->low;
	  if (INT_CST_LT (maxval, n->high))
	    maxval = n->high;

	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      /* Compute span of values.  */

      range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
			   maxval, minval));

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */
      else if (TREE_INT_CST_HIGH (range) != 0
	       || (HAVE_casesi ? count < 4 : count < 5)
	       /* If machine does not have a case insn that compares the
		  bounds, this means extra overhead for dispatch tables
		  which raises the threshold for using them.  */
	       || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, 0, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimisation phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list;
		   n;
		   n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  /* If the index expression is not constant we generate
	     a binary decision tree to select the appropriate
	     target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */
	  else
	    {
	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && default_label != 0
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, TREE_TYPE (index_expr));
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  enum machine_mode index_mode = SImode;
	  int index_bits = GET_MODE_BITSIZE (index_mode);

#ifdef HAVE_casesi
	  /* Convert the index to SImode.  */
	  if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
	      > GET_MODE_BITSIZE (index_mode))
	    {
	      index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
				  index_expr, minval);
	      minval = integer_zero_node;
	    }
	  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
	    index_expr = convert (type_for_size (index_bits, 0),
				  index_expr);
	  index = expand_expr (index_expr, 0, VOIDmode, 0);
	  index = protect_from_queue (index, 0);
	  do_pending_stack_adjust ();

	  emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
				      expand_expr (range, 0, VOIDmode, 0),
				      table_label, default_label));
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR,
						 TREE_TYPE (index_expr),
						 index_expr, minval)));
	      index = expand_expr (index_expr, 0, VOIDmode, 0);

	      /* convert_to_mode calls protect_from_queue.  */
	      index = convert_to_mode (Pmode, index, 1);
	      do_pending_stack_adjust ();

	      do_tablejump (index, Pmode,
			    gen_rtx (CONST_INT, VOIDmode,
				     TREE_INT_CST_LOW (range)),
			    table_label, default_label);
	    }
#endif

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero (labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register int i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
#ifdef CASE_VECTOR_PC_RELATIVE
	  emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				   gen_rtx (LABEL_REF, Pmode, table_label),
				   gen_rtvec_v (ncases, labelvec)));
#else
	  emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				   gen_rtvec_v (ncases, labelvec)));
#endif

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);
}
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

estimate_case_costs (node)
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */

  return 1;
}
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

group_case_nodes (head)
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;

          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost,
                 Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }

          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
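
/* The list-splitting idea above can be seen more plainly without the cost
   table.  The sketch below is illustrative only, not part of the compiler,
   with hypothetical names: it builds a balanced tree from a sorted array by
   taking the middle element as the pivot and recursing on the halves, which
   is what balance_case_nodes does when every entry is equally likely.  */
#if 0 /* Illustrative sketch only -- not compiled.  */
#include <stdlib.h>

struct toy_node
{
  int value;
  struct toy_node *left, *right, *parent;
};

static struct toy_node *
toy_balance (vals, lo, hi, parent)
     int *vals;
     int lo, hi;
     struct toy_node *parent;
{
  int mid;
  struct toy_node *np;

  if (lo > hi)
    return 0;

  /* The middle element becomes the pivot; the two halves become its
     left and right branches, each balanced recursively.  */
  mid = (lo + hi) / 2;
  np = (struct toy_node *) malloc (sizeof (struct toy_node));
  np->value = vals[mid];
  np->parent = parent;
  np->left = toy_balance (vals, lo, mid - 1, np);
  np->right = toy_balance (vals, mid + 1, hi, np);
  return np;
}
#endif /* 0 */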
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
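
/* A concrete illustration of the redundancy tests above, on the toy_node
   structure from the earlier sketch.  It is illustrative only, with
   hypothetical names: if some ancestor already tested for VALUE - 1,
   control can only reach this node with an index of at least VALUE, so the
   lower bound check can be dropped (the upper bound case is symmetric).  */
#if 0 /* Illustrative sketch only -- not compiled.  */
static int
toy_has_low_bound (np)
     struct toy_node *np;
{
  struct toy_node *pnode;

  /* A left child would hold smaller values, so nothing is ruled out yet.  */
  if (np->left)
    return 0;

  /* Walk the ancestors looking for one that tested our value minus one.  */
  for (pnode = np->parent; pnode; pnode = pnode->parent)
    if (pnode->value == np->value - 1)
      return 1;

  return 0;
}
#endif /* 0 */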
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);
  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                             GT, 0, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, 0,
                                                 VOIDmode, 0),
                             LT, 0, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, 0,
                                                 VOIDmode, 0),
                             GT, 0, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                                 LT, 0, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, 0, VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */

          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                                 GT, 0, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, 0, VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                         GT, 0, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
                         GE, 0, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
                             LT, 0, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                         LE, 0, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                             GT, 0, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
                         GE, 0, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }
      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
                             GT, 0, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
                             LT, 0, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
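
/* To make the effect of the decision tree concrete, here is a rough sketch
   of the tests it produces for `switch (i)' with cases 1, 2 and 3, once the
   nodes have been balanced with 2 at the root.  It is illustrative only:
   plain C with a hypothetical function name rather than the rtl actually
   emitted, but the pruning of redundant bound checks is the same.  */
#if 0 /* Illustrative sketch only -- not compiled.  */
static int
toy_dispatch (i)
     int i;
{
  if (i == 2)
    return 2;                   /* case 2 */
  if (i > 2)
    {
      /* Right subtree holds only the value 3.  Its lower bound (2 + 1) is
         implied by the parent's test, so only the upper bound is checked.  */
      if (i > 3)
        return 0;               /* default */
      return 3;                 /* case 3 */
    }
  /* Left subtree holds only the value 1.  Its upper bound (2 - 1) was just
     tested by the parent, so only the lower bound is checked.  */
  if (i < 1)
    return 0;                   /* default */
  return 1;                     /* case 1 */
}
#endif /* 0 */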
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by loop number, contains pointer to the first block in the loop,
   or zero if none.  Only valid if doing loop unrolling and outputting debugger
   info.  */

tree *loop_number_first_block;

/* Indexed by loop number, contains pointer to the last block in the loop,
   only valid if loop_number_first_block is nonzero.  */

tree *loop_number_last_block;

/* Indexed by loop number, contains nesting level of first block in the
   loop, if any.  Only valid if doing loop unrolling and outputting debugger
   info.  */

int *loop_number_block_level;

/* Scan the function looking for loops, and walk the BLOCK tree at the
   same time.  Record the first and last BLOCK tree corresponding to each
   loop.  This function is similar to find_and_verify_loops in loop.c.  */
void
find_loop_tree_blocks (f)
     rtx f;
{
  rtx insn;
  int current_loop = -1;
  int next_loop = -1;
  int loop;
  int block_level, tree_level;
  tree tree_block, parent_tree_block;

  tree_block = DECL_INITIAL (current_function_decl);
  parent_tree_block = 0;
  block_level = 0;
  tree_level = -1;

  /* Find boundaries of loops, and save the first and last BLOCK tree
     corresponding to each loop.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_BEG:
            loop_number_block_level[++next_loop] = block_level;
            loop_number_first_block[next_loop] = 0;
            current_loop = next_loop;
            break;

          case NOTE_INSN_LOOP_END:
            if (current_loop == -1)
              abort ();

            current_loop = loop_outer_loop[current_loop];
            break;

          case NOTE_INSN_BLOCK_BEG:
            if (tree_level < block_level)
              {
                /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
                   we must now visit the subtree of the current block.  */
                parent_tree_block = tree_block;
                tree_block = BLOCK_SUBBLOCKS (tree_block);
                tree_level++;
              }
            else if (tree_level > block_level)
              abort ();

            /* Save this block tree here for all nested loops for which
               this is the topmost block.  */
            for (loop = current_loop;
                 loop != -1 && block_level == loop_number_block_level[loop];
                 loop = loop_outer_loop[loop])
              {
                if (loop_number_first_block[loop] == 0)
                  loop_number_first_block[loop] = tree_block;
                loop_number_last_block[loop] = tree_block;
              }

            block_level++;
            break;

          case NOTE_INSN_BLOCK_END:
            block_level--;
            if (tree_level > block_level)
              {
                /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
                   we must now visit the parent of the current tree.  */
                if (tree_block != 0 || parent_tree_block == 0)
                  abort ();

                tree_block = parent_tree_block;
                parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
                tree_level--;
              }
            tree_block = BLOCK_CHAIN (tree_block);
            break;
          }
    }
}
/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
   to BLOCK_BEG notes inside the loop LOOP_NUMBER.

   Note that we only copy the topmost level of tree nodes; they will share
   pointers to the same subblocks.  */

void
unroll_block_trees (loop_number, copies)
     int loop_number;
     int copies;
{
  int i;

  /* First check whether there are any blocks that need to be copied.  */
  if (loop_number_first_block[loop_number])
    {
      tree first_block = loop_number_first_block[loop_number];
      tree last_block = loop_number_last_block[loop_number];
      tree last_block_created = 0;

      for (i = 0; i < copies - 1; i++)
        {
          tree block = first_block;
          tree insert_after = last_block;
          tree copied_block;

          /* Copy every block between first_block and last_block inclusive,
             inserting the new blocks after last_block.  */
          do
            {
              tree new_block = make_node (BLOCK);
              BLOCK_VARS (new_block) = BLOCK_VARS (block);
              BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
              BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
              BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
              TREE_USED (new_block) = TREE_USED (block);

              /* Insert the new block after the insertion point, and move
                 the insertion point to the new block.  This ensures that
                 the copies are inserted in the right order.  */
              BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
              BLOCK_CHAIN (insert_after) = new_block;
              insert_after = new_block;

              copied_block = block;
              block = BLOCK_CHAIN (block);
            }
          while (copied_block != last_block);

          /* Remember the last block created, so that we can update the
             info in the tables.  */
          if (last_block_created == 0)
            last_block_created = insert_after;
        }

      /* For all nested loops for which LAST_BLOCK was originally the last
         block, update the tables to indicate that LAST_BLOCK_CREATED is
         now the last block in the loop.  */
      for (i = loop_number; last_block == loop_number_last_block[i];
           i = loop_outer_loop[i])
        loop_number_last_block[i] = last_block_created;
    }
}
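
/* The copying above is shallow: each new BLOCK shares its subblock chain
   with the original, as the comment before unroll_block_trees notes.  The
   sketch below is illustrative only, not part of the compiler, with
   hypothetical names; it performs the same splice of shallow copies after
   the tail of a list segment, sharing the `sub' pointer rather than
   duplicating it.  */
#if 0 /* Illustrative sketch only -- not compiled.  */
#include <stdlib.h>

struct toy_block
{
  struct toy_block *chain;      /* next block at this level */
  struct toy_block *sub;        /* subblocks -- shared by all copies */
};

static struct toy_block *
toy_copy_segment (first, last)
     struct toy_block *first, *last;
{
  struct toy_block *block = first;
  struct toy_block *insert_after = last;
  struct toy_block *copied;

  do
    {
      struct toy_block *new_block
        = (struct toy_block *) malloc (sizeof (struct toy_block));

      new_block->sub = block->sub;      /* share, don't duplicate */

      /* Splice the copy in after the insertion point and advance the
         insertion point, so the copies keep their original order.  */
      new_block->chain = insert_after->chain;
      insert_after->chain = new_block;
      insert_after = new_block;

      copied = block;
      block = block->chain;
    }
  while (copied != last);

  return insert_after;          /* the last copy created */
}
#endif /* 0 */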