/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
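/* Illustration added for exposition (not part of the original file):
   for a C front end, the call sequence for

	if (cond) stmt; else stmt2;

   is roughly

	expand_start_cond (cond, 0);
	... expand stmt ...
	expand_start_else ();
	... expand stmt2 ...
	expand_end_cond ();

   using the functions defined later in this file.  */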
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */

extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */

extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */

extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */

extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
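/* Illustration added for exposition (not part of the original file):
   for a switch with case values 1, 2, 3 and 250, the labels are first
   chained 1 -> 2 -> 3 -> 250 through the RIGHT fields.  If the values
   are too sparse for a branch table, balance_case_nodes rearranges the
   chain into a roughly balanced binary tree (for instance 2 at the root,
   1 to its LEFT, and 3 and 250 down the RIGHT), which emit_case_nodes
   then walks to emit compare-and-jump insns.  */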
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;		\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
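/* Illustration added for exposition (not part of the original file):
   an `expand_end_WHATEVER' function typically finishes with

	POPSTACK (cond_stack);

   which frees the innermost conditional's `struct nesting' and pops
   `nesting_stack' (and `nesting_depth') back past anything that was
   allocated above it.  */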
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();

  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */
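/* Illustration added for exposition (not part of the original file):
   the nonlocal case below arises from GNU C nested functions, e.g.

	void f ()
	{
	  __label__ done;
	  void g () { goto done; }
	  g ();
	 done: ;
	}

   Expanding the `goto done' inside g finds that the label's context is
   f, not g, and takes the nonlocal branch of expand_goto.  */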
void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);

      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr, temp;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, 0);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, 0);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != end_block;
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
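/* Illustration added for exposition (not part of the original file):
   a GNU C extended asm such as

	asm volatile ("addl %2,%0" : "=r" (sum) : "0" (sum), "g" (inc) : "cc");

   arrives here with STRING holding the template, OUTPUTS and INPUTS the
   TREE_LISTs built from the operand lists (constraint string in the
   TREE_PURPOSE, expression in the TREE_VALUE), CLOBBERS the list holding
   "cc", and VOL nonzero because of `volatile'.  */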
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    break;
	}
      if (j == TREE_STRING_LENGTH (TREE_PURPOSE (tail)))
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j == -3)		/* `cc', which is not a register */
	    continue;

	  if (j == -4)		/* `memory', don't cache memory across asm */
	    {
	      XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
	      continue;
	    }

	  if (j < 0)
	    {
	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      0, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */
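/* Illustration added for exposition (not part of the original file):
   with -Wunused, a statement such as

	x + 1;

   draws the "value computed is not used" warning from the function
   below, while `(void) (x + 1);' or `(f (), 0);' does not.  */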
static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;
      /* Otherwise fall through to warn.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */
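/* Illustration added for exposition (not part of the original file):
   the GNU C statement expression

	int x = ({ int t = f (); t + 1; });

   is the construct these two functions bracket: the front end calls
   expand_start_stmt_expr when it sees `({', expands the enclosed
   statements, and passes the saved RTL_EXPR to expand_end_stmt_expr
   at `})'.  The value of the last expr-stmt (`t + 1') becomes the
   value of the whole expression.  */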
tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  expr_stmts_for_value--;

  return t;
}
/* The exception handling nesting looks like this:

	{		<-- exception handler block
			<-- in an exception handler
	   :		<-- in a TRY block
	   :		<-- in an exception handler
	   :		<-- in an except block
	   :		<-- in an exception handler
 */
/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label != 0)
    n = n->next;
/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label == 0)
    n = n->next;
/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;

  while (n && level--)
    n = n->next;

  return n != 0;
}
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}
/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  struct nesting *n;

  if (exitflag)
    {
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }

  if (escapeflag)
    {
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}
/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */

int
expand_escape_except ()
{
  struct nesting *n;

  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 enclosing contour.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;

	  while (handled)
	    {
	      tree this_raise;

	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      /* This exception is handled here; unlink it.  */
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    break;
			}
		    }
		  else
		    prev = this_raise;
		}

	      handled = TREE_CHAIN (handled);
	    }

	  /* Chain what is left onto the enclosing contour's raised list.  */
	  if (prev == NULL_TREE)
	    raised = n->data.except_stmt.raised;
	  else
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  return raised;
}
/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}
/* At the end of the catch clauses, jump to the end of the
   current exception contour, if any.  */

  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;

  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
}
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_note (0, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}
/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (0, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  register rtx insn = get_last_insn ();
  register rtx start_label = loop_stack->data.loop.start_label;
  rtx last_test_insn = 0;
  int num_insns = 0;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

  if (optimize
      && ! (GET_CODE (insn) == JUMP_INSN
	    && GET_CODE (PATTERN (insn)) == SET
	    && SET_DEST (PATTERN (insn)) == pc_rtx
	    && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
	    break;

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    break;

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
	    num_insns++;

	  if (last_test_insn && num_insns > 30)
	    break;

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
		       == loop_stack->data.loop.end_label))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			  == loop_stack->data.loop.end_label))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && (XEXP (SET_SRC (PATTERN (insn)), 0)
		  == loop_stack->data.loop.end_label))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (0, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
}
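/* Illustration added for exposition (not part of the original file):
   a C front end would expand `while (cond) body;' along the lines of

	expand_start_loop (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_end_loop ();

   and `continue'/`break' inside the body become calls to
   expand_continue_loop and expand_exit_loop below.  */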
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
  return 1;
}
1984 /* Generate a jump to exit the current loop. If not currently inside a loop,
1985 return 0 and do nothing; caller will print an error message. */
1988 expand_exit_loop (whichloop
)
1989 struct nesting
*whichloop
;
1993 whichloop
= loop_stack
;
1996 expand_goto_internal (0, whichloop
->data
.loop
.end_label
, 0);
2000 /* Generate a conditional jump to exit the current loop if COND
2001 evaluates to zero. If not currently inside a loop,
2002 return 0 and do nothing; caller will print an error message. */
2005 expand_exit_loop_if_false (whichloop
, cond
)
2006 struct nesting
*whichloop
;
2011 whichloop
= loop_stack
;
2014 do_jump (cond
, whichloop
->data
.loop
.end_label
, NULL
);
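/* For illustration only (hypothetical source, exact calls depend on the
   front end): while expanding something like

	while (cond) { a; if (stop) break; b; }

   a front end can emit the exit test with
   expand_exit_loop_if_false (0, cond) and the `break' with
   expand_exit_loop (0), passing 0 to mean the innermost loop on
   loop_stack; the surrounding expand_start_loop/expand_end_loop calls
   are made elsewhere in this file.  */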
/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

preserve_subexpressions_p ()

  if (flag_expensive_optimizations)

  if (optimize == 0 || loop_stack == 0)

  insn = get_last_insn_anywhere ();

          && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
              < n_non_fixed_regs * 3));

/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_something ()

  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)

        expand_goto_internal (0, n->exit_label, 0);

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

expand_null_return ()

  struct nesting *block = block_stack;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);

/* Generate RTL to return from the current function, with value VAL.  */

expand_value_return (val)

  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    emit_move_insn (return_reg, val);
  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx (USE, VOIDmode, return_reg));

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);

/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

expand_null_return_1 (last_insn, use_goto)

  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)

      end_label = return_label = gen_label_rtx ();
      expand_goto_internal (0, end_label, last_insn);

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)

      emit_jump_insn (gen_return ());

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (0, end_label, last_insn);

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

expand_return (retval)

  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  register rtx val = 0;

  struct nesting *block;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)

      expand_expr (retval, 0, VOIDmode, 0);
      expand_null_return ();

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  cleanups = any_pending_cleanups (1);

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;

    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */
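  /* For example (illustrative source, not from any particular test case):
     with this transformation

	return cond ? foo (x) : bar (x);

     is expanded much as if it had been written

	if (cond) return foo (x); else return bar (x);

     so that each arm can be recognized separately as a possible tail
     call by the code that follows.  */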
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
          || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))

      rtx label = gen_label_rtx ();
      do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
                            DECL_RESULT (current_function_decl),
                            TREE_OPERAND (retval_rhs, 1)));

      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
                            DECL_RESULT (current_function_decl),
                            TREE_OPERAND (retval_rhs, 2)));

  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
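  /* Illustrative example (hypothetical function): in

	int f (n, acc)
	     int n, acc;
	{ if (n == 0) return acc;  return f (n - 1, n * acc); }

     the second `return' is a call to the current function, so when the
     conditions below hold the arguments are simply stored back into the
     formals and control jumps to tail_recursion_label instead of emitting
     a real call.  */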
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
      /* Finish checking validity, and if valid emit code
         to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
                              DECL_ARGUMENTS (current_function_decl)))

      if (tail_recursion_label == 0)

          tail_recursion_label = gen_label_rtx ();
          emit_label_after (tail_recursion_label,
                            tail_recursion_reentry);

      expand_goto_internal (0, tail_recursion_label, last_insn);

  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct)

      /* If this is  return x == y;  then generate
         if (x == y) return 1; else return 0;
         if we can do it with explicit return insns.  */

      switch (TREE_CODE (retval_rhs))

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
        case TRUTH_AND_EXPR:

        case TRUTH_NOT_EXPR:
          op0 = gen_label_rtx ();
          jumpifnot (retval_rhs, op0);
          expand_value_return (const1_rtx);

          expand_value_return (const0_rtx);

#endif /* HAVE_return */

      && TREE_TYPE (retval_rhs) != void_type_node
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)

      /* Calculate the return value into a pseudo reg.  */
      val = expand_expr (retval_rhs, 0, VOIDmode, 0);

      /* All temporaries have now been used.  */

      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);

      /* No cleanups or no hard reg used;
         calculate value into hard return reg.  */
      expand_expr (retval, 0, VOIDmode, 0);

      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));

/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

drop_through_at_end_p ()

  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;

/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */
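/* A small illustration (hypothetical): for a tail-recursive call
   f (b, a)  inside  int f (int a, int b),  the new value of the formal
   `a' is the old value of `b'.  If `a' were overwritten before the actual
   for the second argument (the old `a') had been copied, that value
   would be lost; hence the pass below that copies any actual which
   mentions an earlier formal into a fresh register first.  */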
tail_recursion_args (actuals, formals)
     tree actuals, formals;

  register tree a = actuals, f = formals;

  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)

      if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))

      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)

  if (a != 0 || f != 0)

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)

      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
        if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
          { copy = 1; break; }

        argvec[i] = copy_to_reg (argvec[i]);

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)

      if (DECL_MODE (f) == GET_MODE (argvec[i]))
        emit_move_insn (DECL_RTL (f), argvec[i]);

        convert_move (DECL_RTL (f), argvec[i],
                      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));

/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   EXIT_FLAG is nonzero if this construct should be visible to
   `exit_something'.  */

expand_start_bindings (exit_flag)

  struct nesting *thisblock = ALLOC_NESTING ();

  rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.function_call_count = 0;

      if (block_stack->data.block.cleanups == NULL_TREE
          && (block_stack->data.block.outer_cleanups == NULL_TREE
              || block_stack->data.block.outer_cleanups == empty_cleanup_list))
        thisblock->data.block.outer_cleanups = empty_cleanup_list;

        thisblock->data.block.outer_cleanups
          = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
                       block_stack->data.block.outer_cleanups);

    thisblock->data.block.outer_cleanups = 0;

      && !(block_stack->data.block.cleanups == NULL_TREE
           && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
                   block_stack->data.block.outer_cleanups);

    thisblock->data.block.outer_cleanups = 0;

  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */

/* Generate RTL code to terminate a binding contour.
   VARS is the chain of VAR_DECL nodes
   for the variables bound in this contour.
   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */
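/* Typical use (a sketch only; the exact calls depend on the front end):
   for a compound statement such as

	{ int x = init; ... }

   the front end calls expand_start_bindings (0), then expand_decl and
   expand_decl_init for `x', expands the body, and finally calls
   expand_end_bindings (decls, 1, 0) to emit the scope notes and run any
   cleanups recorded for the contour.  */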
expand_end_bindings (vars, mark_ends, dont_jump_in)

  register struct nesting *thisblock = block_stack;

  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
      warning_with_decl (decl, "unused variable `%s'");

  /* Mark the beginning and end of the scope if requested.  */

      emit_note (0, NOTE_INSN_BLOCK_END);

    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  if (thisblock->exit_label)

      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);

  /* If necessary, make a handler for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      /* Make handler for outermost block
         if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
          /* Make handler for inner block if it has something
             special to do when you jump out of it.  */
          : (thisblock->data.block.cleanups != 0
             || thisblock->data.block.stack_level != 0)))

      rtx afterward = gen_label_rtx ();
      rtx handler_label = gen_label_rtx ();
      rtx save_receiver = gen_reg_rtx (Pmode);

      /* Don't let jump_optimize delete the handler.  */
      LABEL_PRESERVE_P (handler_label) = 1;

      /* Record the handler address in the stack slot for that purpose,
         during this block, saving and restoring the outer value.  */
      if (thisblock->next != 0)

          emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
          emit_insn_before (gen_move_insn (save_receiver,
                                           nonlocal_goto_handler_slot),
                            thisblock->data.block.first_insn);

      emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
                                       gen_rtx (LABEL_REF, Pmode,
                        thisblock->data.block.first_insn);

      /* Jump around the handler; it runs only when specially invoked.  */
      emit_jump (afterward);
      emit_label (handler_label);

#ifdef HAVE_nonlocal_goto
      if (! HAVE_nonlocal_goto)

        /* First adjust our frame pointer to its actual value.  It was
           previously set to the start of the virtual area corresponding to
           the stacked variables when we branched here and now needs to be
           adjusted to the actual hardware fp value.

           Assignments to virtual registers are converted by
           instantiate_virtual_regs into the corresponding assignment
           to the underlying register (fp in this case) that makes
           the original assignment true.
           So the following insn will actually be
           decrementing fp by STARTING_FRAME_OFFSET.  */
        emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])

          /* Now restore our arg pointer from the address at which it was saved.
             If there hasn't been space allocated for it yet, make some now.  */
          if (arg_pointer_save_area == 0)
            arg_pointer_save_area
              = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
          emit_move_insn (virtual_incoming_args_rtx,
                          /* We need a pseudo here,
                             or else instantiate_virtual_regs_1 complains.  */
                          copy_to_reg (arg_pointer_save_area));

      /* The handler expects the desired label address in the static chain
         register.  It tests the address and does an appropriate jump
         to whatever label is desired.  */
      for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
        /* Skip any labels we shouldn't be able to jump to from here.  */
        if (! DECL_TOO_LATE (TREE_VALUE (link)))

            rtx not_this = gen_label_rtx ();
            rtx this = gen_label_rtx ();
            do_jump_if_equal (static_chain_rtx,
                              gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
            emit_jump (not_this);

            expand_goto (TREE_VALUE (link));
            emit_label (not_this);

      /* If label is not recognized, abort.  */
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
      emit_label (afterward);

  /* Don't allow jumping into a block that has cleanups or a stack level.  */
      || thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)

      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
         Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)

          DECL_TOO_LATE (chain->label) = 1;
          /* If any goto without a fixup came to this label,
             that must be an error, because gotos without fixups
             come from outside all saved stack-levels and all cleanups.  */
          if (TREE_ADDRESSABLE (chain->label))
            error_with_decl (chain->label,
                             "label `%s' used before containing binding contour");

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, 0);
      do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (thisblock->data.block.stack_level != 0)

          emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                              thisblock->data.block.stack_level, 0);
          if (nonlocal_goto_handler_slot != 0)
            emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);

      /* Any gotos out of this block must also do these things.
         Also report any gotos with fixups that came to labels in this
         contour.  */
      fixup_gotos (thisblock,
                   thisblock->data.block.stack_level,
                   thisblock->data.block.cleanups,
                   thisblock->data.block.first_insn,

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

    for (decl = vars; decl; decl = TREE_CHAIN (decl))

        rtx rtl = DECL_RTL (decl);
        if (TREE_CODE (decl) == VAR_DECL && rtl != 0)

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   There is no special support here for C++ constructors.
   They should be handled by the proper code in DECL_INITIAL.  */
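/* Illustration (C++-flavored, hypothetical): for a declaration  T x;
   whose type has a destructor, the front end would pass as CLEANUP an
   expression equivalent to a call of the destructor on `x', with the
   CALL_EXPR as the topmost node.  That cleanup is then expanded at every
   exit from the binding contour -- falling off the end, a `return', or a
   `goto' out of the block.  */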
  struct nesting *thisblock = block_stack;
  tree type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)

  if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */

      if (DECL_INITIAL (decl) == 0)
        /* Error message was already done; now avoid a crash.  */
        DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);

        /* An initializer is going to decide the size of this array.
           Until we know the size, represent its address with a reg.  */
        DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));

  else if (DECL_MODE (decl) != BLKmode
           /* If -ffloat-store, don't put explicit float vars
              into regs.  */
           && !(flag_float_store
                && TREE_CODE (type) == REAL_TYPE)
           && ! TREE_THIS_VOLATILE (decl)
           && ! TREE_ADDRESSABLE (decl)
           && (TREE_REGDECL (decl) || ! obey_regdecls))

      /* Automatic variable that can go in a register.  */
      DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
      if (TREE_CODE (type) == POINTER_TYPE)
        mark_reg_pointer (DECL_RTL (decl));
      REG_USERVAR_P (DECL_RTL (decl)) = 1;

  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)

      /* Variable of fixed size that goes on the stack.  */

      /* If we previously made RTL for this decl, it must be an array
         whose size was determined by the initializer.
         The old address was a register; set that register now
         to the proper address.  */
      if (DECL_RTL (decl) != 0)

          if (GET_CODE (DECL_RTL (decl)) != MEM
              || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)

          oldaddr = XEXP (DECL_RTL (decl), 0);

        = assign_stack_temp (DECL_MODE (decl),
                             ((TREE_INT_CST_LOW (DECL_SIZE (decl))
                               + BITS_PER_UNIT - 1)

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
                           : GET_MODE_BITSIZE (DECL_MODE (decl)));

          addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
          if (addr != oldaddr)
            emit_move_insn (oldaddr, addr);

      /* If this is a memory ref that contains aggregate components,
         mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
        = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* If this is in memory because of -ffloat-store,
         set the volatile bit, to prevent optimizations from
         undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
        MEM_VOLATILE_P (DECL_RTL (decl)) = 1;

      /* Dynamic-size object: must push space on the stack.  */

      /* Record the stack pointer on entry to block, if have
         not already done so.  */
      if (thisblock->data.block.stack_level == 0)

          do_pending_stack_adjust ();
          emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                           &thisblock->data.block.stack_level,
                           thisblock->data.block.first_insn);
          stack_block_stack = thisblock;

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
                                      size_int (BITS_PER_UNIT)),

      /* This is equivalent to calling alloca.  */
      current_function_calls_alloca = 1;

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));

      if (nonlocal_goto_handler_slot != 0)
        emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
         mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
        = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;

      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

      use_variable (DECL_RTL (decl));

/* Emit code to perform the initialization of a declaration DECL.  */

expand_decl_init (decl)

  int was_used = TREE_USED (decl);

  if (TREE_STATIC (decl))

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)

      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
          || code == POINTER_TYPE)
        expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),

  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)

      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */

/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

expand_decl_cleanup (decl, cleanup)

  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */

  /* Record the cleanup if there is one.  */

      thisblock->data.block.cleanups
        = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;

/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
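/* For example (hypothetical C++ source):

	union { int i; double d; };

   declares an anonymous union; DECL is the union variable itself and
   DECL_ELTS lists `i' and `d'.  The code below gives each member rtl
   that refers to the same storage as the union, either as a MEM with the
   member's own mode or as a SUBREG of the union's register.  */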
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;

  struct nesting *thisblock = block_stack;

  expand_decl (decl, cleanup);
  x = DECL_RTL (decl);

      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)

          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;

              DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
              MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
              RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);

      else if (GET_CODE (x) == REG)

          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;

            DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);

      /* Record the cleanup if there is one.  */

          thisblock->data.block.cleanups
            = temp_tree_cons (decl_elt, cleanup_elt,
                              thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);

/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.  */

expand_cleanups (list, dont_do)

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)

        if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
          expand_cleanups (TREE_VALUE (tail), dont_do);

          /* Cleanups may be run multiple times.  For example,
             when exiting a binding contour, we expand the
             cleanups associated with that contour.  When a goto
             within that binding contour has a target outside that
             contour, it will expand all cleanups from its scope to
             the target.  Though the cleanups are expanded multiple
             times, the control paths are non-overlapping so the
             cleanups will not be executed twice.  */
          expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

/* Expand a list of cleanups for a goto fixup.
   The expansion is put into the insn chain after the insn *BEFORE_JUMP
   and *BEFORE_JUMP is set to the insn that now comes before the jump.  */

fixup_cleanups (list, before_jump)

  rtx beyond_jump = get_last_insn ();
  rtx new_before_jump;

  expand_cleanups (list, 0);
  /* Pop any pushes done in the cleanups,
     in case function is about to return.  */
  do_pending_stack_adjust ();

  new_before_jump = get_last_insn ();

  if (beyond_jump != new_before_jump)

      /* If cleanups expand to nothing, don't reorder.  */
      reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
      *before_jump = new_before_jump;

/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
               outer->data.block.cleanups);
  block->data.block.cleanups = 0;

last_cleanup_this_contour ()

  if (block_stack == 0)

  return block_stack->data.block.cleanups;

/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

any_pending_cleanups (this_contour)

  struct nesting *block;

  if (block_stack == 0)

  if (this_contour && block_stack->data.block.cleanups != NULL)

  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
          || block_stack->data.block.outer_cleanups == empty_cleanup_list

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)

/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */
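/* Typical use (a sketch only; argument values are illustrative and error
   handling is omitted): to expand

	switch (i) { case 1: ...; default: ...; }

   a front end calls expand_start_case (1, index, type, "switch statement"),
   then pushcase (value, label, &dup) for each case label and
   pushcase (0, label, &dup) for the default as the labels are parsed,
   expands the statements in between, and finishes with
   expand_end_case (index).  */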
expand_start_case (exit_flag, expr, type, printname)

  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (0, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();

/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

expand_start_case_dummy ()

  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

/* End a dummy case statement.  */

expand_end_case_dummy ()

  POPSTACK (case_stack);

/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

case_index_expr_type ()

    return TREE_TYPE (case_stack->data.case_stmt.index_expr);

/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */
pushcase (value, label, duplicate)
     register tree value;
     register tree label;

  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
    value = convert (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)

      for (insn = case_stack->data.case_stmt.start;
           insn = NEXT_INSN (insn))

          if (GET_CODE (insn) == CODE_LABEL)

          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))

              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);

  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))

  /* Fail if this is a duplicate or overlaps another entry.  */

      if (case_stack->data.case_stmt.default_label != 0)

          *duplicate = case_stack->data.case_stmt.default_label;

      case_stack->data.case_stmt.default_label = label;

      /* Find the elt in the chain before which to insert the new value,
         to keep the chain sorted in increasing order.
         But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
           /* Keep going past elements distinctly less than VALUE.  */
           *l != 0 && tree_int_cst_lt ((*l)->high, value);

          /* Element we will insert before must be distinctly greater;
             overlap means error.  */
          if (! tree_int_cst_lt (value, (*l)->low))

              *duplicate = (*l)->code_label;

      /* Add this label to the chain, and succeed.
         Copy VALUE so it is on temporary rather than momentary
         obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));

      n->high = n->low = copy_node (value);
      n->code_label = label;

  expand_label (label);

/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */
pushcase_range (value1, value2, label, duplicate)
     register tree value1, value2;
     register tree label;

  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)

      for (insn = case_stack->data.case_stmt.start;
           insn = NEXT_INSN (insn))

          if (GET_CODE (insn) == CODE_LABEL)

          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))

              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);

  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = convert (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = convert (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))

  if (! int_fits_type_p (value2, index_type))

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);

      /* Element we will insert before must be distinctly greater;
         overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))

          *duplicate = (*l)->code_label;

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));

  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */
check_for_full_enumeration_handling (type)

  register struct case_node *n;
  register struct case_node **l;
  register tree chain;

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of enumerals in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain = TREE_CHAIN (chain))

      /* Find a match between enumeral and case expression, if possible.
         Quit looking when we've gone too far (since case expressions
         are kept sorted in ascending order).  Warn about enumerals not
         handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
           n && tree_int_cst_lt (n->high, TREE_VALUE (chain));

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))

          warning ("enumeration value `%s' not handled in switch",
                   IDENTIFIER_POINTER (TREE_PURPOSE (chain)));

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerals.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)

      for (chain = TYPE_VALUES (type);
           chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
           chain = TREE_CHAIN (chain))

          warning ("case value `%d' not in enumerated type `%s'",
                   TREE_INT_CST_LOW (n->low),
                   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                        : DECL_NAME (TYPE_NAME (type))));
      if (!tree_int_cst_equal (n->low, n->high))

          for (chain = TYPE_VALUES (type);
               chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
               chain = TREE_CHAIN (chain))

              warning ("case value `%d' not in enumerated type `%s'",
                       TREE_INT_CST_LOW (n->high),
                       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                            : DECL_NAME (TYPE_NAME (type))));

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

      for (l = &case_stack->data.case_stmt.case_list;

      case_stack->data.case_stmt.default_label = (*l)->code_label;

/* Terminate a case (Pascal) or switch (C) statement
   in which CASE_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */
expand_end_case (orig_index)

  tree minval, maxval, range;
  rtx default_label = 0;
  register struct case_node *n;

  rtx table_label = gen_label_rtx ();

  register struct nesting *thiscase = case_stack;
  tree index_expr = thiscase->data.case_stmt.index_expr;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)

      /* If switch expression was an enumerated type, check that all
         enumeration literals are covered by the cases.
         No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
          && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
          && TREE_CODE (index_expr) != INTEGER_CST)
        check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)

          for (insn = get_last_insn ();
               insn != case_stack->data.case_stmt.start;
               insn = PREV_INSN (insn))
            if (GET_CODE (insn) != NOTE
                && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))

              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);

      /* If we don't have a default-label, create one here,
         after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)

          thiscase->data.case_stmt.default_label
            = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
          expand_label (thiscase->data.case_stmt.default_label);

      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
         Also convert all the case values to the index expr's data type.  */

      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)

          /* Check low and high label values are integers.  */
          if (TREE_CODE (n->low) != INTEGER_CST)

          if (TREE_CODE (n->high) != INTEGER_CST)

          n->low = convert (TREE_TYPE (index_expr), n->low);
          n->high = convert (TREE_TYPE (index_expr), n->high);

          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */

              if (INT_CST_LT (n->low, minval))

              if (INT_CST_LT (maxval, n->high))

          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))

      /* Compute span of values.  */

        range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)

          expand_expr (index_expr, const0_rtx, VOIDmode, 0);

          emit_jump (default_label);

      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
         bounds, this means extra overhead for dispatch tables
         which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
      else if (TREE_INT_CST_HIGH (range) != 0
               || count < CASE_VALUES_THRESHOLD
               || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
               || TREE_CODE (index_expr) == INTEGER_CST
               /* These will reduce to a constant.  */
               || (TREE_CODE (index_expr) == CALL_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
                   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
                   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
               || (TREE_CODE (index_expr) == COMPOUND_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))

          index = expand_expr (index_expr, 0, VOIDmode, 0);

          /* If the index is a short or char that we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
                  == CODE_FOR_nothing))

              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (cmp_optab->handlers[(int) wider_mode].insn_code
                    != CODE_FOR_nothing)

                    index = convert_to_mode (wider_mode, index, unsignedp);

          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)

              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)

                    = build_int_2 (INTVAL (index),
                                   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (TREE_TYPE (index_expr), index_expr);

              /* For constant index expressions we need only
                 issue an unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimisation phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list;

                if (! tree_int_cst_lt (index_expr, n->low)
                    && ! tree_int_cst_lt (n->high, index_expr))

                emit_jump (label_rtx (n->code_label));

                emit_jump (default_label);

              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */

                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, TREE_TYPE (index_expr));
              emit_jump_if_reachable (default_label);
          enum machine_mode index_mode = SImode;
          int index_bits = GET_MODE_BITSIZE (index_mode);

          /* Convert the index to SImode.  */
          if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
              > GET_MODE_BITSIZE (index_mode))

              enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
              rtx rangertx = expand_expr (range, 0, VOIDmode, 0);

              /* We must handle the endpoints in the original mode.  */
              index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
                                  index_expr, minval);
              minval = integer_zero_node;
              index = expand_expr (index_expr, 0, VOIDmode, 0);
              emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
              emit_jump_insn (gen_bltu (default_label));
              /* Now we can safely truncate.  */
              index = convert_to_mode (index_mode, index, 0);

              if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
                index_expr = convert (type_for_size (index_bits, 0),

              index = expand_expr (index_expr, 0, VOIDmode, 0);

          index = protect_from_queue (index, 0);
          do_pending_stack_adjust ();

          emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
                                      expand_expr (range, 0, VOIDmode, 0),
                                      table_label, default_label));

#ifdef HAVE_tablejump
          if (! win && HAVE_tablejump)

              index_expr = convert (thiscase->data.case_stmt.nominal_type,
                                    fold (build (MINUS_EXPR,
                                                 TREE_TYPE (index_expr),
                                                 index_expr, minval)));
              index = expand_expr (index_expr, 0, VOIDmode, 0);

              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
                            expand_expr (range, 0, VOIDmode, 0),
                            table_label, default_label);

          /* Get table of labels to jump to, in order of case index.  */

          ncases = TREE_INT_CST_LOW (range) + 1;
          labelvec = (rtx *) alloca (ncases * sizeof (rtx));
          bzero (labelvec, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)

              = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

                = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
              if (i + TREE_INT_CST_LOW (minval)
                  == TREE_INT_CST_LOW (n->high))

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

          /* Output the table */
          emit_label (table_label);

          /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
             were an expression, instead of an #ifdef/#ifndef.  */
#ifdef CASE_VECTOR_PC_RELATIVE
            emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
                                     gen_rtx (LABEL_REF, Pmode, table_label),
                                     gen_rtvec_v (ncases, labelvec)));

            emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
                                     gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
                     thiscase->data.case_stmt.start);

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;

  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)

      if (INTVAL (op1) == INTVAL (op2))

      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
3912 estimate_case_costs (node
)
3915 tree min_ascii
= build_int_2 (-1, -1);
3916 tree max_ascii
= convert (TREE_TYPE (node
->high
), build_int_2 (127, 0));
3920 /* If we haven't already made the cost table, make it now. Note that the
3921 lower bound of the table is -1, not zero. */
3923 if (cost_table
== NULL
)
3925 cost_table
= ((short *) xmalloc (129 * sizeof (short))) + 1;
3926 bzero (cost_table
- 1, 129 * sizeof (short));
3928 for (i
= 0; i
< 128; i
++)
3932 else if (ispunct (i
))
3934 else if (iscntrl (i
))
3938 cost_table
[' '] = 8;
3939 cost_table
['\t'] = 4;
3940 cost_table
['\0'] = 4;
3941 cost_table
['\n'] = 2;
3942 cost_table
['\f'] = 1;
3943 cost_table
['\v'] = 1;
3944 cost_table
['\b'] = 1;
3947 /* See if all the case expressions look like text. It is text if the
3948 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3949 as signed arithmetic since we don't want to ever access cost_table with a
3950 value less than -1. Also check that none of the constants in a range
3951 are strange control characters. */
3953 for (n
= node
; n
; n
= n
->right
)
3955 if ((INT_CST_LT (n
->low
, min_ascii
)) || INT_CST_LT (max_ascii
, n
->high
))
3958 for (i
= TREE_INT_CST_LOW (n
->low
); i
<= TREE_INT_CST_LOW (n
->high
); i
++)
3959 if (cost_table
[i
] < 0)
3963 /* All interesting values are within the range of interesting
3964 ASCII characters. */
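
/* For illustration only: a standalone sketch (kept out of compilation)
   of the "array whose logical index starts at -1" trick used for
   cost_table above.  The names and weights below are invented for the
   example.  */
#if 0
static short *weight_table;	/* valid indices run from -1 to 127 */

static void
init_weight_table ()
{
  /* Allocate 129 entries and advance the pointer by one, so that
     weight_table[-1] addresses the first allocated slot.  */
  weight_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
  bzero (weight_table - 1, 129 * sizeof (short));

  weight_table[-1] = 1;		/* example weight for the value -1 */
  weight_table['\n'] = 2;	/* example weight for newline */
}
#endif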

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
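
/* For illustration only: the same grouping idea on a plain array of
   ranges instead of a case_node list (kept out of compilation).  Two
   adjacent entries merge when they share a target and their values are
   consecutive, so 1, 2 and 3 with one target become the single range
   1..3.  The types and names are invented for the example.  */
#if 0
struct crange { long low, high; int target; };

/* Merge consecutive entries of RANGES in place and return the new
   count.  RANGES must be sorted by LOW and non-overlapping.  */
static long
group_ranges (ranges, n)
     struct crange *ranges;
     long n;
{
  long out = 0, i;

  for (i = 0; i < n; i++)
    {
      if (out > 0
	  && ranges[out - 1].target == ranges[i].target
	  && ranges[out - 1].high + 1 == ranges[i].low)
	/* Consecutive and same target: extend the previous entry.  */
	ranges[out - 1].high = ranges[i].high;
      else
	/* Otherwise keep this entry as a new one.  */
	ranges[out++] = ranges[i];
    }
  return out;
}
#endif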

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;

	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost.
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }

	  np = *npp;
	  *npp = 0;
	  *head = np;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
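
/* For illustration only: the basic split-at-the-pivot transformation on
   a sorted array, ignoring the cost weighting used above (kept out of
   compilation).  The types and names are invented for the example.  */
#if 0
struct bnode { long value; struct bnode *left, *right, *parent; };

/* Build a balanced tree from the sorted array NODES[LO..HI]; PARENT
   becomes the parent of the subtree's root.  Returns the root, or 0
   for an empty range.  */
static struct bnode *
build_balanced (nodes, lo, hi, parent)
     struct bnode *nodes;
     long lo, hi;
     struct bnode *parent;
{
  long mid;
  struct bnode *root;

  if (lo > hi)
    return 0;

  mid = lo + (hi - lo) / 2;	/* the pivot */
  root = &nodes[mid];
  root->parent = parent;
  root->left = build_balanced (nodes, lo, mid - 1, root);
  root->right = build_balanced (nodes, mid + 1, hi, root);
  return root;
}
#endif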

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
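
/* For illustration only: the same redundancy argument on plain
   integers (kept out of compilation).  If some ancestor already tested
   the value LOW-1 as its upper bound, this path has ruled out
   everything below LOW, so the low-bound test is redundant.  Names are
   invented for the example.  */
#if 0
struct inode { long low, high; struct inode *parent; };

static int
low_test_is_redundant (node)
     struct inode *node;
{
  struct inode *p;

  for (p = node->parent; p; p = p->parent)
    if (p->high == node->low - 1)
      return 1;
  return 0;
}
#endif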

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);
  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
			     GT, 0, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, 0,
						 VOIDmode, 0),
			     LT, 0, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, 0,
						 VOIDmode, 0),
			     GT, 0, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would only avoid
	     one right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
				 LT, 0, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, 0, VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label.  */
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
#endif /* 0 */

	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
				 GT, 0, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, 0, VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
			 GT, 0, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
			 GE, 0, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */
	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
			     LT, 0, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
			 LE, 0, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
			     GT, 0, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
			 GE, 0, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
			     GT, 0, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
			     LT, 0, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
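
/* For illustration only: roughly the shape of the decision code the
   recursion above produces for a three-node tree whose root is the
   single value 5 with children 1 and 9 (kept out of compilation; the
   values and names are made up, and the real code emits compare and
   branch insns rather than C).  */
#if 0
static int
dispatch_example (index)
     long index;
{
  if (index == 5)
    return 5;			/* root's own label */
  if (index > 5)
    {
      /* Right-hand subtree.  */
      if (index == 9)
	return 9;
      return -1;		/* default */
    }
  /* Left-hand subtree.  */
  if (index == 1)
    return 1;
  return -1;			/* default */
}
#endif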

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by loop number, contains pointer to the first block in the loop,
   or zero if none.  Only valid if doing loop unrolling and outputting debugger
   info.  */

tree *loop_number_first_block;

/* Indexed by loop number, contains pointer to the last block in the loop,
   only valid if loop_number_first_block is nonzero.  */

tree *loop_number_last_block;

/* Indexed by loop number, contains nesting level of first block in the
   loop, if any.  Only valid if doing loop unrolling and outputting debugger
   info.  */

int *loop_number_block_level;

/* Scan the function looking for loops, and walk the BLOCK tree at the
   same time.  Record the first and last BLOCK tree corresponding to each
   loop.  This function is similar to find_and_verify_loops in loop.c.  */

void
find_loop_tree_blocks (f)
     rtx f;
{
  rtx insn;
  int current_loop = -1;
  int next_loop = -1;
  int loop;
  int block_level, tree_level;
  tree tree_block, parent_tree_block;

  tree_block = DECL_INITIAL (current_function_decl);
  parent_tree_block = 0;
  block_level = 0;
  tree_level = 0;

  /* Find boundaries of loops, and save the first and last BLOCK tree
     corresponding to each loop.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_BEG:
	    loop_number_block_level[++next_loop] = block_level;
	    loop_number_first_block[next_loop] = 0;
	    current_loop = next_loop;
	    break;

	  case NOTE_INSN_LOOP_END:
	    if (current_loop == -1)
	      abort ();

	    current_loop = loop_outer_loop[current_loop];
	    break;

	  case NOTE_INSN_BLOCK_BEG:
	    if (tree_level < block_level)
	      {
		/* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
		   we must now visit the subtree of the current block.  */
		parent_tree_block = tree_block;
		tree_block = BLOCK_SUBBLOCKS (tree_block);
		tree_level++;
	      }
	    else if (tree_level > block_level)
	      abort ();

	    /* Save this block tree here for all nested loops for which
	       this is the topmost block.  */
	    for (loop = current_loop;
		 loop != -1 && block_level == loop_number_block_level[loop];
		 loop = loop_outer_loop[loop])
	      {
		if (loop_number_first_block[loop] == 0)
		  loop_number_first_block[loop] = tree_block;
		loop_number_last_block[loop] = tree_block;
	      }

	    block_level++;
	    break;

	  case NOTE_INSN_BLOCK_END:
	    block_level--;
	    if (tree_level > block_level)
	      {
		/* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
		   we must now visit the parent of the current tree.  */
		if (tree_block != 0 || parent_tree_block == 0)
		  abort ();

		tree_block = parent_tree_block;
		parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
		tree_level--;
	      }
	    tree_block = BLOCK_CHAIN (tree_block);
	    break;
	  }
    }
}
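
/* For illustration only: the nesting-depth bookkeeping that the walk
   above relies on, reduced to a stream of begin/end events (kept out
   of compilation).  Names are invented for the example.  */
#if 0
enum walk_event { EV_BEG, EV_END };

/* Return the maximum nesting depth of a well-formed event stream,
   or -1 if an end appears without a matching begin or some begins
   are never closed.  */
static int
max_nesting (events, n)
     enum walk_event *events;
     int n;
{
  int depth = 0, deepest = 0, i;

  for (i = 0; i < n; i++)
    {
      if (events[i] == EV_BEG)
	{
	  if (++depth > deepest)
	    deepest = depth;
	}
      else if (--depth < 0)
	return -1;
    }
  return depth == 0 ? deepest : -1;
}
#endif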

/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
   to BLOCK_BEG notes inside the loop LOOP_NUMBER.

   Note that we only copy the topmost level of tree nodes; they will share
   pointers to the same subblocks.  */

void
unroll_block_trees (loop_number, copies)
     int loop_number;
     int copies;
{
  int i;

  /* First check whether there are any blocks that need to be copied.  */
  if (loop_number_first_block[loop_number])
    {
      tree first_block = loop_number_first_block[loop_number];
      tree last_block = loop_number_last_block[loop_number];
      tree last_block_created = 0;

      for (i = 0; i < copies - 1; i++)
	{
	  tree block = first_block;
	  tree insert_after = last_block;
	  tree copied_block;

	  /* Copy every block between first_block and last_block inclusive,
	     inserting the new blocks after last_block.  */
	  do
	    {
	      tree new_block = make_node (BLOCK);
	      BLOCK_VARS (new_block) = BLOCK_VARS (block);
	      BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
	      BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
	      BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
	      TREE_USED (new_block) = TREE_USED (block);

	      /* Insert the new block after the insertion point, and move
		 the insertion point to the new block.  This ensures that
		 the copies are inserted in the right order.  */
	      BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
	      BLOCK_CHAIN (insert_after) = new_block;
	      insert_after = new_block;

	      copied_block = block;
	      block = BLOCK_CHAIN (block);
	    }
	  while (copied_block != last_block);

	  /* Remember the last block created, so that we can update the
	     info in the tables.  */
	  if (last_block_created == 0)
	    last_block_created = insert_after;
	}

      /* For all nested loops for which LAST_BLOCK was originally the last
	 block, update the tables to indicate that LAST_BLOCK_CREATED is
	 now the last block in the loop.  */
      for (i = loop_number; last_block == loop_number_last_block[i];
	   i = loop_outer_loop[i])
	loop_number_last_block[i] = last_block_created;
    }
}
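
/* For illustration only: the chain manipulation performed above on
   BLOCK_CHAIN, shown on an ordinary singly linked list (kept out of
   compilation).  The types and names are invented for the example.  */
#if 0
struct link { int payload; struct link *next; };

/* Copy the nodes FIRST..LAST (inclusive) and splice the copies in
   immediately after LAST, preserving their order.  */
static void
duplicate_segment (first, last)
     struct link *first, *last;
{
  struct link *src = first;
  struct link *insert_after = last;

  while (1)
    {
      struct link *copy = (struct link *) xmalloc (sizeof (struct link));
      copy->payload = src->payload;

      /* Insert the copy after the insertion point and advance the
	 insertion point, so the copies keep their original order.  */
      copy->next = insert_after->next;
      insert_after->next = copy;
      insert_after = copy;

      if (src == last)
	break;
      src = src->next;
    }
}
#endif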