/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
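
/* Illustrative sketch (not part of the original source): how a front end
   might drive the conditional-expansion entry points defined later in this
   file for `if (c) then-stmt else else-stmt'.  The statement-expansion
   steps in the body stand for whatever the language front end does there.  */
#if 0
static void
example_expand_if_then_else (cond)
     tree cond;
{
  expand_start_cond (cond, 0);	/* emit the test; branch past then-arm
				   when COND is false */
  /* ... expand the then-clause here ... */
  expand_start_else ();		/* jump to the endif label; begin else-arm */
  /* ... expand the else-clause here ... */
  expand_end_cond ();		/* emit the endif (and pending next) labels */
}
#endif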
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */

extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */

extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */

extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose addresses are offsets from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */

extern tree rtl_expr_chain;

/* Stack allocation level in which temporaries for TARGET_EXPRs live.  */

extern int target_temp_slot_level;

extern int temp_slot_level;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
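
/* Illustrative sketch (not part of the original source): for a sparse
   switch such as

	switch (x) { case 1: ... case 3: ... case 7: ... case 20: ... }

   no branch table is used; after balancing, the four case nodes might form
   the binary tree

		 7
	       /   \
	      3     20
	     /
	    1

   and the emitted compare-and-jump code tests X against 7 first, then
   descends left for smaller values and right for larger ones.  */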
struct case_node *left;		/* Left son in binary tree */
struct case_node *right;	/* Right son in binary tree; also node chain */
struct case_node *parent;	/* Parent of node in binary tree */
tree low;			/* Lowest index value for this label */
tree high;			/* Highest index value for this label */
tree code_label;		/* Label to jump to when node matches */

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting *next;

  /* For conds (if-then and if-then-else statements).  */

  /* Label for the end of the if construct.
     There is none if EXITFLAG was not set
     and no `else' has been seen yet.  */

  /* Label for the end of this alternative.
     This may be the end of the if or the next else/elseif.  */

  /* Label at the top of the loop; place to loop back to.  */

  /* Label at the end of the whole construct.  */

  /* Label before a jump that branches to the end of the whole
     construct.  This is where destructors go if any.  */

  /* Label for `continue' statement to jump to;
     this is in front of the stepper of the loop.  */

  /* For variable binding contours.  */

  /* Sequence number of this binding contour within the function,
     in order of entry.  */
  int block_start_count;

  /* Nonzero => value to restore stack to on exit.  */

  /* The NOTE that starts this contour.
     Used by expand_goto to check whether the destination
     is within each contour or not.  */

  /* Innermost containing binding contour that has a stack level.  */
  struct nesting *innermost_stack_block;

  /* List of cleanups to be run on exit from this contour.
     This is a list of expressions to be evaluated.
     The TREE_PURPOSE of each link is the ..._DECL node
     which the cleanup pertains to.  */

  /* List of cleanup-lists of blocks containing this block,
     as they were at the locus where this block appears.
     There is an element for each containing block,
     ordered innermost containing block first.
     The tail of this list can be 0,
     if all remaining elements would be empty lists.
     The element's TREE_VALUE is the cleanup-list of that block,
     which may be null.  */

  /* Chain of labels defined inside this binding contour.
     For contours that have stack levels or cleanups.  */
  struct label_chain *label_chain;

  /* Number of function calls seen, as of start of this block.  */
  int function_call_count;

  /* Bytecode specific: stack level to restore stack to on exit.  */

  /* Nonzero if this is associated with an EH region.  */
  int exception_region;

  /* The saved target_temp_slot_level from our outer block.
     We may reset target_temp_slot_level to be the level of
     this block, if that is done, target_temp_slot_level
     reverts to the saved target_temp_slot_level at the very
     end of the block.  */
  int target_temp_slot_level;

  /* True if we are currently emitting insns in an area of
     output code that is controlled by a conditional
     expression.  This is used by the cleanup handling code to
     generate conditional cleanup actions.  */
  int conditional_code;

  /* A place to move the start of the exception region for any
     of the conditional cleanups, must be at the end or after
     the start of the last unconditional cleanup, and before any
     conditional branch points.  */
  rtx last_unconditional_cleanup;

  /* When in a conditional context, this is the specific
     cleanup list associated with last_unconditional_cleanup,
     where we place the conditionalized cleanups.  */

  /* For switch (C) or case (Pascal) statements,
     and also for dummies (see `expand_start_case_dummy').  */

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */

  /* A list of case labels; it is first built as an AVL tree.
     During expand_end_case, this is converted to a list, and may be
     rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list;

  /* Label to jump to if no case matches.  */

  /* The expression to be dispatched on.  */

  /* Type that INDEX_EXPR should be converted to.  */

  /* Number of range exprs in case statement.  */

  /* Name of this kind of statement, for warnings.  */

  /* Nonzero if a case label has been seen in this case stmt.  */
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */
#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
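
/* Illustrative sketch (not part of the original source): the push half of
   the protocol that ALLOC_NESTING and POPSTACK implement.  Each
   expand_start_FOO allocates a nesting object, links it onto both its own
   stack and `nesting_stack', and the matching expand_end_FOO later pops it;
   `foo_stack' here is a hypothetical stand-in for cond_stack, loop_stack,
   and the other per-construct stacks.  */
#if 0
  struct nesting *thisfoo = ALLOC_NESTING ();

  thisfoo->next = foo_stack;		/* chain of constructs of this kind */
  thisfoo->all = nesting_stack;		/* chain of all pending constructs */
  thisfoo->depth = ++nesting_depth;
  foo_stack = thisfoo;
  nesting_stack = thisfoo;
  /* ... expand the body of the construct ... */
  POPSTACK (foo_stack);			/* unwinds nesting_stack down to
					   and including THISFOO */
#endif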
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

  gcc_obstack_init (&stmt_obstack);

init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  stack_block_stack = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;

  init_eh_for_function ();
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
/* Emit a no-op instruction.  */

  last_insn = get_last_insn ();
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

expand_computed_goto (exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  /* Be sure the function is executable.  */
  if (flag_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
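
/* Illustrative sketch (not part of the original source): for the GNU C
   statement `goto *p;' a front end would simply hand the tree for `p'
   (P_EXPR below is a hypothetical placeholder) to the routine above.  */
#if 0
  expand_computed_goto (p_expr);
#endif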
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

declare_nonlocal_label (label)
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  addr = replace_rtx (copy_rtx (addr),
			      virtual_stack_vars_rtx,
			      hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
    }

    expand_goto_internal (label, label_rtx (label), NULL_RTX);
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

expand_goto_internal (body, label, last_insn)
{
  struct nesting *block;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	  {
	    if (block->data.block.stack_level != 0)
	      stack_level = block->data.block.stack_level;
	    /* Execute the cleanups for blocks we are exiting.  */
	    if (block->data.block.cleanups != 0)
	      {
		expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
		do_pending_stack_adjust ();
	      }
	  }

	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
	TREE_ADDRESSABLE (body) = 1;
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

expand_fixup (tree_label, rtl_label, last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;

  /* Now set END_BLOCK to the binding level to which we will return.  */

      struct nesting *next_block = end_block->all;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	emit_insns_after (fixup->before_jump, original_before_jump);

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : NULL_TREE);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

expand_fixups (first_insn)
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
{
  register struct goto_fixup *f, *prev;
  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)

	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();

	  emit_insns_after (cleanup_insns, f->before_jump);
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */

	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();

	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	f->stack_level = stack_level;
      }
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

  if (flag_check_memory_usage)
    error ("`asm' cannot be used with `-fcheck-memory-usage'");

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
{
  rtvec argvec, constraints;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */

  /* An ASM with no outputs needs to be treated as volatile.  */

  if (flag_check_memory_usage)
    error ("`asm' cannot be used with `-fcheck-memory-usage'");

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */

  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)

	error ("unknown register name `%s' in `asm'", regname);
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      int found_equal = 0;
      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	    /* Make sure we can specify the matching operand.  */

	      error ("output operand constraint %d contains `+'", i);

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");

	  case 'p':  case 'g':  case 'r':
	  }

      if (! found_equal && ! found_plus)

	error ("output operand constraint lacks `='");

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))

	  mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);

	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);

      inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
      inout_opnum[ninout++] = i;
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)

    error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);

  /* Make vectors for the expression-rtx and constraint strings.  */
  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  /* The only use of BODY is if no outputs are specified, so set
     it volatile, at least for now.  */
  MEM_VOLATILE_P (body) = 1;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)

      if (TREE_PURPOSE (tail) == NULL_TREE)

	error ("hard register `%s' listed as input operand to `asm'",
	       TREE_STRING_POINTER (TREE_VALUE (tail)) );

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]

	      ("matching constraint references invalid operand number");

	    /* ... fall through ...  */

	  case 'p':  case 'g':  case 'r':
	  }

      mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))

	  XVECEXP (body, 3, i)
	    = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			 XVECEXP (body, 3, i));

	  XVECEXP (body, 3, i)
	    = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			       XVECEXP (body, 3, i));

	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     TREE_STRING_POINTER (TREE_PURPOSE (tail)));
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
		   ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }

      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j == -3)	/* `cc', which is not a register */

	  if (j == -4)	/* `memory', don't cache memory across asm */
	    {
	      XVECEXP (body, 0, i++)
		= gen_rtx_CLOBBER (VOIDmode,
				   gen_rtx_MEM (BLKmode,
						gen_rtx_SCRATCH (VOIDmode)));
	    }

	    /* Ignore unknown register, error already signaled.  */

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
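
/* Illustrative sketch (not part of the original source): the GNU C
   statement

	asm volatile ("add %1,%0" : "=r" (x) : "r" (y) : "cc");

   reaches expand_asm_operands with STRING holding the template
   "add %1,%0", OUTPUTS a one-element list whose TREE_PURPOSE is the
   constraint "=r" and whose TREE_VALUE is the lvalue `x', INPUTS likewise
   pairing "r" with `y', CLOBBERS a list naming "cc", and VOL nonzero
   because of the `volatile' keyword.  */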
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

expand_expr_stmt (exp)
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);

	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

warn_if_unused_value (exp)
{
  if (TREE_USED (exp))

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case METHOD_CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:

      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */

      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

      if (TREE_NO_UNUSED_WARNING (exp))

      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))

      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))

      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)

      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))

      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */

	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)

      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* ... fall through ...  */

      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))

      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
    }

/* Clear out the memory of the last expression evaluated.  */
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  do_pending_stack_adjust ();
  start_sequence_for_rtl_expr (t);

  expr_stmts_for_value++;

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

expand_end_stmt_expr (t)
{
  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  expr_stmts_for_value--;
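
/* Illustrative sketch (not part of the original source): a front end
   handling the GNU C statement expression `({ stmt1; stmt2; val; })'
   brackets the statements with the two routines above, roughly as
   follows.  */
#if 0
  tree t = expand_start_stmt_expr ();
  /* ... expand stmt1, stmt2 and the final expression `val' with
     expand_expr_stmt, which records the value of the last one ... */
  t = expand_end_stmt_expr (t);	/* RTL_EXPR carrying the insns and value */
#endif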
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

expand_start_cond (cond, exitflag)
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}

/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

expand_start_elseif (cond)
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}

/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}

/* After calling expand_start_else, turn this "else" into an "else if"
   by providing another condition.  */

expand_elseif (cond)
{
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}

/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
1854 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1855 loop should be exited by `exit_something'. This is a loop for which
1856 `expand_continue' will jump to the top of the loop.
1858 Make an entry on loop_stack to record the labels associated with
1862 expand_start_loop (exit_flag
)
1865 register struct nesting
*thisloop
= ALLOC_NESTING ();
1867 /* Make an entry on loop_stack for the loop we are entering. */
1869 thisloop
->next
= loop_stack
;
1870 thisloop
->all
= nesting_stack
;
1871 thisloop
->depth
= ++nesting_depth
;
1872 thisloop
->data
.loop
.start_label
= gen_label_rtx ();
1873 thisloop
->data
.loop
.end_label
= gen_label_rtx ();
1874 thisloop
->data
.loop
.alt_end_label
= 0;
1875 thisloop
->data
.loop
.continue_label
= thisloop
->data
.loop
.start_label
;
1876 thisloop
->exit_label
= exit_flag
? thisloop
->data
.loop
.end_label
: 0;
1877 loop_stack
= thisloop
;
1878 nesting_stack
= thisloop
;
1880 do_pending_stack_adjust ();
1882 emit_note (NULL_PTR
, NOTE_INSN_LOOP_BEG
);
1883 emit_label (thisloop
->data
.loop
.start_label
);
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}

/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
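/* Illustrative sketch (not part of GNU CC): how a front end might expand
   a `for'-style loop whose `continue' must jump to the increment step
   rather than to the top of the loop.  COND, BODY and INCR are assumed
   to be expression trees/statements for simplicity.  */
#if 0
static void
example_expand_for_loop (cond, body, incr)
     tree cond, body, incr;
{
  struct nesting *loop = expand_start_loop_continue_elsewhere (1);

  expand_exit_loop_if_false (loop, cond);	/* leave loop when COND is false */
  expand_expr_stmt (body);			/* loop body */
  expand_loop_continue_here ();			/* `continue' lands here */
  expand_expr_stmt (incr);			/* increment step */
  expand_end_loop ();				/* jump back; emit end_label */
}
#endif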
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  register rtx insn;
  register rtx start_label;
  rtx last_test_insn = 0;
  int num_insns = 0;

  insn = get_last_insn ();
  start_label = loop_stack->data.loop.start_label;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

  if (optimize
      && ! (GET_CODE (insn) == JUMP_INSN
	    && GET_CODE (PATTERN (insn)) == SET
	    && SET_DEST (PATTERN (insn)) == pc_rtx
	    && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
	    break;

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    break;

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
	    num_insns++;

	  if (last_test_insn && num_insns > 30)
	    break;

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
			== loop_stack->data.loop.end_label)
		       || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
			   == loop_stack->data.loop.alt_end_label)))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			   == loop_stack->data.loop.end_label)
			  || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			      == loop_stack->data.loop.alt_end_label)))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && ((XEXP (SET_SRC (PATTERN (insn)), 0)
		   == loop_stack->data.loop.end_label)
		  || (XEXP (SET_SRC (PATTERN (insn)), 0)
		      == loop_stack->data.loop.alt_end_label)))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
}
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop (whichloop)
     struct nesting *whichloop;
{
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}
/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  rtx label = gen_label_rtx ();
  rtx last_insn;

  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;

  /* In order to handle fixups, we actually create a conditional jump
     around an unconditional branch to exit the loop.  If fixups are
     necessary, they go before the unconditional branch.  */

  do_jump (cond, NULL_RTX, label);
  last_insn = get_last_insn ();
  if (GET_CODE (last_insn) == CODE_LABEL)
    whichloop->data.loop.alt_end_label = last_insn;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
			NULL_RTX);
  emit_label (label);

  return 1;
}
/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

int
preserve_subexpressions_p ()
{
  rtx insn;

  if (flag_expensive_optimizations)
    return 1;

  if (optimize == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
}
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something ()
{
  struct nesting *n;

  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
	return 1;
      }

  return 0;
}
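/* Illustrative sketch (not part of GNU CC): how a front end might map a
   language-level `break' onto the routine above.  Loops and case
   statements opened with EXIT_FLAG nonzero are visible to
   exit_something, so the innermost such construct is the one exited;
   the diagnostic wording is the caller's business, as noted above.  */
#if 0
static void
example_expand_break ()
{
  if (! expand_exit_something ())
    error ("break statement not within loop or switch");
}
#endif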
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  struct nesting *block = block_stack;
  rtx last_insn = 0;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (val)
     rtx val;
{
  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode mode
	= promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
			&unsignedp, 1);

      if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
	convert_move (return_reg, val, unsignedp);
      else
#endif
	emit_move_insn (return_reg, val);
    }

  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx_USE (VOIDmode, return_reg));
  /* Handle calls that return values in multiple non-contiguous locations.
     The Irix 6 ABI has examples of this.  */
  else if (GET_CODE (return_reg) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (return_reg, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);

	  if (GET_CODE (x) == REG
	      && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    emit_insn (gen_rtx_USE (VOIDmode, x));
	}
    }

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

static void
expand_null_return_1 (last_insn, use_goto)
     rtx last_insn;
     int use_goto;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
    {
      if (end_label == 0)
	end_label = return_label = gen_label_rtx ();
      expand_goto_internal (NULL_TREE, end_label, last_insn);
      return;
    }

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
#ifdef HAVE_return
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
      return;
    }
#endif

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}
2266 /* Generate RTL to evaluate the expression RETVAL and return it
2267 from the current function. */
2270 expand_return (retval
)
2273 /* If there are any cleanups to be performed, then they will
2274 be inserted following LAST_INSN. It is desirable
2275 that the last_insn, for such purposes, should be the
2276 last insn before computing the return value. Otherwise, cleanups
2277 which call functions can clobber the return value. */
2278 /* ??? rms: I think that is erroneous, because in C++ it would
2279 run destructors on variables that might be used in the subsequent
2280 computation of the return value. */
2282 register rtx val
= 0;
2286 struct nesting
*block
;
2288 /* If function wants no value, give it none. */
2289 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
2291 expand_expr (retval
, NULL_RTX
, VOIDmode
, 0);
2293 expand_null_return ();
2297 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2298 /* This is not sufficient. We also need to watch for cleanups of the
2299 expression we are about to expand. Unfortunately, we cannot know
2300 if it has cleanups until we expand it, and we want to change how we
     expand it depending upon whether we need cleanups.  We can't win.  */
2303 cleanups
= any_pending_cleanups (1);
2308 if (TREE_CODE (retval
) == RESULT_DECL
)
2309 retval_rhs
= retval
;
2310 else if ((TREE_CODE (retval
) == MODIFY_EXPR
|| TREE_CODE (retval
) == INIT_EXPR
)
2311 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
2312 retval_rhs
= TREE_OPERAND (retval
, 1);
2313 else if (TREE_TYPE (retval
) == void_type_node
)
2314 /* Recognize tail-recursive call to void function. */
2315 retval_rhs
= retval
;
2317 retval_rhs
= NULL_TREE
;
2319 /* Only use `last_insn' if there are cleanups which must be run. */
2320 if (cleanups
|| cleanup_label
!= 0)
2321 last_insn
= get_last_insn ();
2323 /* Distribute return down conditional expr if either of the sides
2324 may involve tail recursion (see test below). This enhances the number
2325 of tail recursions we see. Don't do this always since it can produce
2326 sub-optimal code in some cases and we distribute assignments into
2327 conditional expressions when it would help. */
2329 if (optimize
&& retval_rhs
!= 0
2330 && frame_offset
== 0
2331 && TREE_CODE (retval_rhs
) == COND_EXPR
2332 && (TREE_CODE (TREE_OPERAND (retval_rhs
, 1)) == CALL_EXPR
2333 || TREE_CODE (TREE_OPERAND (retval_rhs
, 2)) == CALL_EXPR
))
2335 rtx label
= gen_label_rtx ();
2338 do_jump (TREE_OPERAND (retval_rhs
, 0), label
, NULL_RTX
);
2339 expr
= build (MODIFY_EXPR
, TREE_TYPE (TREE_TYPE (current_function_decl
)),
2340 DECL_RESULT (current_function_decl
),
2341 TREE_OPERAND (retval_rhs
, 1));
2342 TREE_SIDE_EFFECTS (expr
) = 1;
2343 expand_return (expr
);
2346 expr
= build (MODIFY_EXPR
, TREE_TYPE (TREE_TYPE (current_function_decl
)),
2347 DECL_RESULT (current_function_decl
),
2348 TREE_OPERAND (retval_rhs
, 2));
2349 TREE_SIDE_EFFECTS (expr
) = 1;
2350 expand_return (expr
);
2354 /* For tail-recursive call to current function,
2355 just jump back to the beginning.
2356 It's unsafe if any auto variable in this function
2357 has its address taken; for simplicity,
2358 require stack frame to be empty. */
2359 if (optimize
&& retval_rhs
!= 0
2360 && frame_offset
== 0
2361 && TREE_CODE (retval_rhs
) == CALL_EXPR
2362 && TREE_CODE (TREE_OPERAND (retval_rhs
, 0)) == ADDR_EXPR
2363 && TREE_OPERAND (TREE_OPERAND (retval_rhs
, 0), 0) == current_function_decl
2364 /* Finish checking validity, and if valid emit code
2365 to set the argument variables for the new call. */
2366 && tail_recursion_args (TREE_OPERAND (retval_rhs
, 1),
2367 DECL_ARGUMENTS (current_function_decl
)))
2369 if (tail_recursion_label
== 0)
2371 tail_recursion_label
= gen_label_rtx ();
2372 emit_label_after (tail_recursion_label
,
2373 tail_recursion_reentry
);
2376 expand_goto_internal (NULL_TREE
, tail_recursion_label
, last_insn
);
2381 /* This optimization is safe if there are local cleanups
2382 because expand_null_return takes care of them.
2383 ??? I think it should also be safe when there is a cleanup label,
2384 because expand_null_return takes care of them, too.
2385 Any reason why not? */
2386 if (HAVE_return
&& cleanup_label
== 0
2387 && ! current_function_returns_pcc_struct
2388 && BRANCH_COST
<= 1)
2390 /* If this is return x == y; then generate
2391 if (x == y) return 1; else return 0;
2392 if we can do it with explicit return insns and branches are cheap,
2393 but not if we have the corresponding scc insn. */
2396 switch (TREE_CODE (retval_rhs
))
2422 case TRUTH_ANDIF_EXPR
:
2423 case TRUTH_ORIF_EXPR
:
2424 case TRUTH_AND_EXPR
:
2426 case TRUTH_NOT_EXPR
:
2427 case TRUTH_XOR_EXPR
:
2430 op0
= gen_label_rtx ();
2431 jumpifnot (retval_rhs
, op0
);
2432 expand_value_return (const1_rtx
);
2434 expand_value_return (const0_rtx
);
2443 #endif /* HAVE_return */
2445 /* If the result is an aggregate that is being returned in one (or more)
2446 registers, load the registers here. The compiler currently can't handle
2447 copying a BLKmode value into registers. We could put this code in a
2448 more general area (for use by everyone instead of just function
2449 call/return), but until this feature is generally usable it is kept here
2450 (and in expand_call). The value must go into a pseudo in case there
2451 are cleanups that will clobber the real return register. */
2454 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
2455 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
)
2457 int i
, bitpos
, xbitpos
;
2458 int big_endian_correction
= 0;
2459 int bytes
= int_size_in_bytes (TREE_TYPE (retval_rhs
));
2460 int n_regs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
2461 int bitsize
= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs
)),BITS_PER_WORD
);
2462 rtx
*result_pseudos
= (rtx
*) alloca (sizeof (rtx
) * n_regs
);
2463 rtx result_reg
, src
, dst
;
2464 rtx result_val
= expand_expr (retval_rhs
, NULL_RTX
, VOIDmode
, 0);
2465 enum machine_mode tmpmode
, result_reg_mode
;
2467 /* Structures whose size is not a multiple of a word are aligned
2468 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2469 machine, this means we must skip the empty high order bytes when
2470 calculating the bit offset. */
2471 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2472 big_endian_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
2475 /* Copy the structure BITSIZE bits at a time. */
2476 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2477 bitpos
< bytes
* BITS_PER_UNIT
;
2478 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2480 /* We need a new destination pseudo each time xbitpos is
2481 on a word boundary and when xbitpos == big_endian_correction
2482 (the first time through). */
2483 if (xbitpos
% BITS_PER_WORD
== 0
2484 || xbitpos
== big_endian_correction
)
2486 /* Generate an appropriate register. */
2487 dst
= gen_reg_rtx (word_mode
);
2488 result_pseudos
[xbitpos
/ BITS_PER_WORD
] = dst
;
2490 /* Clobber the destination before we move anything into it. */
2491 emit_insn (gen_rtx_CLOBBER (VOIDmode
, dst
));
	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
2496 if (bitpos
% BITS_PER_WORD
== 0)
2497 src
= operand_subword_force (result_val
,
2498 bitpos
/ BITS_PER_WORD
,
2501 /* Use bitpos for the source extraction (left justified) and
2502 xbitpos for the destination store (right justified). */
2503 store_bit_field (dst
, bitsize
, xbitpos
% BITS_PER_WORD
, word_mode
,
2504 extract_bit_field (src
, bitsize
,
2505 bitpos
% BITS_PER_WORD
, 1,
2506 NULL_RTX
, word_mode
,
2508 bitsize
/ BITS_PER_UNIT
,
2510 bitsize
/ BITS_PER_UNIT
, BITS_PER_WORD
);
2513 /* Find the smallest integer mode large enough to hold the
2514 entire structure and use that mode instead of BLKmode
2515 on the USE insn for the return register. */
2516 bytes
= int_size_in_bytes (TREE_TYPE (retval_rhs
));
2517 for (tmpmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2518 tmpmode
!= MAX_MACHINE_MODE
;
2519 tmpmode
= GET_MODE_WIDER_MODE (tmpmode
))
2521 /* Have we found a large enough mode? */
2522 if (GET_MODE_SIZE (tmpmode
) >= bytes
)
2526 /* No suitable mode found. */
2527 if (tmpmode
== MAX_MACHINE_MODE
)
2530 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl
)), tmpmode
);
2532 if (GET_MODE_SIZE (tmpmode
) < GET_MODE_SIZE (word_mode
))
2533 result_reg_mode
= word_mode
;
2535 result_reg_mode
= tmpmode
;
2536 result_reg
= gen_reg_rtx (result_reg_mode
);
2539 for (i
= 0; i
< n_regs
; i
++)
2540 emit_move_insn (operand_subword (result_reg
, i
, 0, result_reg_mode
),
2543 if (tmpmode
!= result_reg_mode
)
2544 result_reg
= gen_lowpart (tmpmode
, result_reg
);
2546 expand_value_return (result_reg
);
2550 && TREE_TYPE (retval_rhs
) != void_type_node
2551 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
)
2553 /* Calculate the return value into a pseudo reg. */
2554 val
= gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl
)));
2555 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), 0);
2556 val
= force_not_mem (val
);
2558 /* Return the calculated value, doing cleanups first. */
2559 expand_value_return (val
);
2563 /* No cleanups or no hard reg used;
2564 calculate value into hard return reg. */
2565 expand_expr (retval
, const0_rtx
, VOIDmode
, 0);
2567 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl
)));
2571 /* Return 1 if the end of the generated RTX is not a barrier.
2572 This means code already compiled can drop through. */
2575 drop_through_at_end_p ()
2577 rtx insn
= get_last_insn ();
2578 while (insn
&& GET_CODE (insn
) == NOTE
)
2579 insn
= PREV_INSN (insn
);
2580 return insn
&& GET_CODE (insn
) != BARRIER
;
2583 /* Emit code to alter this function's formal parms for a tail-recursive call.
2584 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2585 FORMALS is the chain of decls of formals.
2586 Return 1 if this can be done;
2587 otherwise return 0 and do not emit any code. */
2590 tail_recursion_args (actuals
, formals
)
2591 tree actuals
, formals
;
2593 register tree a
= actuals
, f
= formals
;
2595 register rtx
*argvec
;
2597 /* Check that number and types of actuals are compatible
2598 with the formals. This is not always true in valid C code.
2599 Also check that no formal needs to be addressable
2600 and that all formals are scalars. */
2602 /* Also count the args. */
2604 for (a
= actuals
, f
= formals
, i
= 0; a
&& f
; a
= TREE_CHAIN (a
), f
= TREE_CHAIN (f
), i
++)
2606 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a
)))
2607 != TYPE_MAIN_VARIANT (TREE_TYPE (f
)))
2609 if (GET_CODE (DECL_RTL (f
)) != REG
|| DECL_MODE (f
) == BLKmode
)
2612 if (a
!= 0 || f
!= 0)
2615 /* Compute all the actuals. */
2617 argvec
= (rtx
*) alloca (i
* sizeof (rtx
));
2619 for (a
= actuals
, i
= 0; a
; a
= TREE_CHAIN (a
), i
++)
2620 argvec
[i
] = expand_expr (TREE_VALUE (a
), NULL_RTX
, VOIDmode
, 0);
2622 /* Find which actual values refer to current values of previous formals.
2623 Copy each of them now, before any formal is changed. */
2625 for (a
= actuals
, i
= 0; a
; a
= TREE_CHAIN (a
), i
++)
2629 for (f
= formals
, j
= 0; j
< i
; f
= TREE_CHAIN (f
), j
++)
2630 if (reg_mentioned_p (DECL_RTL (f
), argvec
[i
]))
2631 { copy
= 1; break; }
2633 argvec
[i
] = copy_to_reg (argvec
[i
]);
2636 /* Store the values of the actuals into the formals. */
2638 for (f
= formals
, a
= actuals
, i
= 0; f
;
2639 f
= TREE_CHAIN (f
), a
= TREE_CHAIN (a
), i
++)
2641 if (GET_MODE (DECL_RTL (f
)) == GET_MODE (argvec
[i
]))
2642 emit_move_insn (DECL_RTL (f
), argvec
[i
]);
2644 convert_move (DECL_RTL (f
), argvec
[i
],
2645 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a
))));
2652 /* Generate the RTL code for entering a binding contour.
2653 The variables are declared one by one, by calls to `expand_decl'.
2655 EXIT_FLAG is nonzero if this construct should be visible to
2656 `exit_something'. */
2659 expand_start_bindings (exit_flag
)
2662 struct nesting
*thisblock
= ALLOC_NESTING ();
2663 rtx note
= emit_note (NULL_PTR
, NOTE_INSN_BLOCK_BEG
);
2665 /* Make an entry on block_stack for the block we are entering. */
2667 thisblock
->next
= block_stack
;
2668 thisblock
->all
= nesting_stack
;
2669 thisblock
->depth
= ++nesting_depth
;
2670 thisblock
->data
.block
.stack_level
= 0;
2671 thisblock
->data
.block
.cleanups
= 0;
2672 thisblock
->data
.block
.function_call_count
= 0;
2673 thisblock
->data
.block
.exception_region
= 0;
2674 thisblock
->data
.block
.target_temp_slot_level
= target_temp_slot_level
;
2676 thisblock
->data
.block
.conditional_code
= 0;
2677 thisblock
->data
.block
.last_unconditional_cleanup
= note
;
2678 thisblock
->data
.block
.cleanup_ptr
= &thisblock
->data
.block
.cleanups
;
2681 && !(block_stack
->data
.block
.cleanups
== NULL_TREE
2682 && block_stack
->data
.block
.outer_cleanups
== NULL_TREE
))
2683 thisblock
->data
.block
.outer_cleanups
2684 = tree_cons (NULL_TREE
, block_stack
->data
.block
.cleanups
,
2685 block_stack
->data
.block
.outer_cleanups
);
2687 thisblock
->data
.block
.outer_cleanups
= 0;
2688 thisblock
->data
.block
.label_chain
= 0;
2689 thisblock
->data
.block
.innermost_stack_block
= stack_block_stack
;
2690 thisblock
->data
.block
.first_insn
= note
;
2691 thisblock
->data
.block
.block_start_count
= ++block_start_count
;
2692 thisblock
->exit_label
= exit_flag
? gen_label_rtx () : 0;
2693 block_stack
= thisblock
;
2694 nesting_stack
= thisblock
;
2696 /* Make a new level for allocating stack slots. */
2700 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2701 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2702 expand_expr are made. After we end the region, we know that all
2703 space for all temporaries that were created by TARGET_EXPRs will be
2704 destroyed and their space freed for reuse. */
2707 expand_start_target_temps ()
2709 /* This is so that even if the result is preserved, the space
2710 allocated will be freed, as we know that it is no longer in use. */
2713 /* Start a new binding layer that will keep track of all cleanup
2714 actions to be performed. */
2715 expand_start_bindings (0);
2717 target_temp_slot_level
= temp_slot_level
;
2721 expand_end_target_temps ()
2723 expand_end_bindings (NULL_TREE
, 0, 0);
2725 /* This is so that even if the result is preserved, the space
2726 allocated will be freed, as we know that it is no longer in use. */
2730 /* Mark top block of block_stack as an implicit binding for an
2731 exception region. This is used to prevent infinite recursion when
2732 ending a binding with expand_end_bindings. It is only ever called
2733 by expand_eh_region_start, as that it the only way to create a
2734 block stack for a exception region. */
2737 mark_block_as_eh_region ()
2739 block_stack
->data
.block
.exception_region
= 1;
2740 if (block_stack
->next
2741 && block_stack
->next
->data
.block
.conditional_code
)
2743 block_stack
->data
.block
.conditional_code
2744 = block_stack
->next
->data
.block
.conditional_code
;
2745 block_stack
->data
.block
.last_unconditional_cleanup
2746 = block_stack
->next
->data
.block
.last_unconditional_cleanup
;
2747 block_stack
->data
.block
.cleanup_ptr
2748 = block_stack
->next
->data
.block
.cleanup_ptr
;
2752 /* True if we are currently emitting insns in an area of output code
2753 that is controlled by a conditional expression. This is used by
2754 the cleanup handling code to generate conditional cleanup actions. */
2757 conditional_context ()
2759 return block_stack
&& block_stack
->data
.block
.conditional_code
;
/* Mark the top block of block_stack as not being an implicit binding for an
2763 exception region. This is only ever done by expand_eh_region_end
2764 to let expand_end_bindings know that it is being called explicitly
2765 to end the binding layer for just the binding layer associated with
   the exception region; otherwise expand_end_bindings would try to
   end all implicit binding layers for exception regions, and then
   one normal binding layer.  */
2771 mark_block_as_not_eh_region ()
2773 block_stack
->data
.block
.exception_region
= 0;
2776 /* True if the top block of block_stack was marked as for an exception
2777 region by mark_block_as_eh_region. */
2782 return block_stack
&& block_stack
->data
.block
.exception_region
;
2785 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2786 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2790 remember_end_note (block
)
2791 register tree block
;
2793 BLOCK_END_NOTE (block
) = last_block_end_note
;
2794 last_block_end_note
= NULL_RTX
;
2797 /* Generate RTL code to terminate a binding contour.
2798 VARS is the chain of VAR_DECL nodes
2799 for the variables bound in this contour.
2800 MARK_ENDS is nonzero if we should put a note at the beginning
2801 and end of this binding contour.
2803 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2804 (That is true automatically if the contour has a saved stack level.) */
2807 expand_end_bindings (vars
, mark_ends
, dont_jump_in
)
2812 register struct nesting
*thisblock
;
2815 while (block_stack
->data
.block
.exception_region
)
2817 /* Because we don't need or want a new temporary level and
2818 because we didn't create one in expand_eh_region_start,
2819 create a fake one now to avoid removing one in
2820 expand_end_bindings. */
2823 block_stack
->data
.block
.exception_region
= 0;
2825 expand_end_bindings (NULL_TREE
, 0, 0);
2828 /* Since expand_eh_region_start does an expand_start_bindings, we
2829 have to first end all the bindings that were created by
2830 expand_eh_region_start. */
2832 thisblock
= block_stack
;
2835 for (decl
= vars
; decl
; decl
= TREE_CHAIN (decl
))
2836 if (! TREE_USED (decl
) && TREE_CODE (decl
) == VAR_DECL
2837 && ! DECL_IN_SYSTEM_HEADER (decl
)
2838 && DECL_NAME (decl
) && ! DECL_ARTIFICIAL (decl
))
2839 warning_with_decl (decl
, "unused variable `%s'");
2841 if (thisblock
->exit_label
)
2843 do_pending_stack_adjust ();
2844 emit_label (thisblock
->exit_label
);
2847 /* If necessary, make a handler for nonlocal gotos taking
2848 place in the function calls in this block. */
2849 if (function_call_count
!= thisblock
->data
.block
.function_call_count
2851 /* Make handler for outermost block
2852 if there were any nonlocal gotos to this function. */
2853 && (thisblock
->next
== 0 ? current_function_has_nonlocal_label
2854 /* Make handler for inner block if it has something
2855 special to do when you jump out of it. */
2856 : (thisblock
->data
.block
.cleanups
!= 0
2857 || thisblock
->data
.block
.stack_level
!= 0)))
2860 rtx afterward
= gen_label_rtx ();
2861 rtx handler_label
= gen_label_rtx ();
2862 rtx save_receiver
= gen_reg_rtx (Pmode
);
2865 /* Don't let jump_optimize delete the handler. */
2866 LABEL_PRESERVE_P (handler_label
) = 1;
2868 /* Record the handler address in the stack slot for that purpose,
2869 during this block, saving and restoring the outer value. */
2870 if (thisblock
->next
!= 0)
2872 emit_move_insn (nonlocal_goto_handler_slot
, save_receiver
);
2875 emit_move_insn (save_receiver
, nonlocal_goto_handler_slot
);
2876 insns
= get_insns ();
2878 emit_insns_before (insns
, thisblock
->data
.block
.first_insn
);
2882 emit_move_insn (nonlocal_goto_handler_slot
,
2883 gen_rtx_LABEL_REF (Pmode
, handler_label
));
2884 insns
= get_insns ();
2886 emit_insns_before (insns
, thisblock
->data
.block
.first_insn
);
2888 /* Jump around the handler; it runs only when specially invoked. */
2889 emit_jump (afterward
);
2890 emit_label (handler_label
);
2892 #ifdef HAVE_nonlocal_goto
2893 if (! HAVE_nonlocal_goto
)
2895 /* First adjust our frame pointer to its actual value. It was
2896 previously set to the start of the virtual area corresponding to
2897 the stacked variables when we branched here and now needs to be
2898 adjusted to the actual hardware fp value.
	 Assignments to virtual registers are converted by
2901 instantiate_virtual_regs into the corresponding assignment
2902 to the underlying register (fp in this case) that makes
2903 the original assignment true.
2904 So the following insn will actually be
2905 decrementing fp by STARTING_FRAME_OFFSET. */
2906 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
2908 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2909 if (fixed_regs
[ARG_POINTER_REGNUM
])
2911 #ifdef ELIMINABLE_REGS
2912 /* If the argument pointer can be eliminated in favor of the
2913 frame pointer, we don't need to restore it. We assume here
2914 that if such an elimination is present, it can always be used.
2915 This is the case on all known machines; if we don't make this
2916 assumption, we do unnecessary saving on many machines. */
2917 static struct elims
{int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
2920 for (i
= 0; i
< sizeof elim_regs
/ sizeof elim_regs
[0]; i
++)
2921 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
2922 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
2925 if (i
== sizeof elim_regs
/ sizeof elim_regs
[0])
2928 /* Now restore our arg pointer from the address at which it
2929 was saved in our stack frame.
	 If space hasn't been allocated for it yet, make some now.  */
2932 if (arg_pointer_save_area
== 0)
2933 arg_pointer_save_area
2934 = assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
2935 emit_move_insn (virtual_incoming_args_rtx
,
2936 /* We need a pseudo here, or else
2937 instantiate_virtual_regs_1 complains. */
2938 copy_to_reg (arg_pointer_save_area
));
2943 #ifdef HAVE_nonlocal_goto_receiver
2944 if (HAVE_nonlocal_goto_receiver
)
2945 emit_insn (gen_nonlocal_goto_receiver ());
2948 /* The handler expects the desired label address in the static chain
2949 register. It tests the address and does an appropriate jump
2950 to whatever label is desired. */
2951 for (link
= nonlocal_labels
; link
; link
= TREE_CHAIN (link
))
2952 /* Skip any labels we shouldn't be able to jump to from here. */
2953 if (! DECL_TOO_LATE (TREE_VALUE (link
)))
2955 rtx not_this
= gen_label_rtx ();
2956 rtx
this = gen_label_rtx ();
2957 do_jump_if_equal (static_chain_rtx
,
2958 gen_rtx_LABEL_REF (Pmode
, DECL_RTL (TREE_VALUE (link
))),
2960 emit_jump (not_this
);
2962 expand_goto (TREE_VALUE (link
));
2963 emit_label (not_this
);
2965 /* If label is not recognized, abort. */
2966 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "abort"), 0,
2969 emit_label (afterward
);
2972 /* Don't allow jumping into a block that has a stack level.
2973 Cleanups are allowed, though. */
2975 || thisblock
->data
.block
.stack_level
!= 0)
2977 struct label_chain
*chain
;
2979 /* Any labels in this block are no longer valid to go to.
2980 Mark them to cause an error message. */
2981 for (chain
= thisblock
->data
.block
.label_chain
; chain
; chain
= chain
->next
)
2983 DECL_TOO_LATE (chain
->label
) = 1;
2984 /* If any goto without a fixup came to this label,
2985 that must be an error, because gotos without fixups
2986 come from outside all saved stack-levels. */
2987 if (TREE_ADDRESSABLE (chain
->label
))
2988 error_with_decl (chain
->label
,
2989 "label `%s' used before containing binding contour");
2993 /* Restore stack level in effect before the block
2994 (only if variable-size objects allocated). */
2995 /* Perform any cleanups associated with the block. */
2997 if (thisblock
->data
.block
.stack_level
!= 0
2998 || thisblock
->data
.block
.cleanups
!= 0)
3000 /* Only clean up here if this point can actually be reached. */
3001 int reachable
= GET_CODE (get_last_insn ()) != BARRIER
;
3003 /* Don't let cleanups affect ({...}) constructs. */
3004 int old_expr_stmts_for_value
= expr_stmts_for_value
;
3005 rtx old_last_expr_value
= last_expr_value
;
3006 tree old_last_expr_type
= last_expr_type
;
3007 expr_stmts_for_value
= 0;
3009 /* Do the cleanups. */
3010 expand_cleanups (thisblock
->data
.block
.cleanups
, NULL_TREE
, 0, reachable
);
3012 do_pending_stack_adjust ();
3014 expr_stmts_for_value
= old_expr_stmts_for_value
;
3015 last_expr_value
= old_last_expr_value
;
3016 last_expr_type
= old_last_expr_type
;
3018 /* Restore the stack level. */
3020 if (reachable
&& thisblock
->data
.block
.stack_level
!= 0)
3022 emit_stack_restore (thisblock
->next
? SAVE_BLOCK
: SAVE_FUNCTION
,
3023 thisblock
->data
.block
.stack_level
, NULL_RTX
);
3024 if (nonlocal_goto_handler_slot
!= 0)
3025 emit_stack_save (SAVE_NONLOCAL
, &nonlocal_goto_stack_level
,
3029 /* Any gotos out of this block must also do these things.
3030 Also report any gotos with fixups that came to labels in this
3032 fixup_gotos (thisblock
,
3033 thisblock
->data
.block
.stack_level
,
3034 thisblock
->data
.block
.cleanups
,
3035 thisblock
->data
.block
.first_insn
,
3039 /* Mark the beginning and end of the scope if requested.
3040 We do this now, after running cleanups on the variables
3041 just going out of scope, so they are in scope for their cleanups. */
3044 last_block_end_note
= emit_note (NULL_PTR
, NOTE_INSN_BLOCK_END
);
3046 /* Get rid of the beginning-mark if we don't make an end-mark. */
3047 NOTE_LINE_NUMBER (thisblock
->data
.block
.first_insn
) = NOTE_INSN_DELETED
;
3049 /* If doing stupid register allocation, make sure lives of all
3050 register variables declared here extend thru end of scope. */
3053 for (decl
= vars
; decl
; decl
= TREE_CHAIN (decl
))
3055 rtx rtl
= DECL_RTL (decl
);
3056 if (TREE_CODE (decl
) == VAR_DECL
&& rtl
!= 0)
3060 /* Restore the temporary level of TARGET_EXPRs. */
3061 target_temp_slot_level
= thisblock
->data
.block
.target_temp_slot_level
;
3063 /* Restore block_stack level for containing block. */
3065 stack_block_stack
= thisblock
->data
.block
.innermost_stack_block
;
3066 POPSTACK (block_stack
);
3068 /* Pop the stack slot nesting and free any slots at this level. */
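/* Illustrative sketch (not part of GNU CC): the calls a front end makes
   to expand a compound statement `{ int v = init; body; }'.  DECL is the
   VAR_DECL for `v' (with DECL_INITIAL set); BODY is assumed to be an
   expression statement.  */
#if 0
static void
example_expand_compound (decl, body)
     tree decl, body;
{
  expand_start_bindings (0);		/* open the binding contour */
  expand_decl (decl);			/* allocate a reg or stack slot for `v' */
  expand_decl_init (decl);		/* store DECL_INITIAL into it */
  expand_expr_stmt (body);		/* the statements of the block */
  expand_end_bindings (decl, 1, 0);	/* run cleanups, close the contour */
}
#endif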
3074 /* Generate RTL for the automatic variable declaration DECL.
3075 (Other kinds of declarations are simply ignored if seen here.) */
3081 struct nesting
*thisblock
= block_stack
;
3084 type
= TREE_TYPE (decl
);
3086 /* Only automatic variables need any expansion done.
3087 Static and external variables, and external functions,
3088 will be handled by `assemble_variable' (called from finish_decl).
3089 TYPE_DECL and CONST_DECL require nothing.
3090 PARM_DECLs are handled in `assign_parms'. */
3092 if (TREE_CODE (decl
) != VAR_DECL
)
3094 if (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
3097 /* Create the RTL representation for the variable. */
3099 if (type
== error_mark_node
)
3100 DECL_RTL (decl
) = gen_rtx_MEM (BLKmode
, const0_rtx
);
3101 else if (DECL_SIZE (decl
) == 0)
3102 /* Variable with incomplete type. */
3104 if (DECL_INITIAL (decl
) == 0)
3105 /* Error message was already done; now avoid a crash. */
3106 DECL_RTL (decl
) = assign_stack_temp (DECL_MODE (decl
), 0, 1);
3108 /* An initializer is going to decide the size of this array.
3109 Until we know the size, represent its address with a reg. */
3110 DECL_RTL (decl
) = gen_rtx_MEM (BLKmode
, gen_reg_rtx (Pmode
));
3111 MEM_IN_STRUCT_P (DECL_RTL (decl
)) = AGGREGATE_TYPE_P (type
);
3113 else if (DECL_MODE (decl
) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
3116 && !(flag_float_store
3117 && TREE_CODE (type
) == REAL_TYPE
)
3118 && ! TREE_THIS_VOLATILE (decl
)
3119 && ! TREE_ADDRESSABLE (decl
)
3120 && (DECL_REGISTER (decl
) || ! obey_regdecls
))
3122 /* Automatic variable that can go in a register. */
3123 int unsignedp
= TREE_UNSIGNED (type
);
3124 enum machine_mode reg_mode
3125 = promote_mode (type
, DECL_MODE (decl
), &unsignedp
, 0);
3127 DECL_RTL (decl
) = gen_reg_rtx (reg_mode
);
3128 mark_user_reg (DECL_RTL (decl
));
3130 if (TREE_CODE (type
) == POINTER_TYPE
)
3131 mark_reg_pointer (DECL_RTL (decl
),
3132 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl
)))
3136 else if (TREE_CODE (DECL_SIZE (decl
)) == INTEGER_CST
3137 && ! (flag_stack_check
&& ! STACK_CHECK_BUILTIN
3138 && (TREE_INT_CST_HIGH (DECL_SIZE (decl
)) != 0
3139 || (TREE_INT_CST_LOW (DECL_SIZE (decl
))
3140 > STACK_CHECK_MAX_VAR_SIZE
* BITS_PER_UNIT
))))
3142 /* Variable of fixed size that goes on the stack. */
3146 /* If we previously made RTL for this decl, it must be an array
3147 whose size was determined by the initializer.
3148 The old address was a register; set that register now
3149 to the proper address. */
3150 if (DECL_RTL (decl
) != 0)
3152 if (GET_CODE (DECL_RTL (decl
)) != MEM
3153 || GET_CODE (XEXP (DECL_RTL (decl
), 0)) != REG
)
3155 oldaddr
= XEXP (DECL_RTL (decl
), 0);
3159 = assign_stack_temp (DECL_MODE (decl
),
3160 ((TREE_INT_CST_LOW (DECL_SIZE (decl
))
3161 + BITS_PER_UNIT
- 1)
3164 MEM_IN_STRUCT_P (DECL_RTL (decl
)) = AGGREGATE_TYPE_P (TREE_TYPE (decl
));
3166 /* Set alignment we actually gave this decl. */
3167 DECL_ALIGN (decl
) = (DECL_MODE (decl
) == BLKmode
? BIGGEST_ALIGNMENT
3168 : GET_MODE_BITSIZE (DECL_MODE (decl
)));
3172 addr
= force_operand (XEXP (DECL_RTL (decl
), 0), oldaddr
);
3173 if (addr
!= oldaddr
)
3174 emit_move_insn (oldaddr
, addr
);
3177 /* If this is a memory ref that contains aggregate components,
3178 mark it as such for cse and loop optimize. */
3179 MEM_IN_STRUCT_P (DECL_RTL (decl
)) = AGGREGATE_TYPE_P (TREE_TYPE (decl
));
3181 /* If this is in memory because of -ffloat-store,
3182 set the volatile bit, to prevent optimizations from
3183 undoing the effects. */
3184 if (flag_float_store
&& TREE_CODE (type
) == REAL_TYPE
)
3185 MEM_VOLATILE_P (DECL_RTL (decl
)) = 1;
3189 /* Dynamic-size object: must push space on the stack. */
3193 /* Record the stack pointer on entry to block, if have
3194 not already done so. */
3195 if (thisblock
->data
.block
.stack_level
== 0)
3197 do_pending_stack_adjust ();
3198 emit_stack_save (thisblock
->next
? SAVE_BLOCK
: SAVE_FUNCTION
,
3199 &thisblock
->data
.block
.stack_level
,
3200 thisblock
->data
.block
.first_insn
);
3201 stack_block_stack
= thisblock
;
3204 /* Compute the variable's size, in bytes. */
3205 size
= expand_expr (size_binop (CEIL_DIV_EXPR
,
3207 size_int (BITS_PER_UNIT
)),
3208 NULL_RTX
, VOIDmode
, 0);
3211 /* Allocate space on the stack for the variable. Note that
3212 DECL_ALIGN says how the variable is to be aligned and we
3213 cannot use it to conclude anything about the alignment of
3215 address
= allocate_dynamic_stack_space (size
, NULL_RTX
,
3216 TYPE_ALIGN (TREE_TYPE (decl
)));
3218 /* Reference the variable indirect through that rtx. */
3219 DECL_RTL (decl
) = gen_rtx_MEM (DECL_MODE (decl
), address
);
3221 /* If this is a memory ref that contains aggregate components,
3222 mark it as such for cse and loop optimize. */
3223 MEM_IN_STRUCT_P (DECL_RTL (decl
)) = AGGREGATE_TYPE_P (TREE_TYPE (decl
));
3225 /* Indicate the alignment we actually gave this variable. */
3226 #ifdef STACK_BOUNDARY
3227 DECL_ALIGN (decl
) = STACK_BOUNDARY
;
3229 DECL_ALIGN (decl
) = BIGGEST_ALIGNMENT
;
3233 if (TREE_THIS_VOLATILE (decl
))
3234 MEM_VOLATILE_P (DECL_RTL (decl
)) = 1;
3235 #if 0 /* A variable is not necessarily unchanging
3236 just because it is const. RTX_UNCHANGING_P
3237 means no change in the function,
3238 not merely no change in the variable's scope.
3239 It is correct to set RTX_UNCHANGING_P if the variable's scope
3240 is the whole function. There's no convenient way to test that. */
3241 if (TREE_READONLY (decl
))
3242 RTX_UNCHANGING_P (DECL_RTL (decl
)) = 1;
3245 /* If doing stupid register allocation, make sure life of any
3246 register variable starts here, at the start of its scope. */
3249 use_variable (DECL_RTL (decl
));
3254 /* Emit code to perform the initialization of a declaration DECL. */
3257 expand_decl_init (decl
)
3260 int was_used
= TREE_USED (decl
);
3262 /* If this is a CONST_DECL, we don't have to generate any code, but
3263 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3264 to be set while in the obstack containing the constant. If we don't
3265 do this, we can lose if we have functions nested three deep and the middle
3266 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3267 the innermost function is the first to expand that STRING_CST. */
3268 if (TREE_CODE (decl
) == CONST_DECL
)
3270 if (DECL_INITIAL (decl
) && TREE_CONSTANT (DECL_INITIAL (decl
)))
3271 expand_expr (DECL_INITIAL (decl
), NULL_RTX
, VOIDmode
,
3272 EXPAND_INITIALIZER
);
3276 if (TREE_STATIC (decl
))
3279 /* Compute and store the initial value now. */
3281 if (DECL_INITIAL (decl
) == error_mark_node
)
3283 enum tree_code code
= TREE_CODE (TREE_TYPE (decl
));
3284 if (code
== INTEGER_TYPE
|| code
== REAL_TYPE
|| code
== ENUMERAL_TYPE
3285 || code
== POINTER_TYPE
)
3286 expand_assignment (decl
, convert (TREE_TYPE (decl
), integer_zero_node
),
3290 else if (DECL_INITIAL (decl
) && TREE_CODE (DECL_INITIAL (decl
)) != TREE_LIST
)
3292 emit_line_note (DECL_SOURCE_FILE (decl
), DECL_SOURCE_LINE (decl
));
3293 expand_assignment (decl
, DECL_INITIAL (decl
), 0, 0);
3297 /* Don't let the initialization count as "using" the variable. */
3298 TREE_USED (decl
) = was_used
;
3300 /* Free any temporaries we made while initializing the decl. */
3301 preserve_temp_slots (NULL_RTX
);
3305 /* CLEANUP is an expression to be executed at exit from this binding contour;
3306 for example, in C++, it might call the destructor for this variable.
3308 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3309 CLEANUP multiple times, and have the correct semantics. This
3310 happens in exception handling, for gotos, returns, breaks that
3311 leave the current scope.
3313 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3314 that is not associated with any particular variable. */
3317 expand_decl_cleanup (decl
, cleanup
)
3320 struct nesting
*thisblock
= block_stack
;
3322 /* Error if we are not in any block. */
3326 /* Record the cleanup if there is one. */
3332 tree
*cleanups
= &thisblock
->data
.block
.cleanups
;
3333 int cond_context
= conditional_context ();
3337 rtx flag
= gen_reg_rtx (word_mode
);
3342 emit_move_insn (flag
, const0_rtx
);
3343 set_flag_0
= get_insns ();
3346 thisblock
->data
.block
.last_unconditional_cleanup
3347 = emit_insns_after (set_flag_0
,
3348 thisblock
->data
.block
.last_unconditional_cleanup
);
3350 emit_move_insn (flag
, const1_rtx
);
3352 /* All cleanups must be on the function_obstack. */
3353 push_obstacks_nochange ();
3354 resume_temporary_allocation ();
3356 cond
= build_decl (VAR_DECL
, NULL_TREE
, type_for_mode (word_mode
, 1));
3357 DECL_RTL (cond
) = flag
;
3359 /* Conditionalize the cleanup. */
3360 cleanup
= build (COND_EXPR
, void_type_node
,
3361 truthvalue_conversion (cond
),
3362 cleanup
, integer_zero_node
);
3363 cleanup
= fold (cleanup
);
3367 cleanups
= thisblock
->data
.block
.cleanup_ptr
;
3370 /* All cleanups must be on the function_obstack. */
3371 push_obstacks_nochange ();
3372 resume_temporary_allocation ();
3373 cleanup
= unsave_expr (cleanup
);
3376 t
= *cleanups
= temp_tree_cons (decl
, cleanup
, *cleanups
);
3379 /* If this block has a cleanup, it belongs in stack_block_stack. */
3380 stack_block_stack
= thisblock
;
3387 /* If this was optimized so that there is no exception region for the
3388 cleanup, then mark the TREE_LIST node, so that we can later tell
3389 if we need to call expand_eh_region_end. */
3390 if (! using_eh_for_cleanups_p
3391 || expand_eh_region_start_tree (decl
, cleanup
))
3392 TREE_ADDRESSABLE (t
) = 1;
3393 /* If that started a new EH region, we're in a new block. */
3394 thisblock
= block_stack
;
3401 thisblock
->data
.block
.last_unconditional_cleanup
3402 = emit_insns_after (seq
,
3403 thisblock
->data
.block
.last_unconditional_cleanup
);
3407 thisblock
->data
.block
.last_unconditional_cleanup
3409 thisblock
->data
.block
.cleanup_ptr
= &thisblock
->data
.block
.cleanups
;
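/* Illustrative sketch (not part of GNU CC): how a C++-like front end would
   register a destructor as a cleanup for a local variable.  The helper
   build_destructor_call is hypothetical; the point is only that the
   cleanup tree is handed to expand_decl_cleanup right after the variable
   itself is expanded, while its binding contour is still open.  */
#if 0
static void
example_expand_local_with_dtor (decl)
     tree decl;
{
  tree cleanup;

  expand_decl (decl);				/* give DECL its rtl */
  expand_decl_init (decl);			/* run its initializer */
  cleanup = build_destructor_call (decl);	/* hypothetical front-end helper */
  if (! expand_decl_cleanup (decl, cleanup))
    error ("cleanup needed but no binding contour is open");	/* hypothetical diagnostic */
}
#endif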
3415 /* Like expand_decl_cleanup, but suppress generating an exception handler
3416 to perform the cleanup. */
3419 expand_decl_cleanup_no_eh (decl
, cleanup
)
3422 int save_eh
= using_eh_for_cleanups_p
;
3425 using_eh_for_cleanups_p
= 0;
3426 result
= expand_decl_cleanup (decl
, cleanup
);
3427 using_eh_for_cleanups_p
= save_eh
;
3432 /* Arrange for the top element of the dynamic cleanup chain to be
3433 popped if we exit the current binding contour. DECL is the
3434 associated declaration, if any, otherwise NULL_TREE. If the
3435 current contour is left via an exception, then __sjthrow will pop
3436 the top element off the dynamic cleanup chain. The code that
3437 avoids doing the action we push into the cleanup chain in the
3438 exceptional case is contained in expand_cleanups.
3440 This routine is only used by expand_eh_region_start, and that is
3441 the only way in which an exception region should be started. This
3442 routine is only used when using the setjmp/longjmp codegen method
3443 for exception handling. */
3446 expand_dcc_cleanup (decl
)
3449 struct nesting
*thisblock
= block_stack
;
3452 /* Error if we are not in any block. */
3456 /* Record the cleanup for the dynamic handler chain. */
3458 /* All cleanups must be on the function_obstack. */
3459 push_obstacks_nochange ();
3460 resume_temporary_allocation ();
3461 cleanup
= make_node (POPDCC_EXPR
);
3464 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3465 thisblock
->data
.block
.cleanups
3466 = temp_tree_cons (decl
, cleanup
, thisblock
->data
.block
.cleanups
);
3468 /* If this block has a cleanup, it belongs in stack_block_stack. */
3469 stack_block_stack
= thisblock
;
3473 /* Arrange for the top element of the dynamic handler chain to be
3474 popped if we exit the current binding contour. DECL is the
3475 associated declaration, if any, otherwise NULL_TREE. If the current
3476 contour is left via an exception, then __sjthrow will pop the top
3477 element off the dynamic handler chain. The code that avoids doing
3478 the action we push into the handler chain in the exceptional case
3479 is contained in expand_cleanups.
3481 This routine is only used by expand_eh_region_start, and that is
3482 the only way in which an exception region should be started. This
3483 routine is only used when using the setjmp/longjmp codegen method
3484 for exception handling. */
3487 expand_dhc_cleanup (decl
)
3490 struct nesting
*thisblock
= block_stack
;
3493 /* Error if we are not in any block. */
3497 /* Record the cleanup for the dynamic handler chain. */
3499 /* All cleanups must be on the function_obstack. */
3500 push_obstacks_nochange ();
3501 resume_temporary_allocation ();
3502 cleanup
= make_node (POPDHC_EXPR
);
3505 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3506 thisblock
->data
.block
.cleanups
3507 = temp_tree_cons (decl
, cleanup
, thisblock
->data
.block
.cleanups
);
3509 /* If this block has a cleanup, it belongs in stack_block_stack. */
3510 stack_block_stack
= thisblock
;
3514 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3515 DECL_ELTS is the list of elements that belong to DECL's type.
3516 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3519 expand_anon_union_decl (decl
, cleanup
, decl_elts
)
3520 tree decl
, cleanup
, decl_elts
;
3522 struct nesting
*thisblock
= block_stack
;
3526 expand_decl_cleanup (decl
, cleanup
);
3527 x
= DECL_RTL (decl
);
3531 tree decl_elt
= TREE_VALUE (decl_elts
);
3532 tree cleanup_elt
= TREE_PURPOSE (decl_elts
);
3533 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (decl_elt
));
3535 /* Propagate the union's alignment to the elements. */
3536 DECL_ALIGN (decl_elt
) = DECL_ALIGN (decl
);
3538 /* If the element has BLKmode and the union doesn't, the union is
3539 aligned such that the element doesn't need to have BLKmode, so
3540 change the element's mode to the appropriate one for its size. */
3541 if (mode
== BLKmode
&& DECL_MODE (decl
) != BLKmode
)
3542 DECL_MODE (decl_elt
) = mode
3543 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt
)),
3546 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3547 instead create a new MEM rtx with the proper mode. */
3548 if (GET_CODE (x
) == MEM
)
3550 if (mode
== GET_MODE (x
))
3551 DECL_RTL (decl_elt
) = x
;
3554 DECL_RTL (decl_elt
) = gen_rtx_MEM (mode
, copy_rtx (XEXP (x
, 0)));
3555 MEM_IN_STRUCT_P (DECL_RTL (decl_elt
)) = MEM_IN_STRUCT_P (x
);
3556 RTX_UNCHANGING_P (DECL_RTL (decl_elt
)) = RTX_UNCHANGING_P (x
);
3559 else if (GET_CODE (x
) == REG
)
3561 if (mode
== GET_MODE (x
))
3562 DECL_RTL (decl_elt
) = x
;
3564 DECL_RTL (decl_elt
) = gen_rtx_SUBREG (mode
, x
, 0);
3569 /* Record the cleanup if there is one. */
3572 thisblock
->data
.block
.cleanups
3573 = temp_tree_cons (decl_elt
, cleanup_elt
,
3574 thisblock
->data
.block
.cleanups
);
3576 decl_elts
= TREE_CHAIN (decl_elts
);
3580 /* Expand a list of cleanups LIST.
3581 Elements may be expressions or may be nested lists.
3583 If DONT_DO is nonnull, then any list-element
3584 whose TREE_PURPOSE matches DONT_DO is omitted.
3585 This is sometimes used to avoid a cleanup associated with
3586 a value that is being returned out of the scope.
3588 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3589 goto and handle protection regions specially in that case.
3591 If REACHABLE, we emit code, otherwise just inform the exception handling
3592 code about this finalization. */
3595 expand_cleanups (list
, dont_do
, in_fixup
, reachable
)
3602 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
3603 if (dont_do
== 0 || TREE_PURPOSE (tail
) != dont_do
)
3605 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
3606 expand_cleanups (TREE_VALUE (tail
), dont_do
, in_fixup
, reachable
);
3611 tree cleanup
= TREE_VALUE (tail
);
3613 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3614 if (TREE_CODE (cleanup
) != POPDHC_EXPR
3615 && TREE_CODE (cleanup
) != POPDCC_EXPR
3616 /* See expand_eh_region_start_tree for this case. */
3617 && ! TREE_ADDRESSABLE (tail
))
3619 cleanup
= protect_with_terminate (cleanup
);
3620 expand_eh_region_end (cleanup
);
3626 /* Cleanups may be run multiple times. For example,
3627 when exiting a binding contour, we expand the
3628 cleanups associated with that contour. When a goto
3629 within that binding contour has a target outside that
3630 contour, it will expand all cleanups from its scope to
3631 the target. Though the cleanups are expanded multiple
3632 times, the control paths are non-overlapping so the
3633 cleanups will not be executed twice. */
3635 /* We may need to protect fixups with rethrow regions. */
3636 int protect
= (in_fixup
&& ! TREE_ADDRESSABLE (tail
));
3638 expand_fixup_region_start ();
3639 expand_expr (TREE_VALUE (tail
), const0_rtx
, VOIDmode
, 0);
3641 expand_fixup_region_end (TREE_VALUE (tail
));
/* Mark the context we are emitting RTL for as a conditional
3649 context, so that any cleanup actions we register with
3650 expand_decl_init will be properly conditionalized when those
3651 cleanup actions are later performed. Must be called before any
3652 expression (tree) is expanded that is within a conditional context. */
3655 start_cleanup_deferral ()
3657 /* block_stack can be NULL if we are inside the parameter list. It is
3658 OK to do nothing, because cleanups aren't possible here. */
3660 ++block_stack
->data
.block
.conditional_code
;
3663 /* Mark the end of a conditional region of code. Because cleanup
3664 deferrals may be nested, we may still be in a conditional region
   after we end the currently deferred cleanups; only after we end all
   deferred cleanups are we back in unconditional code.  */
3669 end_cleanup_deferral ()
3671 /* block_stack can be NULL if we are inside the parameter list. It is
3672 OK to do nothing, because cleanups aren't possible here. */
3674 --block_stack
->data
.block
.conditional_code
;
3677 /* Move all cleanups from the current block_stack
3678 to the containing block_stack, where they are assumed to
3679 have been created. If anything can cause a temporary to
3680 be created, but not expanded for more than one level of
3681 block_stacks, then this code will have to change. */
3686 struct nesting
*block
= block_stack
;
3687 struct nesting
*outer
= block
->next
;
3689 outer
->data
.block
.cleanups
3690 = chainon (block
->data
.block
.cleanups
,
3691 outer
->data
.block
.cleanups
);
3692 block
->data
.block
.cleanups
= 0;
3696 last_cleanup_this_contour ()
3698 if (block_stack
== 0)
3701 return block_stack
->data
.block
.cleanups
;
3704 /* Return 1 if there are any pending cleanups at this point.
3705 If THIS_CONTOUR is nonzero, check the current contour as well.
3706 Otherwise, look only at the contours that enclose this one. */
3709 any_pending_cleanups (this_contour
)
3712 struct nesting
*block
;
3714 if (block_stack
== 0)
3717 if (this_contour
&& block_stack
->data
.block
.cleanups
!= NULL
)
3719 if (block_stack
->data
.block
.cleanups
== 0
3720 && block_stack
->data
.block
.outer_cleanups
== 0)
3723 for (block
= block_stack
->next
; block
; block
= block
->next
)
3724 if (block
->data
.block
.cleanups
!= 0)
3730 /* Enter a case (Pascal) or switch (C) statement.
3731 Push a block onto case_stack and nesting_stack
3732 to accumulate the case-labels that are seen
3733 and to record the labels generated for the statement.
3735 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3736 Otherwise, this construct is transparent for `exit_something'.
3738 EXPR is the index-expression to be dispatched on.
3739 TYPE is its nominal type. We could simply convert EXPR to this type,
3740 but instead we take short cuts. */
3743 expand_start_case (exit_flag
, expr
, type
, printname
)
3749 register struct nesting
*thiscase
= ALLOC_NESTING ();
3751 /* Make an entry on case_stack for the case we are entering. */
3753 thiscase
->next
= case_stack
;
3754 thiscase
->all
= nesting_stack
;
3755 thiscase
->depth
= ++nesting_depth
;
3756 thiscase
->exit_label
= exit_flag
? gen_label_rtx () : 0;
3757 thiscase
->data
.case_stmt
.case_list
= 0;
3758 thiscase
->data
.case_stmt
.index_expr
= expr
;
3759 thiscase
->data
.case_stmt
.nominal_type
= type
;
3760 thiscase
->data
.case_stmt
.default_label
= 0;
3761 thiscase
->data
.case_stmt
.num_ranges
= 0;
3762 thiscase
->data
.case_stmt
.printname
= printname
;
3763 thiscase
->data
.case_stmt
.seenlabel
= 0;
3764 case_stack
= thiscase
;
3765 nesting_stack
= thiscase
;
3767 do_pending_stack_adjust ();
3769 /* Make sure case_stmt.start points to something that won't
3770 need any transformation before expand_end_case. */
3771 if (GET_CODE (get_last_insn ()) != NOTE
)
3772 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
3774 thiscase
->data
.case_stmt
.start
= get_last_insn ();
3776 start_cleanup_deferral ();
3780 /* Start a "dummy case statement" within which case labels are invalid
3781 and are not connected to any larger real case statement.
3782 This can be used if you don't want to let a case statement jump
3783 into the middle of certain kinds of constructs. */
void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
  start_cleanup_deferral ();
}

/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  end_cleanup_deferral ();
  POPSTACK (case_stack);
}
/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  else
    return 0;
}
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */
int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
        {
          *duplicate = case_stack->data.case_stmt.default_label;
          return 2;
        }
      case_stack->data.case_stmt.default_label = label;
    }
  else
    return add_case_node (value, value, label, duplicate);

  expand_label (label);
  return 0;
}
/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */
int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 && tree_int_cst_lt (value2, value1))
    return 4;

  value1 = (*converter) (nominal_type, value1);

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (nominal_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate);
}
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.  */
static int
add_case_node (low, high, label, duplicate)
     tree low, high;
     tree label;
     tree *duplicate;
{
  struct case_node *p, **q, *r;

  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q) != 0)
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
        q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
        q = &p->right;

      else
        {
          /* We have an overlap; this is an error.  */
          *duplicate = p->code_label;
          return 2;
        }
    }

  /* Add this label to the chain, and succeed.
     Copy LOW, HIGH so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  r = (struct case_node *) oballoc (sizeof (struct case_node));
  r->low = copy_node (low);

  /* If the bounds are equal, turn this into the one-value case.  */

  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    {
      r->high = copy_node (high);
      case_stack->data.case_stmt.num_ranges++;
    }

  r->code_label = label;
  expand_label (label);

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;
  /* Rebalance the AVL tree, propagating the growth caused by the new node
     from its parent P back toward the root.  */

  while (p)
    {
      struct case_node *s;

      if (r == p->left)
        {
          int b;

          if (! (b = p->balance))
            /* Growth propagation from left side.  */
            p->balance = -1;
          else if (b < 0)
            {
              if (r->balance < 0)
                {
                  /* Single (right) rotation.  */
                  if (p->left = s = r->right)
                    s->parent = p;
                  r->right = p;
                  p->balance = 0;
                  r->balance = 0;
                  s = p->parent;
                  p->parent = r;
                  if (r->parent = s)
                    {
                      if (s->left == p)
                        s->left = r;
                      else
                        s->right = r;
                    }
                  else
                    case_stack->data.case_stmt.case_list = r;
                }
              else
                /* r->balance == +1 */
                {
                  /* Double (left-right) rotation.  */
                  struct case_node *t = r->right;

                  if (p->left = s = t->right)
                    s->parent = p;
                  t->right = p;
                  if (r->right = s = t->left)
                    s->parent = r;
                  t->left = r;
                  p->balance = t->balance < 0;
                  r->balance = -(t->balance > 0);
                  t->balance = 0;
                  s = p->parent;
                  p->parent = t;
                  r->parent = t;
                  if (t->parent = s)
                    {
                      if (s->left == p)
                        s->left = t;
                      else
                        s->right = t;
                    }
                  else
                    case_stack->data.case_stmt.case_list = t;
                }
              break;
            }
          else
            {
              /* p->balance == +1; growth of left side balances the node.  */
              p->balance = 0;
              break;
            }
        }
      else
        /* r == p->right */
        {
          int b;

          if (! (b = p->balance))
            /* Growth propagation from right side.  */
            p->balance = 1;
          else if (b > 0)
            {
              if (r->balance > 0)
                {
                  /* Single (left) rotation.  */
                  if (p->right = s = r->left)
                    s->parent = p;
                  r->left = p;
                  p->balance = 0;
                  r->balance = 0;
                  s = p->parent;
                  p->parent = r;
                  if (r->parent = s)
                    {
                      if (s->left == p)
                        s->left = r;
                      else
                        s->right = r;
                    }
                  else
                    case_stack->data.case_stmt.case_list = r;
                }
              else
                /* r->balance == -1 */
                {
                  /* Double (right-left) rotation.  */
                  struct case_node *t = r->left;

                  if (p->right = s = t->left)
                    s->parent = p;
                  t->left = p;
                  if (r->left = s = t->right)
                    s->parent = r;
                  t->right = r;
                  p->balance = -(t->balance > 0);
                  r->balance = t->balance < 0;
                  t->balance = 0;
                  s = p->parent;
                  p->parent = t;
                  r->parent = t;
                  if (t->parent = s)
                    {
                      if (s->left == p)
                        s->left = t;
                      else
                        s->right = t;
                    }
                  else
                    case_stack->data.case_stmt.case_list = t;
                }
              break;
            }
          else
            {
              /* p->balance == -1; growth of right side balances the node.  */
              p->balance = 0;
              break;
            }
        }

      r = p;
      p = p->parent;
    }

  return 0;
}
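/* For exposition only: a sketch of the overlap test used while descending
   the tree above, restated with plain integers instead of INTEGER_CST
   trees.  The function `ranges_overlap' is hypothetical and is not used
   anywhere in the compiler.  */
#if 0
static int
ranges_overlap (low1, high1, low2, high2)
     long low1, high1, low2, high2;
{
  /* Two case ranges are disjoint only if one lies entirely above the
     other; anything else is reported as a duplicate (return code 2).  */
  if (high1 < low2 || high2 < low1)
    return 0;
  return 1;
}
#endif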
/* Returns the number of possible values of TYPE.
   Returns -1 if the number is unknown or variable.
   Returns -2 if the number does not fit in a HOST_WIDE_INT.
   Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
   do not increase monotonically (there may be duplicates);
   to 1 if the values increase monotonically, but not always by 1;
   otherwise sets it to 0.  */
static HOST_WIDE_INT
all_cases_count (type, spareness)
     tree type;
     int *spareness;
{
  tree t;
  HOST_WIDE_INT count, count_high = 0;

  *spareness = 0;

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      count = 2;
      break;

    case CHAR_TYPE:
      count = 1 << BITS_PER_UNIT;
      break;

    default:
    case INTEGER_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
          || TYPE_MAX_VALUE (type) == NULL
          || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
        return -1;
      else
        {
          /* count
               = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
               - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
             but with overflow checking.  */
          tree mint = TYPE_MIN_VALUE (type);
          tree maxt = TYPE_MAX_VALUE (type);
          HOST_WIDE_INT lo, hi;
          neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
                      &lo, &hi);
          add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
                      lo, hi, &lo, &hi);
          add_double (lo, hi, 1, 0, &lo, &hi);
          if (hi != 0 || lo < 0)
            return -2;
          return lo;
        }

    case ENUMERAL_TYPE:
      count = 0;
      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
        {
          if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
              || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
              || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
              != TREE_INT_CST_LOW (TREE_VALUE (t)))
            *spareness = 1;
          count++;
        }
      if (*spareness == 1)
        {
          tree prev = TREE_VALUE (TYPE_VALUES (type));
          for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
            {
              if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
                {
                  *spareness = 2;
                  break;
                }
              prev = TREE_VALUE (t);
            }
        }
      break;
    }
  return count;
}
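/* For exposition only: the two-word arithmetic used in the INTEGER_TYPE
   case above, isolated into a hypothetical helper.  `lo'/`hi' play the same
   roles as above: the count max - min + 1 is formed as max + (-min) + 1 and
   rejected if it no longer fits in a single non-negative word.  */
#if 0
static HOST_WIDE_INT
count_values_or_fail (min_lo, min_hi, max_lo, max_hi)
     HOST_WIDE_INT min_lo, min_hi, max_lo, max_hi;
{
  HOST_WIDE_INT lo, hi;

  neg_double (min_lo, min_hi, &lo, &hi);         /* lo:hi = -min          */
  add_double (max_lo, max_hi, lo, hi, &lo, &hi); /* lo:hi = max - min     */
  add_double (lo, hi, 1, 0, &lo, &hi);           /* lo:hi = max - min + 1 */

  if (hi != 0 || lo < 0)
    return -2;                                   /* does not fit          */
  return lo;
}
#endif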
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
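/* For exposition only: how the two macros above are meant to be used.  A
   case value whose offset from the type's minimum is I occupies bit I of a
   char array.  The function `bitarray_example' is hypothetical and is not
   used anywhere in the compiler.  */
#if 0
static void
bitarray_example ()
{
  unsigned char seen[4];        /* room for 4 * HOST_BITS_PER_CHAR cases */

  bzero ((char *) seen, sizeof seen);
  BITARRAY_SET (seen, 5);       /* mark the case with offset 5 as handled */

  if (! BITARRAY_TEST (seen, 6))
    warning ("enumeration value with offset 6 not handled in switch");
}
#endif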
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */
static void
mark_seen_cases (type, cases_seen, count, sparseness)
     tree type;
     unsigned char *cases_seen;
     long count;
     int sparseness;
{
  tree next_node_to_try = NULL_TREE;
  long next_node_offset = 0;

  register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
  tree val = make_node (INTEGER_CST);
  TREE_TYPE (val) = type;

  if (! root)
    ;
  else if (sparseness == 2)
    {
      tree t;
      HOST_WIDE_INT xlo;

      /* This less efficient loop is only needed to handle
         duplicate case values (multiple enum constants
         with the same value).  */
      TREE_TYPE (val) = TREE_TYPE (root->low);
      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
           t = TREE_CHAIN (t), xlo++)
        {
          TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
          TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
          n = root;
          do
            {
              /* Keep going past elements distinctly greater than VAL.  */
              if (tree_int_cst_lt (val, n->low))
                n = n->left;
              /* or distinctly less than VAL.  */
              else if (tree_int_cst_lt (n->high, val))
                n = n->right;
              else
                {
                  /* We have found a matching range.  */
                  BITARRAY_SET (cases_seen, xlo);
                  break;
                }
            }
          while (n);
        }
    }
  else
    {
      if (root->left)
        case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
      for (n = root; n; n = n->right)
        {
          TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
          TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
          while ( ! tree_int_cst_lt (n->high, val))
            {
              /* Calculate (into xlo) the "offset" of the integer (val).
                 The element with lowest value has offset 0, the next smallest
                 element has offset 1, etc.  */

              HOST_WIDE_INT xlo, xhi;
              tree t;
              if (sparseness && TYPE_VALUES (type) != NULL_TREE)
                {
                  /* The TYPE_VALUES will be in increasing order, so
                     starting searching where we last ended.  */
                  t = next_node_to_try;
                  xlo = next_node_offset;
                  xhi = 0;
                  for (;;)
                    {
                      if (t == NULL_TREE)
                        {
                          t = TYPE_VALUES (type);
                          xlo = 0;
                        }
                      if (tree_int_cst_equal (val, TREE_VALUE (t)))
                        {
                          next_node_to_try = TREE_CHAIN (t);
                          next_node_offset = xlo + 1;
                          break;
                        }
                      xlo++;
                      t = TREE_CHAIN (t);
                      if (t == next_node_to_try)
                        {
                          xlo = -1;
                          break;
                        }
                    }
                }
              else
                {
                  t = TYPE_MIN_VALUE (type);
                  if (t)
                    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
                                &xlo, &xhi);
                  else
                    xlo = xhi = 0;
                  add_double (xlo, xhi,
                              TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
                              &xlo, &xhi);
                }

              if (xhi == 0 && xlo >= 0 && xlo < count)
                BITARRAY_SET (cases_seen, xlo);
              add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
                          1, 0,
                          &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
            }
        }
    }
}
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */
static void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;

  /* True iff the selector type is a numbered set mode.  */
  int sparseness = 0;

  /* The number of possible selector values.  */
  HOST_WIDE_INT size;

  /* For each possible selector value, a one iff it has been matched
     by a case value alternative.  */
  unsigned char *cases_seen;

  /* The allocated size of cases_seen, in chars.  */
  long bytes_needed;
  long i;

  size = all_cases_count (type, &sparseness);
  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;

  if (size > 0 && size < 600000
      /* We deliberately use malloc here - not xmalloc.  */
      && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
    {
      tree v = TYPE_VALUES (type);
      bzero (cases_seen, bytes_needed);

      /* The time complexity of this code is normally O(N), where
         N being the number of members in the enumerated type.
         However, if type is a ENUMERAL_TYPE whose values do not
         increase monotonically, O(N*log(N)) time may be needed.  */

      mark_seen_cases (type, cases_seen, size, sparseness);

      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
        {
          if (BITARRAY_TEST (cases_seen, i) == 0)
            warning ("enumeration value `%s' not handled in switch",
                     IDENTIFIER_POINTER (TREE_PURPOSE (v)));
        }

      free (cases_seen);
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (case_stack->data.case_stmt.case_list
      && case_stack->data.case_stmt.case_list->left)
    case_stack->data.case_stmt.case_list
      = case_tree2list (case_stack->data.case_stmt.case_list, 0);

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
           chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
           chain = TREE_CHAIN (chain))
        ;

      if (!chain)
        {
          if (TYPE_NAME (type) == 0)
            warning ("case value `%d' not in enumerated type",
                     TREE_INT_CST_LOW (n->low));
          else
            warning ("case value `%d' not in enumerated type `%s'",
                     TREE_INT_CST_LOW (n->low),
                     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                          == IDENTIFIER_NODE)
                                         ? TYPE_NAME (type)
                                         : DECL_NAME (TYPE_NAME (type))));
        }

      if (!tree_int_cst_equal (n->low, n->high))
        {
          for (chain = TYPE_VALUES (type);
               chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
               chain = TREE_CHAIN (chain))
            ;

          if (!chain)
            {
              if (TYPE_NAME (type) == 0)
                warning ("case value `%d' not in enumerated type",
                         TREE_INT_CST_LOW (n->high));
              else
                warning ("case value `%d' not in enumerated type `%s'",
                         TREE_INT_CST_LOW (n->high),
                         IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                              == IDENTIFIER_NODE)
                                             ? TYPE_NAME (type)
                                             : DECL_NAME (TYPE_NAME (type))));
            }
        }
    }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
           (*l)->right != 0;
           l = &(*l)->right)
        ;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
#endif /* 0 */
}
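/* For exposition only: a hypothetical user-level example of the situations
   the checks above diagnose (assuming the enum-coverage warnings are
   enabled).  The first missing enumerator draws "enumeration value `green'
   not handled in switch"; the stray constant draws "case value `3' not in
   enumerated type `color'".  */
#if 0
enum color { red, green, blue };

static int
example (c)
     enum color c;
{
  switch (c)                    /* no default label, so coverage is checked */
    {
    case red:  return 0;
    case blue: return 1;        /* `green' is missing: first warning */
    case 3:    return 2;        /* 3 is not an enumerator: second warning */
    }
  return -1;
}
#endif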
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */
void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  int unsignedp;

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);

  do_pending_stack_adjust ();
  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
         enumeration literals are covered by the cases.
         No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
          && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
          && TREE_CODE (index_expr) != INTEGER_CST)
        check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
        {
          rtx insn;
          for (insn = get_last_insn ();
               insn != case_stack->data.case_stmt.start;
               insn = PREV_INSN (insn))
            if (GET_CODE (insn) != NOTE
                && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
              {
                warning ("unreachable code at beginning of %s",
                         case_stack->data.case_stmt.printname);
                break;
              }
        }
      /* If we don't have a default-label, create one here,
         after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
        {
          thiscase->data.case_stmt.default_label
            = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
          expand_label (thiscase->data.case_stmt.default_label);
        }
      default_label = label_rtx (thiscase->data.case_stmt.default_label);
      before_case = get_last_insn ();

      if (thiscase->data.case_stmt.case_list
          && thiscase->data.case_stmt.case_list->left)
        thiscase->data.case_stmt.case_list
          = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
         Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
        {
          /* Check low and high label values are integers.  */
          if (TREE_CODE (n->low) != INTEGER_CST)
            abort ();
          if (TREE_CODE (n->high) != INTEGER_CST)
            abort ();

          n->low = convert (index_type, n->low);
          n->high = convert (index_type, n->high);

          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */

          if (count++ == 0)
            {
              minval = n->low;
              maxval = n->high;
            }
          else
            {
              if (INT_CST_LT (n->low, minval))
                minval = n->low;
              if (INT_CST_LT (maxval, n->high))
                maxval = n->high;
            }
          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))
            count++;
        }

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
        range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
        {
          expand_expr (index_expr, const0_rtx, VOIDmode, 0);
          emit_queue ();
          emit_jump (default_label);
        }
      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
         bounds, this means extra overhead for dispatch tables
         which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
      else if (TREE_INT_CST_HIGH (range) != 0
               || count < CASE_VALUES_THRESHOLD
               || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
                   > 10 * count)
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
               /* If flag_pic, we couldn't reference the table anyway.  */
               || flag_pic
#endif
               || TREE_CODE (index_expr) == INTEGER_CST
               /* These will reduce to a constant.  */
               || (TREE_CODE (index_expr) == CALL_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
                   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
                   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
               || (TREE_CODE (index_expr) == COMPOUND_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
        {
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

          /* If the index is a short or char that we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
                  == CODE_FOR_nothing))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (cmp_optab->handlers[(int) wider_mode].insn_code
                    != CODE_FOR_nothing)
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          emit_queue ();
          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)
            {
              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)
                {
                  index_expr
                    = build_int_2 (INTVAL (index),
                                   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (index_type, index_expr);
                }

              /* For constant index expressions we need only
                 issue a unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimisation phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
                if (! tree_int_cst_lt (index_expr, n->low)
                    && ! tree_int_cst_lt (n->high, index_expr))
                  break;

              if (n)
                emit_jump (label_rtx (n->code_label));
              else
                emit_jump (default_label);
            }
          else
            {
              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */

              use_cost_table
                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list,
                                  NULL_PTR);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, index_type);
              emit_jump_if_reachable (default_label);
            }
        }
      else
        {
          int win = 0;
#ifdef HAVE_casesi
          if (HAVE_casesi)
            {
              enum machine_mode index_mode = SImode;
              int index_bits = GET_MODE_BITSIZE (index_mode);
              rtx op1, op2;
              enum machine_mode op_mode;

              /* Convert the index to SImode.  */
              if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
                  > GET_MODE_BITSIZE (index_mode))
                {
                  enum machine_mode omode = TYPE_MODE (index_type);
                  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

                  /* We must handle the endpoints in the original mode.  */
                  index_expr = build (MINUS_EXPR, index_type,
                                      index_expr, minval);
                  minval = integer_zero_node;
                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
                  emit_jump_insn (gen_bltu (default_label));
                  /* Now we can safely truncate.  */
                  index = convert_to_mode (index_mode, index, 0);
                }
              else
                {
                  if (TYPE_MODE (index_type) != index_mode)
                    {
                      index_expr = convert (type_for_size (index_bits, 0),
                                            index_expr);
                      index_type = TREE_TYPE (index_expr);
                    }

                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                }
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
                  (index, op_mode))
                index = copy_to_mode_reg (op_mode, index);

              op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
                  (op1, op_mode))
                op1 = copy_to_mode_reg (op_mode, op1);

              op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
                  (op2, op_mode))
                op2 = copy_to_mode_reg (op_mode, op2);

              emit_jump_insn (gen_casesi (index, op1, op2,
                                          table_label, default_label));
              win = 1;
            }
#endif
#ifdef HAVE_tablejump
          if (! win && HAVE_tablejump)
            {
              index_expr = convert (thiscase->data.case_stmt.nominal_type,
                                    fold (build (MINUS_EXPR, index_type,
                                                 index_expr, minval)));
              index_type = TREE_TYPE (index_expr);
              index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              do_tablejump (index, TYPE_MODE (index_type),
                            expand_expr (range, NULL_RTX, VOIDmode, 0),
                            table_label, default_label);
              win = 1;
            }
#endif
          if (! win)
            abort ();
          /* Get table of labels to jump to, in order of case index.  */

          ncases = TREE_INT_CST_LOW (range) + 1;
          labelvec = (rtx *) alloca (ncases * sizeof (rtx));
          bzero ((char *) labelvec, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
            {
              register HOST_WIDE_INT i
                = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

              while (1)
                {
                  labelvec[i]
                    = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
                  if (i + TREE_INT_CST_LOW (orig_minval)
                      == TREE_INT_CST_LOW (n->high))
                    break;
                  i++;
                }
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

          /* Output the table */
          emit_label (table_label);

          if (CASE_VECTOR_PC_RELATIVE || flag_pic)
            emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
                                                   gen_rtx_LABEL_REF (Pmode, table_label),
                                                   gen_rtvec_v (ncases, labelvec)));
          else
            emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
                                              gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);
#else
          emit_barrier ();
#endif
        }

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
                     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
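/* For exposition only: the essence of the strategy choice made above,
   restated with plain integers.  The function `use_dispatch_table' is
   hypothetical; `range' stands for max - min of the case values, `count'
   for the number of comparisons a branch sequence would need, and
   `threshold' plays the role of CASE_VALUES_THRESHOLD.  */
#if 0
static int
use_dispatch_table (range, count, threshold)
     unsigned long range;
     int count, threshold;
{
  /* Few cases, or a value range much wider than the number of cases,
     favor a tree of conditional branches; otherwise emit a jump table.  */
  if (count < threshold || range > 10 * (unsigned long) count)
    return 0;
  return 1;
}
#endif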
/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if (left = node->left)
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
        emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

        chars above space:      16
        digits:                 16
        default:                12
        space, punct:           8
        tab:                    4
        newline:                2
        other "\" chars:        1
        remaining chars:        0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
        {
          if (isalnum (i))
            cost_table[i] = 16;
          else if (ispunct (i))
            cost_table[i] = 8;
          else if (iscntrl (i))
            cost_table[i] = -1;
        }

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
        if (cost_table[i] < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
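/* For exposition only: the "looks like text" test above, restated for a
   plain array of label values.  The function `labels_look_like_text' is
   hypothetical; it returns 1 when every value lies in [-1, 127] and none is
   an unusual control character, mirroring the checks on the case list.  */
#if 0
static int
labels_look_like_text (vals, nvals)
     long *vals;
     int nvals;
{
  int i;

  for (i = 0; i < nvals; i++)
    {
      if (vals[i] < -1 || vals[i] > 127)
        return 0;
      if (vals[i] >= 0 && iscntrl ((int) vals[i])
          && vals[i] != '\t' && vals[i] != '\n' && vals[i] != '\f'
          && vals[i] != '\v' && vals[i] != '\b' && vals[i] != '\0')
        return 0;
    }
  return 1;
}
#endif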
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
                 || (lb != 0 && lb2 != 0
                     && simplejump_p (lb)
                     && simplejump_p (lb2)
                     && rtx_equal_p (SET_SRC (PATTERN (lb)),
                                     SET_SRC (PATTERN (lb2)))))
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost,
                 Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
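/* For exposition only: the splitting rule above applied to a plain sorted
   array of case values.  The function `balance_indices' is hypothetical;
   the middle element (by count, or by accumulated cost when a cost table
   is in use) becomes the pivot, and the two halves are balanced
   recursively.  */
#if 0
static void
balance_indices (lo, hi, depth)
     int lo, hi, depth;
{
  int mid;

  if (lo > hi)
    return;
  mid = lo + (hi - lo) / 2;     /* pivot: bisect by number of entries */
  /* Emit the comparison for element MID at this DEPTH, then recurse.  */
  balance_indices (lo, mid - 1, depth + 1);
  balance_indices (mid + 1, hi, depth + 1);
}
#endif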
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));
  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }
      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}