/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
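
/* As an illustration (a sketch only, not code from any particular front
   end), a parser expanding the C statement `if (x > 0) y = 1;' would
   make roughly this sequence of calls:

	expand_start_cond (cond_tree, 0);   cond_tree is the tree for x > 0
	expand_expr_stmt (assign_tree);     the then-clause y = 1
	expand_end_cond ();

   Each call appends the RTL for its part of the construct to the
   current insn stream.  */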
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;
/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;
/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;
/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;
/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;
/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;
/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;
/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
extern void (*interim_eh_hook) PROTO((tree));
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
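
/* As an illustration (with made-up values), for the C statement

	switch (i) { case 1: ... case 3: ... case 4: ... }

   the labels first form a chain linked through RIGHT, in ascending
   order 1, 3, 4, each node having equal LOW and HIGH.  If no branch
   table is emitted, balance_case_nodes might make 3 the root with 1 as
   its LEFT child and 4 as its RIGHT child, and emit_case_nodes then
   walks that tree emitting compare-and-jump insns.  */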
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
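
/* For instance (illustrative only), while expanding the body of

	while (...) { if (...) { int v[n]; ... } }

   nesting_stack holds three entries chained through `all', innermost
   first: the binding contour declaring `v' (also on block_stack), the
   conditional (also on cond_stack), and the loop (also on loop_stack).  */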
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;
/* If any new stacks are added here, add them to POPSTACKS too.  */
/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;
/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;
/* Chain of all pending loops.  */
struct nesting *loop_stack;
/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;
/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;
/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)						\
do { struct nesting *target = STACK;				\
     struct nesting *this;					\
     do { this = nesting_stack;					\
	  if (loop_stack == this)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == this)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == this)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == this)			\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == this)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = this->all;				\
	  obstack_free (&stmt_obstack, this); }			\
     while (this != target); } while (0)
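
/* For example (illustrative), expand_end_cond finishes by doing
   POPSTACK (cond_stack), which frees its own entry and also unwinds any
   inner entries still left on nesting_stack, keeping all five stacks in
   agreement.  */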
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
static void expand_goto_internal PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
					   struct bc_label *, tree));
static int expand_fixup PROTO((tree, rtx, rtx));
static void bc_expand_fixup PROTO((enum bytecode_opcode,
				   struct bc_label *, int));
static void fixup_gotos PROTO((struct nesting *, rtx, tree,
			       rtx, int));
static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
				  rtx, int));
static void bc_expand_start_cond PROTO((tree, int));
static void bc_expand_end_cond PROTO((void));
static void bc_expand_start_else PROTO((void));
static void bc_expand_end_loop PROTO((void));
static void bc_expand_end_bindings PROTO((tree, int, int));
static void bc_expand_decl PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init PROTO((tree));
static void expand_null_return_1 PROTO((rtx, int));
static void expand_value_return PROTO((rtx));
static int tail_recursion_args PROTO((tree, tree));
static void expand_cleanups PROTO((tree, tree, int, int));
static void bc_expand_start_case PROTO((struct nesting *, tree,
					tree, char *));
static int bc_pushcase PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case PROTO((tree));
static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs PROTO((case_node_ptr));
static void group_case_nodes PROTO((case_node_ptr));
static void balance_case_nodes PROTO((case_node_ptr *,
				      case_node_ptr));
static int node_has_low_bound PROTO((case_node_ptr, tree));
static int node_has_high_bound PROTO((case_node_ptr, tree));
static int node_is_bounded PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable PROTO((rtx));
static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
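
/* Example (illustrative only): the nonlocal case above arises for GNU C
   nested functions, e.g.

	void outer ()
	{
	  __label__ done;
	  void inner () { goto done; }    a goto out of a nested function
	  inner ();
	done: ;
	}

   When the goto inside `inner' is expanded, decl_function_context shows
   that the label's context is `outer', not the current function, so the
   code above restores outer's frame and stack pointers before jumping.  */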
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (output_bytecode)
    {
      /* NOTICE!  If a bytecode instruction other than `jump' is needed,
	 then the caller has to call bc_expand_goto_internal()
	 directly.  This is rather an exceptional case, and there aren't
	 that many places where this is necessary.  */
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)
	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
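
/* Example (illustrative only): a fixup is created for a forward goto
   such as

	{
	  int v[n];	this block has a stack level
	  goto out;	label not yet defined: expand_fixup records it
	}
      out: ;

   The jump must restore the stack pointer saved for the block around
   `v', so the insns to do that are inserted later, by fixup_gotos, once
   `out' has been defined.  */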
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      f->before_jump
		= emit_insns_after (cleanup_insns, f->before_jump);

	      TREE_VALUE (lists) = 0;
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      int j;
      int found_equal = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    error ("output operand constraint contains `+'");
	    return;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  if (TYPE_MODE (type) == BLKmode)
	    {
	      output_rtx[i] = assign_stack_temp (BLKmode,
						 int_size_in_bytes (type), 0);
	      MEM_IN_STRUCT_P (output_rtx[i]) = AGGREGATE_TYPE_P (type);
	    }
	  else
	    output_rtx[i] = gen_reg_rtx (TYPE_MODE (type));

	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	XVECEXP (body, 3, i)
	  = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		       XVECEXP (body, 3, i));
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, BLKmode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      /* Ignore unknown register, error already signalled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
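
/* Example (illustrative only; the template and constraints are made up):
   for the GNU C statement

	asm volatile ("add %1,%2,%0" : "=r" (d) : "r" (a), "r" (b) : "memory");

   OUTPUTS is a one-element list whose TREE_PURPOSE is the string "=r"
   and whose TREE_VALUE is the lvalue `d'; INPUTS has two "r" elements;
   CLOBBERS names `memory', which decode_reg_name maps to -4; and VOL is
   nonzero because of `volatile'.  The result is a PARALLEL containing
   one SET whose source is an ASM_OPERANDS rtx, plus a
   (clobber (mem:BLK (scratch))).  */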
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
	abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case SAVE_EXPR:
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
    warn:
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* When generating bytecode just note down the stack depth */
  if (output_bytecode)
    return (build_int_2 (stack_depth, 0));

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence_for_rtl_expr (t);
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  if (output_bytecode)
    {
      /* At this point, all expressions have been evaluated in order.
	 However, all expression values have been popped when evaluated,
	 which means we have to recover the last expression value.  This is
	 the last value removed by means of a `drop' instruction.  Instead
	 of adding code to inhibit dropping the last expression value, it
	 is here recovered by undoing the `drop'.  Since `drop' is
	 equivalent to `adjustackSI [1]', it can be undone with `adjstackSI
	 [-1]'.  */

      bc_adjust_stack (-1);

      if (!last_expr_type)
	last_expr_type = void_type_node;

      t = make_node (RTL_EXPR);
      TREE_TYPE (t) = last_expr_type;
      RTL_EXPR_RTL (t) = NULL;
      RTL_EXPR_SEQUENCE (t) = NULL;

      /* Don't consider deleting this expr or containing exprs at tree level.  */
      TREE_THIS_VOLATILE (t) = 1;

      last_expr_type = 0;
      return t;
    }

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
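
/* A sketch of the intended call protocol for the two routines above,
   as a front end might use them to expand the GNU C construct
   `({ stmt1; stmt2; expr; })' (illustrative only; the exact parser
   hooks vary by language front end):

	tree t = expand_start_stmt_expr ();
	... expand stmt1, stmt2 and expr as expression statements ...
	t = expand_end_stmt_expr (t);

   T then carries the type and RTL value of EXPR, the last expression
   statement in the group.  */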
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  if (output_bytecode)
    bc_expand_start_cond (cond, exitflag);
  else
    do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  if (output_bytecode)
    {
      bc_expand_start_else ();
      return;
    }

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* After calling expand_start_else, turn this "else" into an "else if"
   by providing another condition.  */

void
expand_elseif (cond)
     tree cond;
{
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  if (output_bytecode)
    bc_expand_end_cond ();
  else
    {
      do_pending_stack_adjust ();
      if (thiscond->data.cond.next_label)
	emit_label (thiscond->data.cond.next_label);
      if (thiscond->data.cond.endif_label)
	emit_label (thiscond->data.cond.endif_label);
    }

  POPSTACK (cond_stack);
}
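
/* A sketch of the call protocol for the conditional routines above,
   for a source construct like `if (a) ...; else if (b) ...; else ...;'
   (illustrative only):

	expand_start_cond (a, 0);
	... expand the then-clause ...
	expand_start_elseif (b);
	... expand the else-if clause ...
	expand_start_else ();
	... expand the else-clause ...
	expand_end_cond ();  */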
/* Generate code for the start of an if-then.  COND is the expression
   whose truth is to be tested; if EXITFLAG is nonzero this conditional
   is to be visible to exit_something.  It is assumed that the caller
   has pushed the previous context on the cond stack.  */

static void
bc_expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = cond_stack;

  thiscond->data.case_stmt.nominal_type = cond;
  if (! exitflag)
    thiscond->exit_label = gen_label_rtx ();
  bc_expand_expr (cond);
  bc_emit_bytecode (xjumpifnot);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Generate the label for the end of an if with
   an else clause.  */

static void
bc_expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
}
/* Generate code for the start of the else- clause of
   an if-then-else.  */

static void
bc_expand_start_else ()
{
  struct nesting *thiscond = cond_stack;

  thiscond->data.cond.endif_label = thiscond->exit_label;
  thiscond->exit_label = gen_label_rtx ();
  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
}
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.alt_end_label = 0;
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  if (output_bytecode)
    {
      bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
      return thisloop;
    }

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}
/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  if (output_bytecode)
    {
      bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
      return;
    }

  do_pending_stack_adjust ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
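
/* A sketch of how a front end might expand `for (; cond; inc) body'
   with the loop routines above (illustrative only; a `while' loop
   would use plain expand_start_loop, since its continue-point is the
   loop top):

	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body; `continue' jumps to the continuation point ...
	expand_loop_continue_here ();
	... expand inc ...
	expand_end_loop ();  */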
static void
bc_expand_end_loop ()
{
  struct nesting *thisloop = loop_stack;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
  POPSTACK (loop_stack);
}
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  register rtx insn;
  register rtx start_label;
  rtx last_test_insn = 0;
  int num_insns = 0;

  if (output_bytecode)
    {
      bc_expand_end_loop ();
      return;
    }

  insn = get_last_insn ();
  start_label = loop_stack->data.loop.start_label;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

  if (optimize
      &&
      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
	    break;

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    break;

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
	    num_insns++;

	  if (last_test_insn && num_insns > 30)
	    break;

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
			== loop_stack->data.loop.end_label)
		       || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
			   == loop_stack->data.loop.alt_end_label)))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			   == loop_stack->data.loop.end_label)
			  || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			      == loop_stack->data.loop.alt_end_label)))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && ((XEXP (SET_SRC (PATTERN (insn)), 0)
		   == loop_stack->data.loop.end_label)
		  || (XEXP (SET_SRC (PATTERN (insn)), 0)
		      == loop_stack->data.loop.alt_end_label)))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
}
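
/* Roughly, the reordering above rewrites a loop laid out as

	start: if (exit-cond) goto end;  ...body...  goto start;  end:

   into

	goto start;  newstart: ...body...
	start: if (exit-cond) goto end;  goto newstart;  end:

   so that the exit test sits next to the jump back, where the jump
   optimizer can combine the two into a single conditional jump per
   iteration.  (An approximate sketch; the actual transformation works
   on the insn chain via reorder_insns.)  */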
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop (whichloop)
     struct nesting *whichloop;
{
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}
/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;

  if (output_bytecode)
    {
      bc_expand_expr (cond);
      bc_expand_goto_internal (xjumpifnot,
			       BYTECODE_BC_LABEL (whichloop->exit_label),
			       NULL_TREE);
    }
  else
    {
      /* In order to handle fixups, we actually create a conditional jump
	 around an unconditional branch to exit the loop.  If fixups are
	 necessary, they go before the unconditional branch.  */

      rtx label = gen_label_rtx ();
      rtx last_insn;

      do_jump (cond, NULL_RTX, label);
      last_insn = get_last_insn ();
      if (GET_CODE (last_insn) == CODE_LABEL)
	whichloop->data.loop.alt_end_label = last_insn;
      expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
			    NULL_RTX);
      emit_label (label);
    }

  return 1;
}
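
/* So a `while (cond) body' loop is typically expanded as
   (a sketch of the expected calls, not a definitive recipe):

	expand_start_loop (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_end_loop ();

   where `break' inside the body becomes expand_exit_loop (0) and
   `continue' becomes expand_continue_loop (0).  */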
/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

int
preserve_subexpressions_p ()
{
  rtx insn;

  if (flag_expensive_optimizations)
    return 1;

  if (optimize == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
}
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something ()
{
  struct nesting *n;

  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
	return 1;
      }

  return 0;
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  struct nesting *block = block_stack;
  rtx last_insn = 0;

  if (output_bytecode)
    {
      bc_emit_instruction (ret);
      return;
    }

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (val)
     rtx val;
{
  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode mode
	= promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
			&unsignedp, 1);

      if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
	convert_move (return_reg, val, unsignedp);
      else
#endif
	emit_move_insn (return_reg, val);
    }
  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx (USE, VOIDmode, return_reg));

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

static void
expand_null_return_1 (last_insn, use_goto)
     rtx last_insn;
     int use_goto;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
    {
      if (end_label == 0)
	end_label = return_label = gen_label_rtx ();
      expand_goto_internal (NULL_TREE, end_label, last_insn);
      return;
    }

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
#ifdef HAVE_return
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
      return;
    }
#endif

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (retval)
     tree retval;
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  register rtx val = 0;
  register rtx op0;
  tree retval_rhs;
  int cleanups;
  struct nesting *block;

  /* Bytecode returns are quite simple, just leave the result on the
     arithmetic stack.  */
  if (output_bytecode)
    {
      bc_expand_expr (retval);
      bc_emit_instruction (ret);
      return;
    }

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  /* This is not sufficient.  We also need to watch for cleanups of the
     expression we are about to expand.  Unfortunately, we cannot know
     if it has cleanups until we expand it, and we want to change how we
     expand it depending upon if we need cleanups.  We can't win.  */
  cleanups = any_pending_cleanups (1);

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      tree expr;

      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 1));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      emit_label (label);

      expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 2));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      return;
    }

  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
      /* Finish checking validity, and if valid emit code
	 to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
			      DECL_ARGUMENTS (current_function_decl)))
    {
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      emit_queue ();
      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
      emit_barrier ();
      return;
    }

#ifdef HAVE_return
  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct
      && BRANCH_COST <= 1)
    {
      /* If this is  return x == y;  then generate
	 if (x == y) return 1; else return 0;
	 if we can do it with explicit return insns and
	 branches are cheap.  */
      if (retval_rhs)
	switch (TREE_CODE (retval_rhs))
	  {
	  case EQ_EXPR:
	  case NE_EXPR:
	  case GT_EXPR:
	  case GE_EXPR:
	  case LT_EXPR:
	  case LE_EXPR:
	  case TRUTH_ANDIF_EXPR:
	  case TRUTH_ORIF_EXPR:
	  case TRUTH_AND_EXPR:
	  case TRUTH_OR_EXPR:
	  case TRUTH_NOT_EXPR:
	  case TRUTH_XOR_EXPR:
	    op0 = gen_label_rtx ();
	    jumpifnot (retval_rhs, op0);
	    expand_value_return (const1_rtx);
	    emit_label (op0);
	    expand_value_return (const0_rtx);
	    return;
	  }
    }
#endif /* HAVE_return */

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  The value must go into a pseudo in case there
     are cleanups that will clobber the real return register.  */

  if (retval_rhs != 0
      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      int i;
      int big_endian_correction = 0;
      int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
      rtx result_reg;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      /* Structures smaller than a word are aligned to the least significant
	 byte (to the right).  On a BYTES_BIG_ENDIAN machine, this means we
	 must skip the empty high order bytes when calculating the bit
	 offset.  */
      if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
	big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

      for (i = 0; i < n_regs; i++)
	{
	  rtx reg = gen_reg_rtx (word_mode);
	  rtx word = operand_subword_force (result_val, i, BLKmode);
	  int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
	  int bitpos;

	  result_pseudos[i] = reg;

	  /* Clobber REG and move each partword into it.  Ensure we don't
	     go past the end of the structure.  Note that the loop below
	     works because we've already verified that padding and
	     endianness are compatible.  */
	  emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));

	  for (bitpos = 0;
	       bitpos < BITS_PER_WORD && bytes > 0;
	       bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
	    {
	      int xbitpos = bitpos + big_endian_correction;

	      store_bit_field (reg, bitsize, xbitpos, word_mode,
			       extract_bit_field (word, bitsize, bitpos, 1,
						  NULL_RTX, word_mode,
						  word_mode,
						  bitsize / BITS_PER_UNIT,
						  BITS_PER_WORD),
			       bitsize / BITS_PER_UNIT, BITS_PER_WORD);
	    }
	}

      /* Find the smallest integer mode large enough to hold the
	 entire structure and use that mode instead of BLKmode
	 on the USE insn for the return register.  */
      bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != MAX_MACHINE_MODE;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      if (tmpmode == MAX_MACHINE_MODE)
	abort ();

      PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      /* Now that the value is in pseudos, copy it to the result reg(s).  */
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (cleanups
	   && retval_rhs != 0
	   && TREE_TYPE (retval_rhs) != void_type_node
	   && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      /* Calculate the return value into a pseudo reg.  */
      val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      /* All temporaries have now been used.  */
      free_temp_slots ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      emit_queue ();
      free_temp_slots ();
      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
    }
}
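
/* To illustrate the COND_EXPR distribution performed above:  a
   statement like `return p ? f (x) : g (y);' is expanded roughly as
   `if (p) return f (x); else return g (y);', so that each arm can be
   examined separately for the tail-recursion optimization (a sketch
   of the effect at source level, not of the RTL emitted).  */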
/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

int
drop_through_at_end_p ()
{
  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;
}
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */

static int
tail_recursion_args (actuals, formals)
     tree actuals, formals;
{
  register tree a = actuals, f = formals;
  register int i;
  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      int copy = 0;
      register int j;
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  { copy = 1; break; }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	convert_move (DECL_RTL (f), argvec[i],
		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
    }

  return 1;
}
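
/* For illustration, given a function such as

	int f (n, acc) int n, acc;
	{ ... return f (n - 1, n * acc); ... }

   tail_recursion_args first checks that both actuals have the types
   and register-resident modes of the formals, computes `n - 1' and
   `n * acc' (copying to fresh pseudos any value that still mentions a
   formal about to be overwritten), stores the results back into the
   formals, and returns 1; expand_return then jumps back to
   tail_recursion_label instead of emitting a real call (a sketch of
   the effect, assuming all the validity checks pass).  */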
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   EXIT_FLAG is nonzero if this construct should be visible to
   `exit_something'.  */

void
expand_start_bindings (exit_flag)
     int exit_flag;
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.function_call_count = 0;
#if 0
  if (block_stack)
    {
      if (block_stack->data.block.cleanups == NULL_TREE
	  && (block_stack->data.block.outer_cleanups == NULL_TREE
	      || block_stack->data.block.outer_cleanups == empty_cleanup_list))
	thisblock->data.block.outer_cleanups = empty_cleanup_list;
      else
	thisblock->data.block.outer_cleanups
	  = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		       block_stack->data.block.outer_cleanups);
    }
  else
    thisblock->data.block.outer_cleanups = 0;
#endif
#if 1
  if (block_stack
      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
  else
    thisblock->data.block.outer_cleanups = 0;
#endif
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  if (!output_bytecode)
    {
      /* Make a new level for allocating stack slots.  */
      push_temp_slots ();
    }
}
/* Given a pointer to a BLOCK node, save a pointer to the most recently
   generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
   BLOCK node.  */

void
remember_end_note (block)
     register tree block;
{
  BLOCK_END_NOTE (block) = last_block_end_note;
  last_block_end_note = NULL_RTX;
}
/* Generate RTL code to terminate a binding contour.
   VARS is the chain of VAR_DECL nodes
   for the variables bound in this contour.
   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  register struct nesting *thisblock = block_stack;
  register tree decl;

  if (output_bytecode)
    {
      bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
      return;
    }

  if (warn_unused)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
	  && ! DECL_IN_SYSTEM_HEADER (decl))
	warning_with_decl (decl, "unused variable `%s'");

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make a handler for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      && nonlocal_labels
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    {
      tree link;
      rtx afterward = gen_label_rtx ();
      rtx handler_label = gen_label_rtx ();
      rtx save_receiver = gen_reg_rtx (Pmode);
      rtx insns;

      /* Don't let jump_optimize delete the handler.  */
      LABEL_PRESERVE_P (handler_label) = 1;

      /* Record the handler address in the stack slot for that purpose,
	 during this block, saving and restoring the outer value.  */
      if (thisblock->next != 0)
	{
	  emit_move_insn (nonlocal_goto_handler_slot, save_receiver);

	  start_sequence ();
	  emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
	  insns = get_insns ();
	  end_sequence ();
	  emit_insns_before (insns, thisblock->data.block.first_insn);
	}

      start_sequence ();
      emit_move_insn (nonlocal_goto_handler_slot,
		      gen_rtx (LABEL_REF, Pmode, handler_label));
      insns = get_insns ();
      end_sequence ();
      emit_insns_before (insns, thisblock->data.block.first_insn);

      /* Jump around the handler; it runs only when specially invoked.  */
      emit_jump (afterward);
      emit_label (handler_label);

#ifdef HAVE_nonlocal_goto
      if (! HAVE_nonlocal_goto)
#endif
	/* First adjust our frame pointer to its actual value.  It was
	   previously set to the start of the virtual area corresponding to
	   the stacked variables when we branched here and now needs to be
	   adjusted to the actual hardware fp value.

	   Assignments to virtual registers are converted by
	   instantiate_virtual_regs into the corresponding assignment
	   to the underlying register (fp in this case) that makes
	   the original assignment true.
	   So the following insn will actually be
	   decrementing fp by STARTING_FRAME_OFFSET.  */
	emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
	{
#ifdef ELIMINABLE_REGS
	  /* If the argument pointer can be eliminated in favor of the
	     frame pointer, we don't need to restore it.  We assume here
	     that if such an elimination is present, it can always be used.
	     This is the case on all known machines; if we don't make this
	     assumption, we do unnecessary saving on many machines.  */
	  static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
	  int i;

	  for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	    if (elim_regs[i].from == ARG_POINTER_REGNUM
		&& elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	      break;

	  if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	    {
	      /* Now restore our arg pointer from the address at which it
		 was saved in our stack frame.
		 If there hasn't been space allocated for it yet, make
		 some now.  */
	      if (arg_pointer_save_area == 0)
		arg_pointer_save_area
		  = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	      emit_move_insn (virtual_incoming_args_rtx,
			      /* We need a pseudo here, or else
				 instantiate_virtual_regs_1 complains.  */
			      copy_to_reg (arg_pointer_save_area));
	    }
	}
#endif

      /* The handler expects the desired label address in the static chain
	 register.  It tests the address and does an appropriate jump
	 to whatever label is desired.  */
      for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
	/* Skip any labels we shouldn't be able to jump to from here.  */
	if (! DECL_TOO_LATE (TREE_VALUE (link)))
	  {
	    rtx not_this = gen_label_rtx ();
	    rtx this = gen_label_rtx ();
	    do_jump_if_equal (static_chain_rtx,
			      gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
			      this, 0);
	    emit_jump (not_this);
	    emit_label (this);
	    expand_goto (TREE_VALUE (link));
	    emit_label (not_this);
	  }

      /* If label is not recognized, abort.  */
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
			 VOIDmode, 0);
      emit_barrier ();
      emit_label (afterward);
    }

  /* Don't allow jumping into a block that has cleanups or a stack level.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels and all cleanups.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Only clean up here if this point can actually be reached.  */
      int reachable = GET_CODE (get_last_insn ()) != BARRIER;

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
      if (reachable)
	do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  if (nonlocal_goto_handler_slot != 0)
	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
			     NULL_RTX);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  if (obey_regdecls)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      {
	rtx rtl = DECL_RTL (decl);
	if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
	  use_variable (rtl);
      }

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
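
/* A sketch of the expected call sequence for a brace-block such as
   `{ int x = 5; ... }' (illustrative only; DECLS stands for the
   front end's chain of VAR_DECLs for the block):

	expand_start_bindings (0);
	expand_decl (x, NULL_TREE);
	expand_decl_init (x);
	... expand the statements of the block ...
	expand_end_bindings (DECLS, 1, 0);  */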
/* End a binding contour.
   VARS is the chain of VAR_DECL nodes for the variables bound
   in this contour.  MARK_ENDS is nonzero if we should put a note
   at the beginning and end of this binding contour.
   DONT_JUMP_IN is nonzero if it is not valid to jump into this
   contour.  */

static void
bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  struct nesting *thisbind = nesting_stack;
  tree decl;

  if (warn_unused)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
	warning_with_decl (decl, "unused variable `%s'");

  if (thisbind->exit_label)
    bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));

  /* Pop block/bindings off stack */
  POPSTACK (block_stack);
}
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   There is no special support here for C++ constructors.
   They should be handled by the proper code in DECL_INITIAL.  */

void
expand_decl (decl, cleanup)
     register tree decl;
     tree cleanup;
{
  struct nesting *thisblock = block_stack;
  tree type;

  if (output_bytecode)
    {
      bc_expand_decl (decl, 0);
      return;
    }

  type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (DECL_REGISTER (decl) || ! obey_regdecls))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  rtx realpart, imagpart;
	  enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));

	  /* For a complex type variable, make a CONCAT of two pseudos
	     so that the real and imaginary parts
	     can be allocated separately.  */
	  realpart = gen_reg_rtx (partmode);
	  REG_USERVAR_P (realpart) = 1;
	  imagpart = gen_reg_rtx (partmode);
	  REG_USERVAR_P (imagpart) = 1;
	  DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
	}
      else
	{
	  DECL_RTL (decl) = gen_reg_rtx (reg_mode);
	  if (TREE_CODE (type) == POINTER_TYPE)
	    mark_reg_pointer (DECL_RTL (decl));
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;
	}
    }
  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      DECL_RTL (decl)
	= assign_stack_temp (DECL_MODE (decl),
			     ((TREE_INT_CST_LOW (DECL_SIZE (decl))
			       + BITS_PER_UNIT - 1)
			      / BITS_PER_UNIT),
			     1);
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			   &thisblock->data.block.stack_level,
			   thisblock->data.block.first_insn);
	  stack_block_stack = thisblock;
	}

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      DECL_SIZE (decl),
				      size_int (BITS_PER_UNIT)),
			  NULL_RTX, VOIDmode, 0);

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#if 0 /* A variable is not necessarily unchanging
	 just because it is const.  RTX_UNCHANGING_P
	 means no change in the function,
	 not merely no change in the variable's scope.
	 It is correct to set RTX_UNCHANGING_P if the variable's scope
	 is the whole function.  There's no convenient way to test that.  */
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
#endif

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}
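
/* In summary, expand_decl chooses storage for DECL roughly as follows
   (a paraphrase of the cases above, not an additional specification):

	erroneous type             -> dummy (MEM BLKmode const0_rtx)
	incomplete type            -> stack temp, or address held in a
				      pseudo until the initializer fixes it
	scalar, not volatile or
	  addressable              -> pseudo register (a CONCAT of two
				      pseudos for COMPLEX_TYPE)
	fixed-size BLKmode object  -> stack slot from assign_stack_temp
	variable-size object       -> space pushed on the stack at run
				      time, referenced indirectly.  */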
/* Generate code for the automatic variable declaration DECL.  For
   most variables this just means we give it a stack offset.  The
   compiler sometimes emits cleanups without variables and we will
   have to deal with those too.  */

static void
bc_expand_decl (decl, cleanup)
     tree decl;
     tree cleanup;
{
  tree type;

  if (!decl)
    {
      /* A cleanup with no variable.  */
      if (!cleanup)
	abort ();
      return;
    }

  /* Only auto variables need any work.  */
  if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  type = TREE_TYPE (decl);

  if (type == error_mark_node)
    DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);

  else if (DECL_SIZE (decl) == 0)
    {
      /* Variable with incomplete type.  The stack offset herein will be
	 fixed later in expand_decl_init ().  */
      DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
    }
  else if (TREE_CONSTANT (DECL_SIZE (decl)))
    {
      DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
					   DECL_ALIGN (decl));
    }
  else
    DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
}
/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  if (output_bytecode)
    {
      bc_expand_decl_init (decl);
      return;
    }

  /* If this is a CONST_DECL, we don't have to generate any code, but
     if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
     to be set while in the obstack containing the constant.  If we don't
     do this, we can lose if we have functions nested three deep and the middle
     function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
     the innermost function is the first to expand that STRING_CST.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
	expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
		     EXPAND_INITIALIZER);
      return;
    }

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
}
/* Expand initialization for variable-sized types.  Allocate array
   using newlocalSI and set local variable, which is a pointer to the
   storage.  */

static void
bc_expand_variable_local_init (decl)
     tree decl;
{
  /* Evaluate size expression and coerce to SI */
  bc_expand_expr (DECL_SIZE (decl));

  /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
     no coercion is necessary (?) */

  /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
     TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */

  /* Emit code to allocate array */
  bc_emit_instruction (newlocalSI);

  /* Store array pointer in local variable.  This is the only instance
     where we actually want the address of the pointer to the
     variable-size block, rather than the pointer itself.  We avoid
     using expand_address() since that would cause the pointer to be
     pushed rather than its address.  Hence the hard-coded reference;
     notice also that the variable is always local (no global
     variable-size type variables).  */

  bc_load_localaddr (DECL_RTL (decl));
  bc_emit_instruction (storeP);
}
/* Emit code to initialize a declaration.  */

static void
bc_expand_decl_init (decl)
     tree decl;
{
  int org_stack_depth;

  /* Static initializers are handled elsewhere */

  if (TREE_STATIC (decl))
    return;

  /* Remember original stack depth */
  org_stack_depth = stack_depth;

  /* If the type is variable-size, we first create its space (we ASSUME
     it CAN'T be static).  We do this regardless of whether there's an
     initializer assignment or not.  */

  if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    bc_expand_variable_local_init (decl);

  /* Expand initializer assignment */
  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (TREE_TYPE (decl), decl, 0, 0);
    }
  else if (DECL_INITIAL (decl))
    expand_assignment (TREE_TYPE (decl), decl, 0, 0);

  /* Restore stack depth */
  if (org_stack_depth > stack_depth)
    abort ();

  bc_adjust_stack (stack_depth - org_stack_depth);
}
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      thisblock->data.block.cleanups
	= temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;
      (*interim_eh_hook) (NULL_TREE);
    }

  return 1;
}
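
/* For example, a C++ front end expanding `T t;' where T has a
   destructor might record the cleanup like this (a sketch; DTOR_CALL
   stands for a front-end-built tree that calls t's destructor):

	expand_decl (t, NULL_TREE);
	expand_decl_init (t);
	expand_decl_cleanup (t, DTOR_CALL);

   The recorded cleanup is later expanded by expand_cleanups on every
   exit path from the binding contour, including gotos with fixups.  */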
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
	 aligned such that the element doesn't need to have BLKmode, so
	 change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
			   MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    {
	      DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
	      MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
	    }
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
	}
      else
	abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
	thisblock->data.block.cleanups
	  = temp_tree_cons (decl_elt, cleanup_elt,
			    thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
    }
}
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.

   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (list, dont_do, in_fixup, reachable)
     tree list;
     tree dont_do;
     int in_fixup;
     int reachable;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
	else
	  {
	    if (! in_fixup)
	      (*interim_eh_hook) (TREE_VALUE (tail));

	    if (reachable)
	      {
		/* Cleanups may be run multiple times.  For example,
		   when exiting a binding contour, we expand the
		   cleanups associated with that contour.  When a goto
		   within that binding contour has a target outside that
		   contour, it will expand all cleanups from its scope to
		   the target.  Though the cleanups are expanded multiple
		   times, the control paths are non-overlapping so the
		   cleanups will not be executed twice.  */
		expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
	      }
	  }
      }
}
/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}
/* Return the cleanup list of the innermost binding contour,
   or 0 if not currently inside any binding contour.  */

tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}
/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;
  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list))
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  if (output_bytecode)
    {
      bc_expand_start_case (thiscase, expr, type, printname);
      return;
    }

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}
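
/* Illustrative sketch (not part of GNU CC): the calling protocol a
   front end follows for the case-statement entry points above.  COND
   stands in for whatever index-expression tree the parser built, and
   the error handling a real caller performs is elided.  */
#if 0
  expand_start_case (1, cond, TREE_TYPE (cond), "switch statement");
  /* ... parse the body; each `case' or `default' label seen becomes a
     call to pushcase () or pushcase_range () below ... */
  expand_end_case (cond);
#endif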
/* Enter a case statement.  It is assumed that the caller has pushed
   the current context onto the case stack.  */

static void
bc_expand_start_case (thiscase, expr, type, printname)
     struct nesting *thiscase;
     tree expr;
     tree type;
     char *printname;
{
  bc_expand_expr (expr);
  bc_expand_conversion (TREE_TYPE (expr), type);

  /* For cases, the skip is a place we jump to that's emitted after
     the size of the jump table is known.  */

  thiscase->data.case_stmt.skip_label = gen_label_rtx ();
  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
}
/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  POPSTACK (case_stack);
}
/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  if (output_bytecode)
    return bc_pushcase (value, label);

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */

  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
        {
          *duplicate = case_stack->data.case_stmt.default_label;
          return 2;
        }
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
         to keep the chain sorted in increasing order.
         But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
           /* Keep going past elements distinctly less than VALUE.  */
           *l != 0 && tree_int_cst_lt ((*l)->high, value);
           l = &(*l)->right)
        ;
      if (*l)
        {
          /* Element we will insert before must be distinctly greater;
             overlap means error.  */
          if (! tree_int_cst_lt (value, (*l)->low))
            {
              *duplicate = (*l)->code_label;
              return 2;
            }
        }

      /* Add this label to the chain, and succeed.
         Copy VALUE so it is on temporary rather than momentary
         obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);
  return 0;
}
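
/* Illustrative sketch (not part of GNU CC): how a caller might act on
   the return codes documented above.  VALUE, CONVERT_AND_CHECK, LABEL
   and DUPLICATE are hypothetical stand-ins for what a front end would
   supply, and the diagnostic wording is invented for the example.  */
#if 0
  switch (pushcase (value, convert_and_check, label, &duplicate))
    {
    case 0: break;                                      /* success */
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;      /* see DUPLICATE */
    case 3: error ("case label value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
#endif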
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = (*converter) (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, converter, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
         overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
        {
          *duplicate = (*l)->code_label;
          return 2;
        }
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}
/* Accumulate one case or default label; VALUE is the value of the
   case, or nil for a default label.  If not currently inside a case,
   return 1 and do nothing.  If VALUE is a duplicate or overlaps, return
   2 and do nothing.  If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.  This function is a leftover from the earlier
   bytecode compiler, which was based on gcc 1.37.  It should be
   merged into pushcase.  */

static int
bc_pushcase (value, label)
     tree value;
     tree label;
{
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  if (! thiscase)
    return 1;

  /* Fail if duplicate, overlap, or out of type range.  */
  if (value)
    {
      value = convert (thiscase->data.case_stmt.nominal_type, value);
      if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
        return 3;

      for (case_label = thiscase->data.case_stmt.case_list;
           case_label->left; case_label = case_label->left)
        if (! tree_int_cst_lt (case_label->left->high, value))
          break;

      if ((case_label != thiscase->data.case_stmt.case_list
           && ! tree_int_cst_lt (case_label->high, value))
          || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
        return 2;

      new_label = (struct case_node *) oballoc (sizeof (struct case_node));
      new_label->low = new_label->high = copy_node (value);
      new_label->code_label = label;
      new_label->left = case_label->left;

      case_label->left = new_label;
      thiscase->data.case_stmt.num_ranges++;
    }
  else
    {
      if (thiscase->data.case_stmt.default_label)
        return 2;
      thiscase->data.case_stmt.default_label = label;
    }

  expand_label (label);
  return 0;
}
/* Returns the number of possible values of TYPE.
   Returns -1 if the number is unknown or variable.
   Returns -2 if the number does not fit in a HOST_WIDE_INT.
   Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
   do not increase monotonically (there may be duplicates);
   to 1 if the values increase monotonically, but not always by 1;
   otherwise sets it to 0.  */

HOST_WIDE_INT
all_cases_count (type, spareness)
     tree type;
     int *spareness;
{
  HOST_WIDE_INT count, count_high = 0;
  *spareness = 0;

  switch (TREE_CODE (type))
    {
      tree t;
    case BOOLEAN_TYPE:
      count = 2;
      break;
    case CHAR_TYPE:
      count = 1 << BITS_PER_UNIT;
      break;
    default:
    case INTEGER_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
          || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
        return -1;
      else
        {
          /* count
             = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
             - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
             but with overflow checking.  */
          tree mint = TYPE_MIN_VALUE (type);
          tree maxt = TYPE_MAX_VALUE (type);
          HOST_WIDE_INT lo, hi;
          neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
                      &lo, &hi);
          add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
                      lo, hi, &lo, &hi);
          add_double (lo, hi, 1, 0, &lo, &hi);
          if (hi != 0 || lo < 0)
            return -2;
          count = lo;
        }
      break;
    case ENUMERAL_TYPE:
      count = 0;
      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
        {
          if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
              || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
              || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
              != TREE_INT_CST_LOW (TREE_VALUE (t)))
            *spareness = 1;
          count++;
        }
      if (*spareness == 1)
        {
          tree prev = TREE_VALUE (TYPE_VALUES (type));
          for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
            {
              if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
                {
                  *spareness = 2;
                  break;
                }
              prev = TREE_VALUE (t);
            }
        }
    }
  return count;
}
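
/* A self-contained sketch (not part of GNU CC) of the overflow-checked
   count above, using a plain `long long' in place of the lo/hi word
   pair that neg_double/add_double manipulate.  */
#if 0
#include <stdio.h>

/* Return max - min + 1, or -2 if that does not fit in a signed word.  */
static long long
count_values (long long min, long long max)
{
  unsigned long long span
    = (unsigned long long) max - (unsigned long long) min;

  if (span + 1 == 0 || (long long) (span + 1) < 0)
    return -2;                  /* the count overflows */
  return (long long) (span + 1);
}

int
main ()
{
  printf ("%lld\n", count_values (-128, 127));  /* prints 256 */
  return 0;
}
#endif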
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */

static void
mark_seen_cases (type, cases_seen, count, sparseness)
     tree type;
     unsigned char *cases_seen;
     long count;
     int sparseness;
{
  tree next_node_to_try = NULL_TREE;
  long next_node_offset = 0;

  register struct case_node *n;
  tree val = make_node (INTEGER_CST);
  TREE_TYPE (val) = type;
  for (n = case_stack->data.case_stmt.case_list; n;
       n = n->right)
    {
      TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
      TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
      while ( ! tree_int_cst_lt (n->high, val))
        {
          /* Calculate (into xlo) the "offset" of the integer (val).
             The element with lowest value has offset 0, the next smallest
             element has offset 1, etc.  */

          HOST_WIDE_INT xlo, xhi;
          tree t;
          if (sparseness == 2)
            {
              /* This less efficient loop is only needed to handle
                 duplicate case values (multiple enum constants
                 with the same value).  */
              for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
                   t = TREE_CHAIN (t), xlo++)
                {
                  if (tree_int_cst_equal (val, TREE_VALUE (t)))
                    BITARRAY_SET (cases_seen, xlo);
                }
            }
          else
            {
              if (sparseness && TYPE_VALUES (type) != NULL_TREE)
                {
                  /* The TYPE_VALUES will be in increasing order, so
                     start searching where we last ended.  */
                  t = next_node_to_try;
                  xlo = next_node_offset;
                  xhi = 0;
                  for (;;)
                    {
                      if (t == NULL_TREE)
                        {
                          t = TYPE_VALUES (type);
                          xlo = 0;
                        }
                      if (tree_int_cst_equal (val, TREE_VALUE (t)))
                        {
                          next_node_to_try = TREE_CHAIN (t);
                          next_node_offset = xlo + 1;
                          break;
                        }
                      xlo++;
                      t = TREE_CHAIN (t);
                      if (t == next_node_to_try)
                        {
                          xlo = -1;
                          break;
                        }
                    }
                }
              else
                {
                  t = TYPE_MIN_VALUE (type);
                  if (t)
                    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
                                &xlo, &xhi);
                  else
                    xlo = xhi = 0;
                  add_double (xlo, xhi,
                              TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
                              &xlo, &xhi);
                }

              if (xhi == 0 && xlo >= 0 && xlo < count)
                BITARRAY_SET (cases_seen, xlo);
            }
          add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
                      1, 0,
                      &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
        }
    }
}
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  /* True iff the selector type is a numbered set mode.  */
  int sparseness = 0;

  /* The number of possible selector values.  */
  HOST_WIDE_INT size;

  /* For each possible selector value, a one iff it has been matched
     by a case value alternative.  */
  unsigned char *cases_seen;

  /* The allocated size of cases_seen, in chars.  */
  long bytes_needed;

  if (output_bytecode)
    {
      bc_check_for_full_enumeration_handling (type);
      return;
    }

  size = all_cases_count (type, &sparseness);
  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;

  if (size > 0 && size < 600000
      /* We deliberately use malloc here - not xmalloc.  */
      && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
    {
      long i;
      tree v = TYPE_VALUES (type);
      bzero (cases_seen, bytes_needed);

      /* The time complexity of this code is normally O(N), where
         N is the number of members in the enumerated type.
         However, if type is an ENUMERAL_TYPE whose values do not
         increase monotonically, quadratic time may be needed.  */

      mark_seen_cases (type, cases_seen, size, sparseness);

      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
        {
          if (BITARRAY_TEST (cases_seen, i) == 0)
            warning ("enumeration value `%s' not handled in switch",
                     IDENTIFIER_POINTER (TREE_PURPOSE (v)));
        }

      free (cases_seen);
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
           chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
           chain = TREE_CHAIN (chain))
        ;

      if (!chain)
        {
          if (TYPE_NAME (type) == 0)
            warning ("case value `%d' not in enumerated type",
                     TREE_INT_CST_LOW (n->low));
          else
            warning ("case value `%d' not in enumerated type `%s'",
                     TREE_INT_CST_LOW (n->low),
                     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                          == IDENTIFIER_NODE)
                                         ? TYPE_NAME (type)
                                         : DECL_NAME (TYPE_NAME (type))));
        }

      if (!tree_int_cst_equal (n->low, n->high))
        {
          for (chain = TYPE_VALUES (type);
               chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
               chain = TREE_CHAIN (chain))
            ;

          if (!chain)
            {
              if (TYPE_NAME (type) == 0)
                warning ("case value `%d' not in enumerated type",
                         TREE_INT_CST_LOW (n->high));
              else
                warning ("case value `%d' not in enumerated type `%s'",
                         TREE_INT_CST_LOW (n->high),
                         IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                              == IDENTIFIER_NODE)
                                             ? TYPE_NAME (type)
                                             : DECL_NAME (TYPE_NAME (type))));
            }
        }
    }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
           (*l)->right != 0;
           l = &(*l)->right)
        ;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
#endif /* 0 */
}
/* Check that all enumeration literals are covered by the case
   expressions of a switch.  Also warn if there are any cases
   that are not elements of the enumerated type.  */

static void
bc_check_for_full_enumeration_handling (type)
     tree type;
{
  struct nesting *thiscase = case_stack;
  struct case_node *c;
  tree e;

  /* Check for enums not handled.  */
  for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
    {
      for (c = thiscase->data.case_stmt.case_list->left;
           c && tree_int_cst_lt (c->high, TREE_VALUE (e));
           c = c->left)
        ;
      if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
        warning ("enumerated value `%s' not handled in switch",
                 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
    }

  /* Check for cases not in the enumeration.  */
  for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
    {
      for (e = TYPE_VALUES (type);
           e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
           e = TREE_CHAIN (e))
        ;
      if (! e)
        warning ("case value `%d' not in enumerated type `%s'",
                 TREE_INT_CST_LOW (c->low),
                 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
                                     ? TYPE_NAME (type)
                                     : DECL_NAME (TYPE_NAME (type))));
    }
}
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  int unsignedp;

  if (output_bytecode)
    {
      bc_expand_end_case (orig_index);
      return;
    }

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
         enumeration literals are covered by the cases.
         No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
          && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
          && TREE_CODE (index_expr) != INTEGER_CST)
        check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
        {
          rtx insn;
          for (insn = get_last_insn ();
               insn != case_stack->data.case_stmt.start;
               insn = PREV_INSN (insn))
            if (GET_CODE (insn) != NOTE
                && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
              {
                warning ("unreachable code at beginning of %s",
                         case_stack->data.case_stmt.printname);
                break;
              }
        }

      /* If we don't have a default-label, create one here,
         after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
        {
          thiscase->data.case_stmt.default_label
            = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
          expand_label (thiscase->data.case_stmt.default_label);
        }
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
         Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
        {
          /* Check low and high label values are integers.  */
          if (TREE_CODE (n->low) != INTEGER_CST)
            abort ();
          if (TREE_CODE (n->high) != INTEGER_CST)
            abort ();

          n->low = convert (index_type, n->low);
          n->high = convert (index_type, n->high);

          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */
          if (count++ == 0)
            {
              minval = n->low;
              maxval = n->high;
            }
          else
            {
              if (INT_CST_LT (n->low, minval))
                minval = n->low;
              if (INT_CST_LT (maxval, n->high))
                maxval = n->high;
            }
          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))
            count++;
        }

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
        range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      if (count == 0)
        {
          expand_expr (index_expr, const0_rtx, VOIDmode, 0);
          emit_queue ();
          emit_jump (default_label);
        }

      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
         bounds, this means extra overhead for dispatch tables
         which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
               || count < CASE_VALUES_THRESHOLD
               || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
                   > 10 * count)
               || TREE_CODE (index_expr) == INTEGER_CST
               /* These will reduce to a constant.  */
               || (TREE_CODE (index_expr) == CALL_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
                   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
                   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
               || (TREE_CODE (index_expr) == COMPOUND_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
        {
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

          /* If the index is a short or char that we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
                  == CODE_FOR_nothing))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (cmp_optab->handlers[(int) wider_mode].insn_code
                    != CODE_FOR_nothing)
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)
            {
              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)
                {
                  index_expr
                    = build_int_2 (INTVAL (index),
                                   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (index_type, index_expr);
                }

              /* For constant index expressions we need only
                 issue an unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimisation phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
                if (! tree_int_cst_lt (index_expr, n->low)
                    && ! tree_int_cst_lt (n->high, index_expr))
                  break;

              if (n)
                emit_jump (label_rtx (n->code_label));
              else
                emit_jump (default_label);
            }
          else
            {
              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */

              use_cost_table
                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list,
                                  NULL_PTR);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, index_type);
              emit_jump_if_reachable (default_label);
            }
        }
      else
        {
          int win = 0;
#ifdef HAVE_casesi
          if (HAVE_casesi)
            {
              enum machine_mode index_mode = SImode;
              int index_bits = GET_MODE_BITSIZE (index_mode);
              rtx op1, op2;
              enum machine_mode op_mode;

              /* Convert the index to SImode.  */
              if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
                  > GET_MODE_BITSIZE (index_mode))
                {
                  enum machine_mode omode = TYPE_MODE (index_type);
                  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

                  /* We must handle the endpoints in the original mode.  */
                  index_expr = build (MINUS_EXPR, index_type,
                                      index_expr, minval);
                  minval = integer_zero_node;
                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
                  emit_jump_insn (gen_bltu (default_label));
                  /* Now we can safely truncate.  */
                  index = convert_to_mode (index_mode, index, 0);
                }
              else
                {
                  if (TYPE_MODE (index_type) != index_mode)
                    {
                      index_expr = convert (type_for_size (index_bits, 0),
                                            index_expr);
                      index_type = TREE_TYPE (index_expr);
                    }

                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                }
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
                  (index, op_mode))
                index = copy_to_mode_reg (op_mode, index);

              op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
                  (op1, op_mode))
                op1 = copy_to_mode_reg (op_mode, op1);

              op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
                  (op2, op_mode))
                op2 = copy_to_mode_reg (op_mode, op2);

              emit_jump_insn (gen_casesi (index, op1, op2,
                                          table_label, default_label));
              win = 1;
            }
#endif
#ifdef HAVE_tablejump
          if (! win && HAVE_tablejump)
            {
              index_expr = convert (thiscase->data.case_stmt.nominal_type,
                                    fold (build (MINUS_EXPR, index_type,
                                                 index_expr, minval)));
              index_type = TREE_TYPE (index_expr);
              index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              do_tablejump (index, TYPE_MODE (index_type),
                            expand_expr (range, NULL_RTX, VOIDmode, 0),
                            table_label, default_label);
              win = 1;
            }
#endif
          if (! win)
            abort ();

          /* Get table of labels to jump to, in order of case index.  */

          ncases = TREE_INT_CST_LOW (range) + 1;
          labelvec = (rtx *) alloca (ncases * sizeof (rtx));
          bzero ((char *) labelvec, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
            {
              register HOST_WIDE_INT i
                = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

              while (1)
                {
                  labelvec[i]
                    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
                  if (i + TREE_INT_CST_LOW (orig_minval)
                      == TREE_INT_CST_LOW (n->high))
                    break;
                  i++;
                }
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

          /* Output the table */
          emit_label (table_label);

          /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
             were an expression, instead of an #ifdef/#ifndef.  */
          if (
#ifdef CASE_VECTOR_PC_RELATIVE
              1 ||
#endif
              flag_pic)
            emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
                                     gen_rtx (LABEL_REF, Pmode, table_label),
                                     gen_rtvec_v (ncases, labelvec)));
          else
            emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
                                     gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);
#else
          emit_barrier ();
#endif
        }

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
                     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
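
/* A self-contained sketch (not part of GNU CC) of the table-vs-tree
   decision above: a dispatch table pays off only when the value range
   is dense enough relative to the number of case labels.  The constant
   10 mirrors the `range > 10 * count' test, and THRESHOLD stands in
   for CASE_VALUES_THRESHOLD.  */
#if 0
#define THRESHOLD 5

/* Nonzero if a jump table should be used for COUNT labels spanning RANGE.  */
static int
use_jump_table_p (unsigned long range, int count)
{
  return count >= THRESHOLD && range <= 10UL * (unsigned long) count;
}
#endif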
/* Terminate a case statement.  EXPR is the original index
   expression.  */

static void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instruction's literal arguments, in order.
     In addition to the value on the stack, it uses:
     1.  The address of the jump table.
     2.  The size of the jump table.
     3.  The default label.  */

  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
                          sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
  else
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
        opcode = TREE_INT_CST_LOW (c->low);
        bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

        opcode = TREE_INT_CST_LOW (c->high);
        bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

        bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else
    if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
      for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
        {
          bc_emit_bytecode_DI_const (c->low);
          bc_emit_bytecode_DI_const (c->high);

          bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
        }
    else
      /* Bad mode */
      abort ();

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST)
    check_for_full_enumeration_handling (TREE_TYPE (expr));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}
/* Return unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
        emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

        chars above space:      16
        digits:                 16
        default:                12
        space, punct:           8
        tab:                    4
        newline:                2
        other "\" chars:        1
        remaining chars:        0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
        {
          if (isalnum (i))
            cost_table[i] = 16;
          else if (ispunct (i))
            cost_table[i] = 8;
          else if (iscntrl (i))
            cost_table[i] = -1;
        }

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
        if (cost_table[i] < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */

  return 1;
}
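
/* Self-contained sketch (not part of GNU CC) of the text-weighting idea
   above: build the same kind of table, indexable from -1 to 127, and
   read a few weights back.  */
#if 0
#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#include <string.h>

int
main ()
{
  short *cost = ((short *) malloc (129 * sizeof (short))) + 1;
  int i;

  memset (cost - 1, 0, 129 * sizeof (short));
  for (i = 0; i < 128; i++)
    if (isalnum (i))
      cost[i] = 16;
    else if (ispunct (i))
      cost[i] = 8;
    else if (iscntrl (i))
      cost[i] = -1;     /* "strange" control char: disables the heuristic */
  cost[' '] = 8;
  cost['\n'] = 2;

  printf ("'a': %d  '\\n': %d  -1: %d\n", cost['a'], cost['\n'], cost[-1]);
  free (cost - 1);
  return 0;
}
#endif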
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && next_real_insn (label_rtx (np->code_label)) == lb
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
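
/* Self-contained sketch (not part of GNU CC) of the grouping step on a
   plain array: adjacent entries with consecutive values and the same
   target collapse into one range, e.g. 1: 2: 3: becomes 1..3:.  */
#if 0
#include <stdio.h>

struct range { long low, high; int target; };

/* Merge in place; return the new number of entries.  */
static int
group_ranges (struct range *r, int n)
{
  int out = 0, i;

  for (i = 0; i < n; i++)
    if (out > 0
        && r[out - 1].target == r[i].target
        && r[out - 1].high + 1 == r[i].low)
      r[out - 1].high = r[i].high;      /* extend the previous range */
    else
      r[out++] = r[i];
  return out;
}

int
main ()
{
  struct range r[] = {{1, 1, 0}, {2, 2, 0}, {3, 3, 0}, {7, 7, 1}};
  int n = group_ranges (r, 4), i;

  for (i = 0; i < n; i++)       /* prints 1..3 -> 0 and 7..7 -> 1 */
    printf ("%ld..%ld -> %d\n", r[i].low, r[i].high, r[i].target);
  return 0;
}
#endif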
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;

          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost,
                 Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
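
/* Self-contained sketch (not part of GNU CC) of the same balancing idea
   on a sorted singly linked list: pick the middle node as pivot, attach
   the halves as left and right branches, and recurse.  Equal weights
   are assumed here, whereas the real code above can weight each node by
   cost_table; the real code also reuses the `right' link as the list
   link, while this sketch keeps a separate `next' field.  */
#if 0
#include <stddef.h>

struct node { long val; struct node *left, *right, *next; };

static struct node *
balance (struct node *head, int n)
{
  struct node *pivot = head, **prevp = &head;
  int i;

  if (n == 0)
    return NULL;
  for (i = 0; i < n / 2; i++)   /* walk to the middle node */
    {
      prevp = &pivot->next;
      pivot = pivot->next;
    }
  *prevp = NULL;                /* detach the left half before the pivot */
  pivot->left = balance (head, n / 2);
  pivot->right = balance (pivot->next, n - n / 2 - 1);
  return pivot;
}
#endif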
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
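
/* Self-contained sketch (not part of GNU CC) of the decision-tree walk
   above, greatly simplified: print a compare/branch sequence for a
   balanced tree of single-valued nodes.  Range handling, the pruning
   of bounded tests, and the fall-through-to-default jumps are all
   elided; labels here are just the case values.  */
#if 0
#include <stdio.h>

struct cnode { long val; struct cnode *left, *right; };

static void
emit (struct cnode *n)
{
  if (n == NULL)
    return;
  printf ("\tcmp index,%ld ; beq L%ld\n", n->val, n->val);
  if (n->right)
    printf ("\tbgt Lright_%ld\n", n->val);
  emit (n->left);
  if (n->right)
    {
      printf ("Lright_%ld:\n", n->val);
      emit (n->right);
    }
}

int
main ()
{
  struct cnode l = {1, 0, 0}, r = {9, 0, 0}, root = {5, &l, &r};

  emit (&root);         /* compare against 5 first, then each half */
  return 0;
}
#endif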
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}