/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
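
/* For illustration, a front end expanding the statement

       if (cond) then_stmt;

   would emit roughly this call sequence (a sketch; the exact calls
   vary by front end):

       expand_start_cond (cond, 0);
       ... expand THEN_STMT ...
       expand_end_cond ();

   See the corresponding `expand_*' functions later in this file.  */
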
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;			/* Balance factor for the AVL tree */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

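/* For illustration, a C switch such as

       switch (i)
	 {
	 case 1: ...
	 case 4 ... 7: ...
	 default: ...
	 }

   (the second label uses the GNU C case-range extension) yields one
   case_node with LOW == HIGH == 1 and one with LOW == 4, HIGH == 7;
   the default label is kept separately, in the case_stmt data of
   `struct nesting' below.  (Illustrative sketch.)  */
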
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};
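
/* For illustration: while expanding

       while (a)		(pushes a `loop' record)
	 {			(pushes a `block' record)
	   if (b)		(pushes a `cond' record)
	     break;
	 }

   all three records are chained on nesting_stack through their `all'
   fields, and each is also on its own stack (loop_stack, block_stack,
   cond_stack, declared below).  (Illustrative sketch.)  */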

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACK too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
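
/* For example, the loop-ending code elsewhere in this file discards
   the innermost loop, and any constructs nested within it, with

       POPSTACK (loop_stack);

   which also unlinks the freed records from cond_stack, block_stack,
   stack_block_stack and case_stack as needed.  (Illustrative use.)  */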
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain entry.  */
  struct label_chain *next;
  tree label;
};
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || (GET_CODE (last_insn) == NOTE
		  && prev_real_insn (last_insn) == 0)))
	emit_insn (gen_nop ());
    }
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
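
/* For illustration: expand_computed_goto above is reached for the
   GNU C computed goto,

       void *p = &&lab;
       ...
       goto *p;

   where the pointer expression is passed as EXP.  (Illustrative
   sketch.)  */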
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
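
/* Illustrative sketch (assuming a GNU C front end): a nonlocal label
   arises when a nested function jumps to a label in its containing
   function, e.g.

       void f ()
       {
	 __label__ failure;
	 void g () { goto failure; }
	 g ();
       failure: ;
       }

   Declaring `failure' nonlocal makes f allocate the handler slot and
   save its stack level above, for the nested goto to restore.  */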

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)

	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
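
/* Illustrative sketch of when a fixup is created: a forward goto out
   of a contour with a stack level, e.g.

       {
	 int v[n];		(dynamic size: block gets a stack level)
	 ...
	 goto out;		(label not yet defined: fixup queued)
       }
     out: ;

   Once `out' is defined, fixup_gotos (below) inserts the needed
   stack-restore and cleanup insns just before the queued jump.  */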

/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      f->before_jump
		= emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
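
/* For example, the simple GNU C statement

       asm ("nop");

   arrives here with BODY the STRING_CST "nop" and is emitted as a
   single ASM_INPUT insn.  (Illustrative sketch.)  */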

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    error ("output operand constraint contains `+'");
	    return;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case 'p':  case 'g':  case 'r':
	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }
1491
1492 /* Make vectors for the expression-rtx and constraint strings. */
1493
1494 argvec = rtvec_alloc (ninputs);
1495 constraints = rtvec_alloc (ninputs);
1496
1497 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1498 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1499 filename, line);
1500 MEM_VOLATILE_P (body) = vol;
1501
1502 /* Eval the inputs and put them into ARGVEC.
1503 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1504
1505 i = 0;
1506 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1507 {
1508 int j;
65fed0cb 1509 int allows_reg = 0;
28d81abb
RK
1510
1511 /* If there's an erroneous arg, emit no insn,
1512 because the ASM_INPUT would get VOIDmode
1513 and that could cause a crash in reload. */
1514 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1515 return;
1516 if (TREE_PURPOSE (tail) == NULL_TREE)
1517 {
1518 error ("hard register `%s' listed as input operand to `asm'",
1519 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1520 return;
1521 }
1522
1523 /* Make sure constraint has neither `=' nor `+'. */
1524
4e05a62c 1525 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
65fed0cb 1526 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
28d81abb 1527 {
65fed0cb 1528 case '+': case '=':
28d81abb
RK
1529 error ("input operand constraint contains `%c'",
1530 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1531 return;
65fed0cb
RK
1532
1533 case '?': case '!': case '*': case '%': case '&':
65fed0cb
RK
1534 case 'V': case 'm': case 'o': case '<': case '>':
1535 case 'E': case 'F': case 'G': case 'H': case 'X':
1536 case 's': case 'i': case 'n':
1537 case 'I': case 'J': case 'K': case 'L': case 'M':
1538 case 'N': case 'O': case 'P': case ',':
1539#ifdef EXTRA_CONSTRAINT
1540 case 'Q': case 'R': case 'S': case 'T': case 'U':
1541#endif
1542 break;
1543
1544 case 'p': case 'g': case 'r':
7b7a33b3
JW
1545 /* Whether or not a numeric constraint allows a register is
1546 decided by the matching constraint, and so there is no need
1547 to do anything special with them. We must handle them in
1548 the default case, so that we don't unnecessarily force
1549 operands to memory. */
1550 case '0': case '1': case '2': case '3': case '4':
65fed0cb
RK
1551 default:
1552 allows_reg = 1;
1553 break;
28d81abb
RK
1554 }
1555
65fed0cb
RK
1556 if (! allows_reg)
1557 mark_addressable (TREE_VALUE (tail));
1558
28d81abb 1559 XVECEXP (body, 3, i) /* argvec */
37366632 1560 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
76ebc969
RK
1561 if (CONSTANT_P (XVECEXP (body, 3, i))
1562 && ! general_operand (XVECEXP (body, 3, i),
1563 TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
65fed0cb
RK
1564 {
1565 if (allows_reg)
1566 XVECEXP (body, 3, i)
1567 = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1568 XVECEXP (body, 3, i));
1569 else
1570 XVECEXP (body, 3, i)
1571 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1572 XVECEXP (body, 3, i));
1573 }
1574
1575 if (! allows_reg
1576 && (GET_CODE (XVECEXP (body, 3, i)) == REG
1577 || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
1578 || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
1579 {
1580 tree type = TREE_TYPE (TREE_VALUE (tail));
6e81958a 1581 rtx memloc = assign_temp (type, 1, 1, 1);
65fed0cb 1582
65fed0cb
RK
1583 emit_move_insn (memloc, XVECEXP (body, 3, i));
1584 XVECEXP (body, 3, i) = memloc;
1585 }
1586
28d81abb
RK
1587 XVECEXP (body, 4, i) /* constraints */
1588 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1589 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1590 i++;
1591 }
1592
1593 /* Protect all the operands from the queue,
1594 now that they have all been evaluated. */
1595
1596 for (i = 0; i < ninputs; i++)
1597 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1598
1599 for (i = 0; i < noutputs; i++)
1600 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1601
1602 /* Now, for each output, construct an rtx
1603 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1604 ARGVEC CONSTRAINTS))
1605 If there is more than one, put them inside a PARALLEL. */
1606
1607 if (noutputs == 1 && nclobbers == 0)
1608 {
1609 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1610 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1611 }
1612 else if (noutputs == 0 && nclobbers == 0)
1613 {
1614 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1615 insn = emit_insn (body);
1616 }
1617 else
1618 {
1619 rtx obody = body;
1620 int num = noutputs;
1621 if (num == 0) num = 1;
1622 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1623
1624 /* For each output operand, store a SET. */
1625
1626 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1627 {
1628 XVECEXP (body, 0, i)
1629 = gen_rtx (SET, VOIDmode,
1630 output_rtx[i],
1631 gen_rtx (ASM_OPERANDS, VOIDmode,
1632 TREE_STRING_POINTER (string),
1633 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1634 i, argvec, constraints,
1635 filename, line));
1636 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1637 }
1638
1639 /* If there are no outputs (but there are some clobbers)
1640 store the bare ASM_OPERANDS into the PARALLEL. */
1641
1642 if (i == 0)
1643 XVECEXP (body, 0, i++) = obody;
1644
1645 /* Store (clobber REG) for each clobbered register specified. */
1646
b4ccaa16 1647 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1648 {
28d81abb 1649 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1650 int j = decode_reg_name (regname);
28d81abb 1651
b4ac57ab 1652 if (j < 0)
28d81abb 1653 {
c09e6498 1654 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1655 continue;
1656
c09e6498
RS
1657 if (j == -4) /* `memory', don't cache memory across asm */
1658 {
bffc6177
RS
1659 XVECEXP (body, 0, i++)
1660 = gen_rtx (CLOBBER, VOIDmode,
058f58ed 1661 gen_rtx (MEM, BLKmode,
bffc6177 1662 gen_rtx (SCRATCH, VOIDmode, 0)));
c09e6498
RS
1663 continue;
1664 }
1665
7859e3ac 1666 /* Ignore unknown register, error already signalled. */
cc1f5387 1667 continue;
28d81abb
RK
1668 }
1669
1670 /* Use QImode since that's guaranteed to clobber just one reg. */
b4ccaa16 1671 XVECEXP (body, 0, i++)
28d81abb
RK
1672 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1673 }
1674
1675 insn = emit_insn (body);
1676 }
1677
1678 free_temp_slots ();
1679}
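/* Editorial illustration, not part of the original source: a typical
   extended asm that exercises the paths above might look like this in
   user code (assuming an i386-style target):

	asm volatile ("rep movsb"
		      : "=D" (dst), "=S" (src), "=c" (n)
		      : "0" (dst), "1" (src), "2" (n)
		      : "memory");

   Each output is expanded into output_rtx[], each input goes into
   ARGVEC with its constraint wrapped in an ASM_INPUT, the matching
   constraints `0'--`2' take the allows_reg default case above, and the
   "memory" clobber becomes the (clobber (mem:BLK (scratch))) stored in
   the PARALLEL.  */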
1680\f
1681/* Generate RTL to evaluate the expression EXP
1682 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1683
1684void
1685expand_expr_stmt (exp)
1686 tree exp;
1687{
ca695ac9
JB
1688 if (output_bytecode)
1689 {
1690 int org_stack_depth = stack_depth;
1691
1692 bc_expand_expr (exp);
1693
1694 /* Restore the stack depth: verify it, then drop the value left by the expression. */
1695 if (stack_depth < org_stack_depth)
1696 abort ();
1697
1698 bc_emit_instruction (drop);
1699
1700 last_expr_type = TREE_TYPE (exp);
1701 return;
1702 }
1703
28d81abb
RK
1704 /* If -W, warn about statements with no side effects,
1705 except for an explicit cast to void (e.g. for assert()), and
1706 except inside a ({...}) where they may be useful. */
1707 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1708 {
1709 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1710 && !(TREE_CODE (exp) == CONVERT_EXPR
1711 && TREE_TYPE (exp) == void_type_node))
1712 warning_with_file_and_line (emit_filename, emit_lineno,
1713 "statement with no effect");
1714 else if (warn_unused)
1715 warn_if_unused_value (exp);
1716 }
b6ec8c5f
RK
1717
1718 /* If EXP is of function type and we are expanding statements for
1719 value, convert it to pointer-to-function. */
1720 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1721 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1722
28d81abb
RK
1723 last_expr_type = TREE_TYPE (exp);
1724 if (! flag_syntax_only)
37366632
RK
1725 last_expr_value = expand_expr (exp,
1726 (expr_stmts_for_value
1727 ? NULL_RTX : const0_rtx),
28d81abb
RK
1728 VOIDmode, 0);
1729
1730 /* If all we do is reference a volatile value in memory,
1731 copy it to a register to be sure it is actually touched. */
1732 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1733 && TREE_THIS_VOLATILE (exp))
1734 {
6a5bbbe6
RS
1735 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1736 ;
1737 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
28d81abb
RK
1738 copy_to_reg (last_expr_value);
1739 else
ddbe9812
RS
1740 {
1741 rtx lab = gen_label_rtx ();
1742
1743 /* Compare the value with itself to reference it. */
1744 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1745 expand_expr (TYPE_SIZE (last_expr_type),
37366632 1746 NULL_RTX, VOIDmode, 0),
ddbe9812
RS
1747 BLKmode, 0,
1748 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1749 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1750 emit_label (lab);
1751 }
28d81abb
RK
1752 }
1753
1754 /* If this expression is part of a ({...}) and is in memory, we may have
1755 to preserve temporaries. */
1756 preserve_temp_slots (last_expr_value);
1757
1758 /* Free any temporaries used to evaluate this expression. Any temporary
1759 used as a result of this expression will already have been preserved
1760 above. */
1761 free_temp_slots ();
1762
1763 emit_queue ();
1764}
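/* Editorial illustration, not part of the original source:

	x + 1;		warns "statement with no effect" with -W/-Wunused
	(void) f ();	no warning: explicit cast to void
	({ ... x; })	no warning for `x;': inside a ({...}) grouping
			the last statement may supply the value

   The tests at the top of expand_expr_stmt choose among these cases.  */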
1765
1766/* Warn if EXP contains any computations whose results are not used.
1767 Return 1 if a warning is printed; 0 otherwise. */
1768
150a992a 1769int
28d81abb
RK
1770warn_if_unused_value (exp)
1771 tree exp;
1772{
1773 if (TREE_USED (exp))
1774 return 0;
1775
1776 switch (TREE_CODE (exp))
1777 {
1778 case PREINCREMENT_EXPR:
1779 case POSTINCREMENT_EXPR:
1780 case PREDECREMENT_EXPR:
1781 case POSTDECREMENT_EXPR:
1782 case MODIFY_EXPR:
1783 case INIT_EXPR:
1784 case TARGET_EXPR:
1785 case CALL_EXPR:
1786 case METHOD_CALL_EXPR:
1787 case RTL_EXPR:
28d81abb
RK
1788 case WITH_CLEANUP_EXPR:
1789 case EXIT_EXPR:
1790 /* We don't warn about COND_EXPR because it may be a useful
1791 construct if either arm contains a side effect. */
1792 case COND_EXPR:
1793 return 0;
1794
1795 case BIND_EXPR:
1796 /* For a binding, warn if no side effect within it. */
1797 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1798
de73f171
RK
1799 case SAVE_EXPR:
1800 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1801
28d81abb
RK
1802 case TRUTH_ORIF_EXPR:
1803 case TRUTH_ANDIF_EXPR:
1804 /* In && or ||, warn if 2nd operand has no side effect. */
1805 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1806
1807 case COMPOUND_EXPR:
a646a211
JM
1808 if (TREE_NO_UNUSED_WARNING (exp))
1809 return 0;
28d81abb
RK
1810 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1811 return 1;
4d23e509
RS
1812 /* Let people do `(foo (), 0)' without a warning. */
1813 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1814 return 0;
28d81abb
RK
1815 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1816
1817 case NOP_EXPR:
1818 case CONVERT_EXPR:
b4ac57ab 1819 case NON_LVALUE_EXPR:
28d81abb
RK
1820 /* Don't warn about values cast to void. */
1821 if (TREE_TYPE (exp) == void_type_node)
1822 return 0;
1823 /* Don't warn about conversions not explicit in the user's program. */
1824 if (TREE_NO_UNUSED_WARNING (exp))
1825 return 0;
1826 /* Assignment to a cast usually results in a cast of a modify.
55cd1c09
JW
1827 Don't complain about that. There can be an arbitrary number of
1828 casts before the modify, so we must loop until we find the first
1829 non-cast expression and then test to see if that is a modify. */
1830 {
1831 tree tem = TREE_OPERAND (exp, 0);
1832
1833 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1834 tem = TREE_OPERAND (tem, 0);
1835
de73f171
RK
1836 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1837 || TREE_CODE (tem) == CALL_EXPR)
55cd1c09
JW
1838 return 0;
1839 }
d1e1adfb 1840 goto warn;
28d81abb 1841
d1e1adfb
JM
1842 case INDIRECT_REF:
1843 /* Don't warn about automatic dereferencing of references, since
1844 the user cannot control it. */
1845 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1846 return warn_if_unused_value (TREE_OPERAND (exp, 0));
0f41302f 1847 /* ... fall through ... */
d1e1adfb 1848
28d81abb 1849 default:
ddbe9812
RS
1850 /* Referencing a volatile value is a side effect, so don't warn. */
1851 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1852 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1853 && TREE_THIS_VOLATILE (exp))
1854 return 0;
d1e1adfb 1855 warn:
28d81abb
RK
1856 warning_with_file_and_line (emit_filename, emit_lineno,
1857 "value computed is not used");
1858 return 1;
1859 }
1860}
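/* Editorial note, not part of the original source: this routine is
   reached for statements that do have side effects somewhere inside.
   For example `f () + 1;' warns "value computed is not used" (the
   addition hits the default case), while `(foo (), 0)' stays silent
   thanks to the constant-second-operand test in the COMPOUND_EXPR
   case above.  */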
1861
1862/* Clear out the memory of the last expression evaluated. */
1863
1864void
1865clear_last_expr ()
1866{
1867 last_expr_type = 0;
1868}
1869
1870/* Begin a statement which will return a value.
1871 Return the RTL_EXPR for this statement expr.
1872 The caller must save that value and pass it to expand_end_stmt_expr. */
1873
1874tree
1875expand_start_stmt_expr ()
1876{
ca695ac9
JB
1877 int momentary;
1878 tree t;
1879
1880 /* When generating bytecode just note down the stack depth */
1881 if (output_bytecode)
1882 return (build_int_2 (stack_depth, 0));
1883
28d81abb
RK
1884 /* Make the RTL_EXPR node temporary, not momentary,
1885 so that rtl_expr_chain doesn't become garbage. */
ca695ac9
JB
1886 momentary = suspend_momentary ();
1887 t = make_node (RTL_EXPR);
28d81abb 1888 resume_momentary (momentary);
33c6ab80 1889 do_pending_stack_adjust ();
e922dbad 1890 start_sequence_for_rtl_expr (t);
28d81abb
RK
1891 NO_DEFER_POP;
1892 expr_stmts_for_value++;
1893 return t;
1894}
1895
1896/* Restore the previous state at the end of a statement that returns a value.
1897 Returns a tree node representing the statement's value and the
1898 insns to compute the value.
1899
1900 The nodes of that expression have been freed by now, so we cannot use them.
1901 But we don't want to do that anyway; the expression has already been
1902 evaluated and now we just want to use the value. So generate a RTL_EXPR
1903 with the proper type and RTL value.
1904
1905 If the last substatement was not an expression,
1906 return something with type `void'. */
1907
1908tree
1909expand_end_stmt_expr (t)
1910 tree t;
1911{
ca695ac9
JB
1912 if (output_bytecode)
1913 {
1914 int i;
1915 tree t;
1916
1917
1918 /* At this point, all expressions have been evaluated in order.
1919 However, all expression values have been popped when evaluated,
1920 which means we have to recover the last expression value. This is
1921 the last value removed by means of a `drop' instruction. Instead
1922 of adding code to inhibit dropping the last expression value, it
1923 is here recovered by undoing the `drop'. Since `drop' is
1924 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
0f41302f 1925 [-1]'. */
ca695ac9
JB
1926
1927 bc_adjust_stack (-1);
1928
1929 if (!last_expr_type)
1930 last_expr_type = void_type_node;
1931
1932 t = make_node (RTL_EXPR);
1933 TREE_TYPE (t) = last_expr_type;
1934 RTL_EXPR_RTL (t) = NULL;
1935 RTL_EXPR_SEQUENCE (t) = NULL;
1936
1937 /* Don't consider deleting this expr or containing exprs at tree level. */
1938 TREE_THIS_VOLATILE (t) = 1;
1939
1940 last_expr_type = 0;
1941 return t;
1942 }
1943
28d81abb
RK
1944 OK_DEFER_POP;
1945
1946 if (last_expr_type == 0)
1947 {
1948 last_expr_type = void_type_node;
1949 last_expr_value = const0_rtx;
1950 }
1951 else if (last_expr_value == 0)
1952 /* There are some cases where this can happen, such as when the
1953 statement is void type. */
1954 last_expr_value = const0_rtx;
1955 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1956 /* Remove any possible QUEUED. */
1957 last_expr_value = protect_from_queue (last_expr_value, 0);
1958
1959 emit_queue ();
1960
1961 TREE_TYPE (t) = last_expr_type;
1962 RTL_EXPR_RTL (t) = last_expr_value;
1963 RTL_EXPR_SEQUENCE (t) = get_insns ();
1964
1965 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1966
1967 end_sequence ();
1968
1969 /* Don't consider deleting this expr or containing exprs at tree level. */
1970 TREE_SIDE_EFFECTS (t) = 1;
1971 /* Propagate volatility of the actual RTL expr. */
1972 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1973
1974 last_expr_type = 0;
1975 expr_stmts_for_value--;
1976
1977 return t;
1978}
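/* Editorial sketch, not part of the original source: for a GNU
   statement expression such as `({ int y = f (x); y + 1; })' the
   expected calling sequence is roughly

	t = expand_start_stmt_expr ();
	... expand each substatement via expand_expr_stmt ...
	t = expand_end_stmt_expr (t);

   and the returned RTL_EXPR carries both the saved insn sequence and
   the value of the last substatement (`y + 1' here).  */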
1979\f
28d81abb
RK
1980/* Generate RTL for the start of an if-then. COND is the expression
1981 whose truth should be tested.
1982
1983 If EXITFLAG is nonzero, this conditional is visible to
1984 `exit_something'. */
1985
1986void
1987expand_start_cond (cond, exitflag)
1988 tree cond;
1989 int exitflag;
1990{
1991 struct nesting *thiscond = ALLOC_NESTING ();
1992
1993 /* Make an entry on cond_stack for the cond we are entering. */
1994
1995 thiscond->next = cond_stack;
1996 thiscond->all = nesting_stack;
1997 thiscond->depth = ++nesting_depth;
1998 thiscond->data.cond.next_label = gen_label_rtx ();
1999 /* Before we encounter an `else', we don't need a separate exit label
2000 unless there are supposed to be exit statements
2001 to exit this conditional. */
2002 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2003 thiscond->data.cond.endif_label = thiscond->exit_label;
2004 cond_stack = thiscond;
2005 nesting_stack = thiscond;
2006
ca695ac9
JB
2007 if (output_bytecode)
2008 bc_expand_start_cond (cond, exitflag);
2009 else
2010 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
2011}
2012
2013/* Generate RTL between the then-clause and the elseif-clause
2014 of an if-then-elseif-.... */
2015
2016void
2017expand_start_elseif (cond)
2018 tree cond;
2019{
2020 if (cond_stack->data.cond.endif_label == 0)
2021 cond_stack->data.cond.endif_label = gen_label_rtx ();
2022 emit_jump (cond_stack->data.cond.endif_label);
2023 emit_label (cond_stack->data.cond.next_label);
2024 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2025 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
2026}
2027
2028/* Generate RTL between the then-clause and the else-clause
2029 of an if-then-else. */
2030
2031void
2032expand_start_else ()
2033{
2034 if (cond_stack->data.cond.endif_label == 0)
2035 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9
JB
2036
2037 if (output_bytecode)
2038 {
2039 bc_expand_start_else ();
2040 return;
2041 }
2042
28d81abb
RK
2043 emit_jump (cond_stack->data.cond.endif_label);
2044 emit_label (cond_stack->data.cond.next_label);
0f41302f 2045 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
2046}
2047
d947ba59
RK
2048/* After calling expand_start_else, turn this "else" into an "else if"
2049 by providing another condition. */
2050
2051void
2052expand_elseif (cond)
2053 tree cond;
2054{
2055 cond_stack->data.cond.next_label = gen_label_rtx ();
2056 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2057}
2058
28d81abb
RK
2059/* Generate RTL for the end of an if-then.
2060 Pop the record for it off of cond_stack. */
2061
2062void
2063expand_end_cond ()
2064{
2065 struct nesting *thiscond = cond_stack;
2066
ca695ac9
JB
2067 if (output_bytecode)
2068 bc_expand_end_cond ();
2069 else
2070 {
2071 do_pending_stack_adjust ();
2072 if (thiscond->data.cond.next_label)
2073 emit_label (thiscond->data.cond.next_label);
2074 if (thiscond->data.cond.endif_label)
2075 emit_label (thiscond->data.cond.endif_label);
2076 }
28d81abb
RK
2077
2078 POPSTACK (cond_stack);
2079 last_expr_type = 0;
2080}
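/* Editorial sketch, not part of the original source: for
   `if (c) A; else B;' the parser is expected to call, in order,

	expand_start_cond (c, 0);	jumps to next_label if C is false
	... expand A ...
	expand_start_else ();		jump to endif_label; emit next_label
	... expand B ...
	expand_end_cond ();		emit endif_label

   with expand_start_elseif handling each `else if' link the same way.  */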
ca695ac9
JB
2081
2082
2083/* Generate code for the start of an if-then. COND is the expression
2084 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2085 is to be visible to exit_something. It is assumed that the caller
0f41302f 2086 has pushed the previous context on the cond stack. */
704f4dca
RK
2087
2088static void
ca695ac9
JB
2089bc_expand_start_cond (cond, exitflag)
2090 tree cond;
2091 int exitflag;
2092{
2093 struct nesting *thiscond = cond_stack;
2094
2095 thiscond->data.case_stmt.nominal_type = cond;
8e2b13c3
RK
2096 if (! exitflag)
2097 thiscond->exit_label = gen_label_rtx ();
ca695ac9 2098 bc_expand_expr (cond);
c3a2235b 2099 bc_emit_bytecode (xjumpifnot);
c53e9440 2100 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
ca695ac9
JB
2101
2102#ifdef DEBUG_PRINT_CODE
2103 fputc ('\n', stderr);
2104#endif
2105}
2106
2107/* Generate the label for the end of an if with
2108 no else-clause. */
704f4dca
RK
2109
2110static void
ca695ac9
JB
2111bc_expand_end_cond ()
2112{
2113 struct nesting *thiscond = cond_stack;
2114
c53e9440 2115 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
ca695ac9
JB
2116}
2117
2118/* Generate code for the start of the else-clause of
2119 an if-then-else. */
704f4dca
RK
2120
2121static void
ca695ac9
JB
2122bc_expand_start_else ()
2123{
2124 struct nesting *thiscond = cond_stack;
2125
2126 thiscond->data.cond.endif_label = thiscond->exit_label;
2127 thiscond->exit_label = gen_label_rtx ();
2128 bc_emit_bytecode (jump);
c53e9440 2129 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
ca695ac9
JB
2130
2131#ifdef DEBUG_PRINT_CODE
2132 fputc ('\n', stderr);
2133#endif
2134
c53e9440 2135 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
ca695ac9 2136}
28d81abb
RK
2137\f
2138/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2139 loop should be exited by `exit_something'. This is a loop for which
2140 `expand_continue' will jump to the top of the loop.
2141
2142 Make an entry on loop_stack to record the labels associated with
2143 this loop. */
2144
2145struct nesting *
2146expand_start_loop (exit_flag)
2147 int exit_flag;
2148{
2149 register struct nesting *thisloop = ALLOC_NESTING ();
2150
2151 /* Make an entry on loop_stack for the loop we are entering. */
2152
2153 thisloop->next = loop_stack;
2154 thisloop->all = nesting_stack;
2155 thisloop->depth = ++nesting_depth;
2156 thisloop->data.loop.start_label = gen_label_rtx ();
2157 thisloop->data.loop.end_label = gen_label_rtx ();
8afad312 2158 thisloop->data.loop.alt_end_label = 0;
28d81abb
RK
2159 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2160 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2161 loop_stack = thisloop;
2162 nesting_stack = thisloop;
2163
ca695ac9
JB
2164 if (output_bytecode)
2165 {
c53e9440 2166 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
ca695ac9
JB
2167 return thisloop;
2168 }
2169
28d81abb
RK
2170 do_pending_stack_adjust ();
2171 emit_queue ();
37366632 2172 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
28d81abb
RK
2173 emit_label (thisloop->data.loop.start_label);
2174
2175 return thisloop;
2176}
2177
2178/* Like expand_start_loop but for a loop where the continuation point
2179 (for expand_continue_loop) will be specified explicitly. */
2180
2181struct nesting *
2182expand_start_loop_continue_elsewhere (exit_flag)
2183 int exit_flag;
2184{
2185 struct nesting *thisloop = expand_start_loop (exit_flag);
2186 loop_stack->data.loop.continue_label = gen_label_rtx ();
2187 return thisloop;
2188}
2189
2190/* Specify the continuation point for a loop started with
2191 expand_start_loop_continue_elsewhere.
2192 Use this at the point in the code to which a continue statement
2193 should jump. */
2194
2195void
2196expand_loop_continue_here ()
2197{
ca695ac9
JB
2198 if (output_bytecode)
2199 {
c53e9440 2200 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
ca695ac9
JB
2201 return;
2202 }
28d81abb 2203 do_pending_stack_adjust ();
37366632 2204 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
28d81abb
RK
2205 emit_label (loop_stack->data.loop.continue_label);
2206}
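/* Editorial sketch, not part of the original source: a C `for' loop
   needs `continue' to reach the increment, not the top, so the front
   end is expected to use

	expand_start_loop_continue_elsewhere (1);
	... test and body ...
	expand_loop_continue_here ();	`continue' jumps here
	... increment ...
	expand_end_loop ();

   whereas a `while' loop can use plain expand_start_loop.  */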
2207
ca695ac9 2208/* End a loop when generating bytecode: jump back to the top and define the exit label. */
704f4dca 2209
ca695ac9
JB
2210static void
2211bc_expand_end_loop ()
2212{
2213 struct nesting *thisloop = loop_stack;
2214
2215 bc_emit_bytecode (jump);
c53e9440 2216 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
ca695ac9
JB
2217
2218#ifdef DEBUG_PRINT_CODE
2219 fputc ('\n', stderr);
2220#endif
2221
c53e9440 2222 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
ca695ac9
JB
2223 POPSTACK (loop_stack);
2224 last_expr_type = 0;
2225}
2226
2227
28d81abb
RK
2228/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2229 Pop the block off of loop_stack. */
2230
2231void
2232expand_end_loop ()
2233{
ca695ac9
JB
2234 register rtx insn;
2235 register rtx start_label;
28d81abb
RK
2236 rtx last_test_insn = 0;
2237 int num_insns = 0;
ca695ac9
JB
2238
2239 if (output_bytecode)
2240 {
2241 bc_expand_end_loop ();
2242 return;
2243 }
2244
2245 insn = get_last_insn ();
2246 start_label = loop_stack->data.loop.start_label;
28d81abb
RK
2247
2248 /* Mark the continue-point at the top of the loop if none elsewhere. */
2249 if (start_label == loop_stack->data.loop.continue_label)
2250 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2251
2252 do_pending_stack_adjust ();
2253
2254 /* If optimizing, perhaps reorder the loop. If the loop
2255 starts with a conditional exit, roll that to the end
2256 where it will optimize together with the jump back.
2257
2258 We look for the last conditional branch to the exit that we encounter
2259 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2260 branch to the exit first, use it.
2261
2262 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2263 because moving them is not valid. */
2264
2265 if (optimize
2266 &&
2267 ! (GET_CODE (insn) == JUMP_INSN
2268 && GET_CODE (PATTERN (insn)) == SET
2269 && SET_DEST (PATTERN (insn)) == pc_rtx
2270 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2271 {
2272 /* Scan insns from the top of the loop looking for a qualified
2273 conditional exit. */
2274 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2275 insn = NEXT_INSN (insn))
2276 {
2277 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2278 break;
2279
2280 if (GET_CODE (insn) == NOTE
2281 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2282 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2283 break;
2284
2285 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2286 num_insns++;
2287
2288 if (last_test_insn && num_insns > 30)
2289 break;
2290
2291 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2292 && SET_DEST (PATTERN (insn)) == pc_rtx
2293 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2294 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
8afad312
JW
2295 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2296 == loop_stack->data.loop.end_label)
2297 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2298 == loop_stack->data.loop.alt_end_label)))
28d81abb 2299 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
8afad312
JW
2300 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2301 == loop_stack->data.loop.end_label)
2302 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2303 == loop_stack->data.loop.alt_end_label)))))
28d81abb
RK
2304 last_test_insn = insn;
2305
2306 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2307 && GET_CODE (PATTERN (insn)) == SET
2308 && SET_DEST (PATTERN (insn)) == pc_rtx
2309 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
8afad312
JW
2310 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2311 == loop_stack->data.loop.end_label)
2312 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2313 == loop_stack->data.loop.alt_end_label)))
28d81abb
RK
2314 /* Include BARRIER. */
2315 last_test_insn = NEXT_INSN (insn);
2316 }
2317
2318 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2319 {
2320 /* We found one. Move everything from there up
2321 to the end of the loop, and add a jump into the loop
2322 to jump to there. */
2323 register rtx newstart_label = gen_label_rtx ();
2324 register rtx start_move = start_label;
2325
b4ac57ab 2326 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
28d81abb
RK
2327 then we want to move this note also. */
2328 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2329 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2330 == NOTE_INSN_LOOP_CONT))
2331 start_move = PREV_INSN (start_move);
2332
2333 emit_label_after (newstart_label, PREV_INSN (start_move));
2334 reorder_insns (start_move, last_test_insn, get_last_insn ());
2335 emit_jump_insn_after (gen_jump (start_label),
2336 PREV_INSN (newstart_label));
2337 emit_barrier_after (PREV_INSN (newstart_label));
2338 start_label = newstart_label;
2339 }
2340 }
2341
2342 emit_jump (start_label);
37366632 2343 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
28d81abb
RK
2344 emit_label (loop_stack->data.loop.end_label);
2345
2346 POPSTACK (loop_stack);
2347
2348 last_expr_type = 0;
2349}
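/* Editorial illustration, not part of the original source: the
   reordering above turns a loop emitted as

	start:	if (! cond) goto end;
		body
		goto start;
	end:

   into, roughly,

		goto start;
	newstart:
		body
	start:	if (! cond) goto end;
		goto newstart;
	end:

   after which jump optimization can merge the exit test with the jump
   back into a single `if (cond) goto newstart', so each iteration
   executes one jump instead of two.  */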
2350
2351/* Generate a jump to the current loop's continue-point.
2352 This is usually the top of the loop, but may be specified
2353 explicitly elsewhere. If not currently inside a loop,
2354 return 0 and do nothing; caller will print an error message. */
2355
2356int
2357expand_continue_loop (whichloop)
2358 struct nesting *whichloop;
2359{
2360 last_expr_type = 0;
2361 if (whichloop == 0)
2362 whichloop = loop_stack;
2363 if (whichloop == 0)
2364 return 0;
37366632
RK
2365 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2366 NULL_RTX);
28d81abb
RK
2367 return 1;
2368}
2369
2370/* Generate a jump to exit the current loop. If not currently inside a loop,
2371 return 0 and do nothing; caller will print an error message. */
2372
2373int
2374expand_exit_loop (whichloop)
2375 struct nesting *whichloop;
2376{
2377 last_expr_type = 0;
2378 if (whichloop == 0)
2379 whichloop = loop_stack;
2380 if (whichloop == 0)
2381 return 0;
37366632 2382 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
28d81abb
RK
2383 return 1;
2384}
2385
2386/* Generate a conditional jump to exit the current loop if COND
2387 evaluates to zero. If not currently inside a loop,
2388 return 0 and do nothing; caller will print an error message. */
2389
2390int
2391expand_exit_loop_if_false (whichloop, cond)
2392 struct nesting *whichloop;
2393 tree cond;
2394{
2395 last_expr_type = 0;
2396 if (whichloop == 0)
2397 whichloop = loop_stack;
2398 if (whichloop == 0)
2399 return 0;
ca695ac9
JB
2400 if (output_bytecode)
2401 {
2402 bc_expand_expr (cond);
c3a2235b
RS
2403 bc_expand_goto_internal (xjumpifnot,
2404 BYTECODE_BC_LABEL (whichloop->exit_label),
704f4dca 2405 NULL_TREE);
ca695ac9
JB
2406 }
2407 else
d902c7ea
JW
2408 {
2409 /* In order to handle fixups, we actually create a conditional jump
2410 around an unconditional branch to exit the loop. If fixups are
2411 necessary, they go before the unconditional branch. */
2412
2413 rtx label = gen_label_rtx ();
8afad312
JW
2414 rtx last_insn;
2415
d902c7ea 2416 do_jump (cond, NULL_RTX, label);
8afad312
JW
2417 last_insn = get_last_insn ();
2418 if (GET_CODE (last_insn) == CODE_LABEL)
2419 whichloop->data.loop.alt_end_label = last_insn;
d902c7ea
JW
2420 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2421 NULL_RTX);
2422 emit_label (label);
2423 }
ca695ac9 2424
28d81abb
RK
2425 return 1;
2426}
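/* Editorial sketch, not part of the original source: the non-bytecode
   case above emits

	if (cond) goto label;		inverted test
	... fixups, if any, go here ...
	goto end_label;
   label:

   rather than a single conditional jump to end_label, precisely so
   that any needed fixups have an insertion point before the exit
   branch.  */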
2427
2428/* Return non-zero if we should preserve sub-expressions as separate
2429 pseudos. We never do so if we aren't optimizing. We always do so
2430 if -fexpensive-optimizations.
2431
2432 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2433 the loop may still be a small one. */
2434
2435int
2436preserve_subexpressions_p ()
2437{
2438 rtx insn;
2439
2440 if (flag_expensive_optimizations)
2441 return 1;
2442
2443 if (optimize == 0 || loop_stack == 0)
2444 return 0;
2445
2446 insn = get_last_insn_anywhere ();
2447
2448 return (insn
2449 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2450 < n_non_fixed_regs * 3));
2451
2452}
2453
2454/* Generate a jump to exit the current loop, conditional, binding contour
2455 or case statement. Not all such constructs are visible to this function,
2456 only those started with EXIT_FLAG nonzero. Individual languages use
2457 the EXIT_FLAG parameter to control which kinds of constructs you can
2458 exit this way.
2459
2460 If not currently inside anything that can be exited,
2461 return 0 and do nothing; caller will print an error message. */
2462
2463int
2464expand_exit_something ()
2465{
2466 struct nesting *n;
2467 last_expr_type = 0;
2468 for (n = nesting_stack; n; n = n->all)
2469 if (n->exit_label != 0)
2470 {
37366632 2471 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
28d81abb
RK
2472 return 1;
2473 }
2474
2475 return 0;
2476}
2477\f
2478/* Generate RTL to return from the current function, with no value.
2479 (That is, we do not do anything about returning any value.) */
2480
2481void
2482expand_null_return ()
2483{
2484 struct nesting *block = block_stack;
2485 rtx last_insn = 0;
2486
ca695ac9
JB
2487 if (output_bytecode)
2488 {
2489 bc_emit_instruction (ret);
2490 return;
2491 }
2492
28d81abb
RK
2493 /* Does any pending block have cleanups? */
2494
2495 while (block && block->data.block.cleanups == 0)
2496 block = block->next;
2497
2498 /* If yes, use a goto to return, since that runs cleanups. */
2499
2500 expand_null_return_1 (last_insn, block != 0);
2501}
2502
2503/* Generate RTL to return from the current function, with value VAL. */
2504
8d800403 2505static void
28d81abb
RK
2506expand_value_return (val)
2507 rtx val;
2508{
2509 struct nesting *block = block_stack;
2510 rtx last_insn = get_last_insn ();
2511 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2512
2513 /* Copy the value to the return location
2514 unless it's already there. */
2515
2516 if (return_reg != val)
77636079
RS
2517 {
2518#ifdef PROMOTE_FUNCTION_RETURN
77636079
RS
2519 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2520 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
2521 enum machine_mode mode
2522 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2523 &unsignedp, 1);
77636079
RS
2524
2525 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
3af6dfd8 2526 convert_move (return_reg, val, unsignedp);
77636079
RS
2527 else
2528#endif
2529 emit_move_insn (return_reg, val);
2530 }
28d81abb
RK
2531 if (GET_CODE (return_reg) == REG
2532 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2533 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
e5eeae65
JW
2534 /* Handle calls that return values in multiple non-contiguous locations.
2535 The Irix 6 ABI has examples of this. */
2536 else if (GET_CODE (return_reg) == PARALLEL)
2537 {
2538 int i;
2539
2540 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2541 {
2542 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2543
2544 if (GET_CODE (x) == REG
2545 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2546 emit_insn (gen_rtx (USE, VOIDmode, x));
2547 }
2548 }
28d81abb
RK
2549
2550 /* Does any pending block have cleanups? */
2551
2552 while (block && block->data.block.cleanups == 0)
2553 block = block->next;
2554
2555 /* If yes, use a goto to return, since that runs cleanups.
2556 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2557
2558 expand_null_return_1 (last_insn, block != 0);
2559}
2560
2561/* Output a return with no value. If LAST_INSN is nonzero,
2562 pretend that the return takes place after LAST_INSN.
2563 If USE_GOTO is nonzero then don't use a return instruction;
2564 go to the return label instead. This causes any cleanups
2565 of pending blocks to be executed normally. */
2566
2567static void
2568expand_null_return_1 (last_insn, use_goto)
2569 rtx last_insn;
2570 int use_goto;
2571{
2572 rtx end_label = cleanup_label ? cleanup_label : return_label;
2573
2574 clear_pending_stack_adjust ();
2575 do_pending_stack_adjust ();
2576 last_expr_type = 0;
2577
2578 /* PCC-struct return always uses an epilogue. */
2579 if (current_function_returns_pcc_struct || use_goto)
2580 {
2581 if (end_label == 0)
2582 end_label = return_label = gen_label_rtx ();
37366632 2583 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2584 return;
2585 }
2586
2587 /* Otherwise output a simple return-insn if one is available,
2588 unless it won't do the job. */
2589#ifdef HAVE_return
2590 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2591 {
2592 emit_jump_insn (gen_return ());
2593 emit_barrier ();
2594 return;
2595 }
2596#endif
2597
2598 /* Otherwise jump to the epilogue. */
37366632 2599 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2600}
2601\f
2602/* Generate RTL to evaluate the expression RETVAL and return it
2603 from the current function. */
2604
2605void
2606expand_return (retval)
2607 tree retval;
2608{
2609 /* If there are any cleanups to be performed, then they will
2610 be inserted following LAST_INSN. It is desirable
2611 that the last_insn, for such purposes, should be the
2612 last insn before computing the return value. Otherwise, cleanups
2613 which call functions can clobber the return value. */
2614 /* ??? rms: I think that is erroneous, because in C++ it would
2615 run destructors on variables that might be used in the subsequent
2616 computation of the return value. */
2617 rtx last_insn = 0;
2618 register rtx val = 0;
2619 register rtx op0;
2620 tree retval_rhs;
2621 int cleanups;
2622 struct nesting *block;
2623
ca695ac9 2624 /* Bytecode returns are quite simple, just leave the result on the
0f41302f 2625 arithmetic stack. */
ca695ac9
JB
2626 if (output_bytecode)
2627 {
2628 bc_expand_expr (retval);
2629 bc_emit_instruction (ret);
2630 return;
2631 }
2632
28d81abb
RK
2633 /* If function wants no value, give it none. */
2634 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2635 {
37366632 2636 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2637 emit_queue ();
28d81abb
RK
2638 expand_null_return ();
2639 return;
2640 }
2641
2642 /* Are any cleanups needed? E.g. C++ destructors to be run? */
7a9a00be
MS
2643 /* This is not sufficient. We also need to watch for cleanups of the
2644 expression we are about to expand. Unfortunately, we cannot know
2645 if it has cleanups until we expand it, and we want to change how we
2646 expand it depending upon if we need cleanups. We can't win. */
2647#if 0
28d81abb 2648 cleanups = any_pending_cleanups (1);
7a9a00be
MS
2649#else
2650 cleanups = 1;
2651#endif
28d81abb
RK
2652
2653 if (TREE_CODE (retval) == RESULT_DECL)
2654 retval_rhs = retval;
2655 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2656 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2657 retval_rhs = TREE_OPERAND (retval, 1);
2658 else if (TREE_TYPE (retval) == void_type_node)
2659 /* Recognize tail-recursive call to void function. */
2660 retval_rhs = retval;
2661 else
2662 retval_rhs = NULL_TREE;
2663
2664 /* Only use `last_insn' if there are cleanups which must be run. */
2665 if (cleanups || cleanup_label != 0)
2666 last_insn = get_last_insn ();
2667
2668 /* Distribute return down conditional expr if either of the sides
2669 may involve tail recursion (see test below). This enhances the number
2670 of tail recursions we see. Don't do this always since it can produce
2671 sub-optimal code in some cases and we distribute assignments into
2672 conditional expressions when it would help. */
2673
2674 if (optimize && retval_rhs != 0
2675 && frame_offset == 0
2676 && TREE_CODE (retval_rhs) == COND_EXPR
2677 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2678 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2679 {
2680 rtx label = gen_label_rtx ();
a0a34f94
RK
2681 tree expr;
2682
37366632 2683 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
a0a34f94
RK
2684 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2685 DECL_RESULT (current_function_decl),
2686 TREE_OPERAND (retval_rhs, 1));
2687 TREE_SIDE_EFFECTS (expr) = 1;
2688 expand_return (expr);
28d81abb 2689 emit_label (label);
a0a34f94
RK
2690
2691 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2692 DECL_RESULT (current_function_decl),
2693 TREE_OPERAND (retval_rhs, 2));
2694 TREE_SIDE_EFFECTS (expr) = 1;
2695 expand_return (expr);
28d81abb
RK
2696 return;
2697 }
2698
2699 /* For tail-recursive call to current function,
2700 just jump back to the beginning.
2701 It's unsafe if any auto variable in this function
2702 has its address taken; for simplicity,
2703 require stack frame to be empty. */
2704 if (optimize && retval_rhs != 0
2705 && frame_offset == 0
2706 && TREE_CODE (retval_rhs) == CALL_EXPR
2707 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2708 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2709 /* Finish checking validity, and if valid emit code
2710 to set the argument variables for the new call. */
2711 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2712 DECL_ARGUMENTS (current_function_decl)))
2713 {
2714 if (tail_recursion_label == 0)
2715 {
2716 tail_recursion_label = gen_label_rtx ();
2717 emit_label_after (tail_recursion_label,
2718 tail_recursion_reentry);
2719 }
a3229491 2720 emit_queue ();
37366632 2721 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
28d81abb
RK
2722 emit_barrier ();
2723 return;
2724 }
2725#ifdef HAVE_return
2726 /* This optimization is safe if there are local cleanups
2727 because expand_null_return takes care of them.
2728 ??? I think it should also be safe when there is a cleanup label,
2729 because expand_null_return takes care of them, too.
2730 Any reason why not? */
2731 if (HAVE_return && cleanup_label == 0
5eb94e4e
RK
2732 && ! current_function_returns_pcc_struct
2733 && BRANCH_COST <= 1)
28d81abb
RK
2734 {
2735 /* If this is return x == y; then generate
2736 if (x == y) return 1; else return 0;
3f8b69de
TG
2737 if we can do it with explicit return insns and branches are cheap,
2738 but not if we have the corresponding scc insn. */
2739 int has_scc = 0;
28d81abb
RK
2740 if (retval_rhs)
2741 switch (TREE_CODE (retval_rhs))
2742 {
2743 case EQ_EXPR:
3f8b69de
TG
2744#ifdef HAVE_seq
2745 has_scc = HAVE_seq;
2746#endif
28d81abb 2747 case NE_EXPR:
3f8b69de
TG
2748#ifdef HAVE_sne
2749 has_scc = HAVE_sne;
2750#endif
28d81abb 2751 case GT_EXPR:
3f8b69de
TG
2752#ifdef HAVE_sgt
2753 has_scc = HAVE_sgt;
2754#endif
28d81abb 2755 case GE_EXPR:
3f8b69de
TG
2756#ifdef HAVE_sge
2757 has_scc = HAVE_sge;
2758#endif
28d81abb 2759 case LT_EXPR:
3f8b69de
TG
2760#ifdef HAVE_slt
2761 has_scc = HAVE_slt;
2762#endif
28d81abb 2763 case LE_EXPR:
3f8b69de
TG
2764#ifdef HAVE_sle
2765 has_scc = HAVE_sle;
2766#endif
28d81abb
RK
2767 case TRUTH_ANDIF_EXPR:
2768 case TRUTH_ORIF_EXPR:
2769 case TRUTH_AND_EXPR:
2770 case TRUTH_OR_EXPR:
2771 case TRUTH_NOT_EXPR:
94ed3915 2772 case TRUTH_XOR_EXPR:
3f8b69de
TG
2773 if (! has_scc)
2774 {
2775 op0 = gen_label_rtx ();
2776 jumpifnot (retval_rhs, op0);
2777 expand_value_return (const1_rtx);
2778 emit_label (op0);
2779 expand_value_return (const0_rtx);
2780 return;
2781 }
28d81abb
RK
2782 }
2783 }
2784#endif /* HAVE_return */
2785
4c485b63
JL
2786 /* If the result is an aggregate that is being returned in one (or more)
2787 registers, load the registers here. The compiler currently can't handle
2788 copying a BLKmode value into registers. We could put this code in a
2789 more general area (for use by everyone instead of just function
2790 call/return), but until this feature is generally usable it is kept here
3ffeb8f1
JW
2791 (and in expand_call). The value must go into a pseudo in case there
2792 are cleanups that will clobber the real return register. */
4c485b63
JL
2793
2794 if (retval_rhs != 0
2795 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2796 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2797 {
a7f875d7 2798 int i, bitpos, xbitpos;
4c485b63
JL
2799 int big_endian_correction = 0;
2800 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2801 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
a7f875d7 2802 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
4c485b63 2803 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
a7f875d7 2804 rtx result_reg, src, dst;
4c485b63 2805 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 2806 enum machine_mode tmpmode, result_reg_mode;
4c485b63 2807
a7f875d7
RK
2808 /* Structures whose size is not a multiple of a word are aligned
2809 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2810 machine, this means we must skip the empty high order bytes when
2811 calculating the bit offset. */
2812 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2813 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2814 * BITS_PER_UNIT));
2815
2816 /* Copy the structure BITSIZE bits at a time. */
2817 for (bitpos = 0, xbitpos = big_endian_correction;
2818 bitpos < bytes * BITS_PER_UNIT;
2819 bitpos += bitsize, xbitpos += bitsize)
4c485b63 2820 {
a7f875d7 2821 /* We need a new destination pseudo each time xbitpos is
abc95ed3 2822 on a word boundary and when xbitpos == big_endian_correction
a7f875d7
RK
2823 (the first time through). */
2824 if (xbitpos % BITS_PER_WORD == 0
2825 || xbitpos == big_endian_correction)
4c485b63 2826 {
a7f875d7
RK
2827 /* Generate an appropriate register. */
2828 dst = gen_reg_rtx (word_mode);
2829 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2830
2831 /* Clobber the destination before we move anything into it. */
2832 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
4c485b63 2833 }
a7f875d7
RK
2834
2835 /* We need a new source operand each time bitpos is on a word
2836 boundary. */
2837 if (bitpos % BITS_PER_WORD == 0)
2838 src = operand_subword_force (result_val,
2839 bitpos / BITS_PER_WORD,
2840 BLKmode);
2841
2842 /* Use bitpos for the source extraction (left justified) and
2843 xbitpos for the destination store (right justified). */
2844 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2845 extract_bit_field (src, bitsize,
2846 bitpos % BITS_PER_WORD, 1,
2847 NULL_RTX, word_mode,
2848 word_mode,
2849 bitsize / BITS_PER_UNIT,
2850 BITS_PER_WORD),
2851 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
4c485b63
JL
2852 }
2853
4c485b63
JL
2854 /* Find the smallest integer mode large enough to hold the
2855 entire structure and use that mode instead of BLKmode
2856 on the USE insn for the return register. */
2857 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2858 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2859 tmpmode != MAX_MACHINE_MODE;
2860 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3ffeb8f1
JW
2861 {
2862 /* Have we found a large enough mode? */
2863 if (GET_MODE_SIZE (tmpmode) >= bytes)
2864 break;
2865 }
4c485b63
JL
2866
2867 /* No suitable mode found. */
2868 if (tmpmode == MAX_MACHINE_MODE)
3ffeb8f1 2869 abort ();
4c485b63 2870
3ffeb8f1
JW
2871 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2872
af55da56
JW
2873 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2874 result_reg_mode = word_mode;
2875 else
2876 result_reg_mode = tmpmode;
2877 result_reg = gen_reg_rtx (result_reg_mode);
2878
3ffeb8f1
JW
2879 /* Now that the value is in pseudos, copy it to the result reg(s). */
2880 emit_queue ();
2881 free_temp_slots ();
2882 for (i = 0; i < n_regs; i++)
af55da56 2883 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 2884 result_pseudos[i]);
4c485b63 2885
af55da56
JW
2886 if (tmpmode != result_reg_mode)
2887 result_reg = gen_lowpart (tmpmode, result_reg);
2888
4c485b63
JL
2889 expand_value_return (result_reg);
2890 }
2891 else if (cleanups
28d81abb
RK
2892 && retval_rhs != 0
2893 && TREE_TYPE (retval_rhs) != void_type_node
2894 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2895 {
2896 /* Calculate the return value into a pseudo reg. */
37366632 2897 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
28d81abb
RK
2898 emit_queue ();
2899 /* All temporaries have now been used. */
2900 free_temp_slots ();
2901 /* Return the calculated value, doing cleanups first. */
2902 expand_value_return (val);
2903 }
2904 else
2905 {
2906 /* No cleanups or no hard reg used;
2907 calculate value into hard return reg. */
cba389cd 2908 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb
RK
2909 emit_queue ();
2910 free_temp_slots ();
2911 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2912 }
2913}
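/* Editorial illustration, not part of the original source: the
   COND_EXPR distribution above rewrites

	return p ? f (x) : g (x);

   as the equivalent of `if (p) return f (x); else return g (x);' so
   that each arm can be examined separately for the tail-recursion
   optimization that follows.  */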
2914
2915/* Return 1 if the end of the generated RTX is not a barrier.
2916 This means code already compiled can drop through. */
2917
2918int
2919drop_through_at_end_p ()
2920{
2921 rtx insn = get_last_insn ();
2922 while (insn && GET_CODE (insn) == NOTE)
2923 insn = PREV_INSN (insn);
2924 return insn && GET_CODE (insn) != BARRIER;
2925}
2926\f
2927/* Emit code to alter this function's formal parms for a tail-recursive call.
2928 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2929 FORMALS is the chain of decls of formals.
2930 Return 1 if this can be done;
2931 otherwise return 0 and do not emit any code. */
2932
2933static int
2934tail_recursion_args (actuals, formals)
2935 tree actuals, formals;
2936{
2937 register tree a = actuals, f = formals;
2938 register int i;
2939 register rtx *argvec;
2940
2941 /* Check that number and types of actuals are compatible
2942 with the formals. This is not always true in valid C code.
2943 Also check that no formal needs to be addressable
2944 and that all formals are scalars. */
2945
2946 /* Also count the args. */
2947
2948 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2949 {
5c7fe359
RK
2950 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2951 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
28d81abb
RK
2952 return 0;
2953 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2954 return 0;
2955 }
2956 if (a != 0 || f != 0)
2957 return 0;
2958
2959 /* Compute all the actuals. */
2960
2961 argvec = (rtx *) alloca (i * sizeof (rtx));
2962
2963 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 2964 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
28d81abb
RK
2965
2966 /* Find which actual values refer to current values of previous formals.
2967 Copy each of them now, before any formal is changed. */
2968
2969 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2970 {
2971 int copy = 0;
2972 register int j;
2973 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2974 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2975 { copy = 1; break; }
2976 if (copy)
2977 argvec[i] = copy_to_reg (argvec[i]);
2978 }
2979
2980 /* Store the values of the actuals into the formals. */
2981
2982 for (f = formals, a = actuals, i = 0; f;
2983 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2984 {
98f3b471 2985 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
28d81abb
RK
2986 emit_move_insn (DECL_RTL (f), argvec[i]);
2987 else
2988 convert_move (DECL_RTL (f), argvec[i],
2989 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2990 }
2991
2992 free_temp_slots ();
2993 return 1;
2994}
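/* Editorial illustration, not part of the original source: for

	int f (int a, int b) { ... return f (b, a + 1); ... }

   the actual `a + 1' mentions formal `a', which is overwritten first
   (with the value of `b'), so tail_recursion_args copies `a + 1' to a
   temporary before storing into any formal; the jump back to
   tail_recursion_label then replaces the call.  */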
2995\f
2996/* Generate the RTL code for entering a binding contour.
2997 The variables are declared one by one, by calls to `expand_decl'.
2998
2999 EXIT_FLAG is nonzero if this construct should be visible to
3000 `exit_something'. */
3001
3002void
3003expand_start_bindings (exit_flag)
3004 int exit_flag;
3005{
3006 struct nesting *thisblock = ALLOC_NESTING ();
0575fe3c 3007 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
28d81abb
RK
3008
3009 /* Make an entry on block_stack for the block we are entering. */
3010
3011 thisblock->next = block_stack;
3012 thisblock->all = nesting_stack;
3013 thisblock->depth = ++nesting_depth;
3014 thisblock->data.block.stack_level = 0;
3015 thisblock->data.block.cleanups = 0;
3016 thisblock->data.block.function_call_count = 0;
3017#if 0
3018 if (block_stack)
3019 {
3020 if (block_stack->data.block.cleanups == NULL_TREE
3021 && (block_stack->data.block.outer_cleanups == NULL_TREE
3022 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3023 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3024 else
3025 thisblock->data.block.outer_cleanups
3026 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3027 block_stack->data.block.outer_cleanups);
3028 }
3029 else
3030 thisblock->data.block.outer_cleanups = 0;
3031#endif
3032#if 1
3033 if (block_stack
3034 && !(block_stack->data.block.cleanups == NULL_TREE
3035 && block_stack->data.block.outer_cleanups == NULL_TREE))
3036 thisblock->data.block.outer_cleanups
3037 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3038 block_stack->data.block.outer_cleanups);
3039 else
3040 thisblock->data.block.outer_cleanups = 0;
3041#endif
3042 thisblock->data.block.label_chain = 0;
3043 thisblock->data.block.innermost_stack_block = stack_block_stack;
3044 thisblock->data.block.first_insn = note;
3045 thisblock->data.block.block_start_count = ++block_start_count;
3046 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3047 block_stack = thisblock;
3048 nesting_stack = thisblock;
3049
ca695ac9
JB
3050 if (!output_bytecode)
3051 {
3052 /* Make a new level for allocating stack slots. */
3053 push_temp_slots ();
3054 }
28d81abb
RK
3055}
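/* Editorial sketch, not part of the original source: a brace-level
   block such as `{ int v = e; ... }' is expected to be expanded as

	expand_start_bindings (0);
	expand_decl (v); expand_decl_init (v);
	... statements ...
	expand_end_bindings (decls, 1, 0);

   (DECLS being the front end's chain of VAR_DECLs for the block), with
   the NOTE_INSN_BLOCK_BEG/END notes delimiting the scope.  */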
3056
7629c936
RS
3057/* Given a pointer to a BLOCK node, save a pointer to the most recently
3058 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3059 BLOCK node. */
3060
3061void
3062remember_end_note (block)
3063 register tree block;
3064{
3065 BLOCK_END_NOTE (block) = last_block_end_note;
3066 last_block_end_note = NULL_RTX;
3067}
3068
28d81abb
RK
3069/* Generate RTL code to terminate a binding contour.
3070 VARS is the chain of VAR_DECL nodes
3071 for the variables bound in this contour.
3072 MARK_ENDS is nonzero if we should put a note at the beginning
3073 and end of this binding contour.
3074
3075 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3076 (That is true automatically if the contour has a saved stack level.) */
3077
3078void
3079expand_end_bindings (vars, mark_ends, dont_jump_in)
3080 tree vars;
3081 int mark_ends;
3082 int dont_jump_in;
3083{
3084 register struct nesting *thisblock = block_stack;
3085 register tree decl;
3086
ca695ac9
JB
3087 if (output_bytecode)
3088 {
3089 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3090 return;
3091 }
3092
28d81abb
RK
3093 if (warn_unused)
3094 for (decl = vars; decl; decl = TREE_CHAIN (decl))
7e70e7c5
RS
3095 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3096 && ! DECL_IN_SYSTEM_HEADER (decl))
28d81abb
RK
3097 warning_with_decl (decl, "unused variable `%s'");
3098
28d81abb
RK
3099 if (thisblock->exit_label)
3100 {
3101 do_pending_stack_adjust ();
3102 emit_label (thisblock->exit_label);
3103 }
3104
3105 /* If necessary, make a handler for nonlocal gotos taking
3106 place in the function calls in this block. */
3107 if (function_call_count != thisblock->data.block.function_call_count
3108 && nonlocal_labels
3109 /* Make handler for outermost block
3110 if there were any nonlocal gotos to this function. */
3111 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3112 /* Make handler for inner block if it has something
3113 special to do when you jump out of it. */
3114 : (thisblock->data.block.cleanups != 0
3115 || thisblock->data.block.stack_level != 0)))
3116 {
3117 tree link;
3118 rtx afterward = gen_label_rtx ();
3119 rtx handler_label = gen_label_rtx ();
3120 rtx save_receiver = gen_reg_rtx (Pmode);
ba83886f 3121 rtx insns;
28d81abb
RK
3122
3123 /* Don't let jump_optimize delete the handler. */
3124 LABEL_PRESERVE_P (handler_label) = 1;
3125
3126 /* Record the handler address in the stack slot for that purpose,
3127 during this block, saving and restoring the outer value. */
3128 if (thisblock->next != 0)
3129 {
3130 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
ba83886f
RS
3131
3132 start_sequence ();
3133 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3134 insns = get_insns ();
3135 end_sequence ();
3136 emit_insns_before (insns, thisblock->data.block.first_insn);
28d81abb 3137 }
ba83886f
RS
3138
3139 start_sequence ();
3140 emit_move_insn (nonlocal_goto_handler_slot,
3141 gen_rtx (LABEL_REF, Pmode, handler_label));
3142 insns = get_insns ();
3143 end_sequence ();
3144 emit_insns_before (insns, thisblock->data.block.first_insn);
28d81abb
RK
3145
3146 /* Jump around the handler; it runs only when specially invoked. */
3147 emit_jump (afterward);
3148 emit_label (handler_label);
3149
3150#ifdef HAVE_nonlocal_goto
3151 if (! HAVE_nonlocal_goto)
3152#endif
3153 /* First adjust our frame pointer to its actual value. It was
3154 previously set to the start of the virtual area corresponding to
3155 the stacked variables when we branched here and now needs to be
3156 adjusted to the actual hardware fp value.
3157
3158 Assignments to virtual registers are converted by
3159 instantiate_virtual_regs into the corresponding assignment
3160 to the underlying register (fp in this case) that makes
3161 the original assignment true.
3162 So the following insn will actually be
3163 decrementing fp by STARTING_FRAME_OFFSET. */
705e524e 3164 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
28d81abb 3165
a35ad168 3166#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
28d81abb
RK
3167 if (fixed_regs[ARG_POINTER_REGNUM])
3168 {
42495ca0
RK
3169#ifdef ELIMINABLE_REGS
3170 /* If the argument pointer can be eliminated in favor of the
3171 frame pointer, we don't need to restore it. We assume here
3172 that if such an elimination is present, it can always be used.
3173 This is the case on all known machines; if we don't make this
3174 assumption, we do unnecessary saving on many machines. */
3175 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3176 int i;
3177
3178 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3179 if (elim_regs[i].from == ARG_POINTER_REGNUM
a35ad168 3180 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
42495ca0
RK
3181 break;
3182
3183 if (i == sizeof elim_regs / sizeof elim_regs [0])
3184#endif
3185 {
3186 /* Now restore our arg pointer from the address at which it
3187 was saved in our stack frame.
3188 If there hasn't been space allocated for it yet, make
3189 some now. */
3190 if (arg_pointer_save_area == 0)
3191 arg_pointer_save_area
3192 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3193 emit_move_insn (virtual_incoming_args_rtx,
3194 /* We need a pseudo here, or else
3195 instantiate_virtual_regs_1 complains. */
3196 copy_to_reg (arg_pointer_save_area));
3197 }
28d81abb
RK
3198 }
3199#endif
3200
4e05a62c
RK
3201#ifdef HAVE_nonlocal_goto_receiver
3202 if (HAVE_nonlocal_goto_receiver)
3203 emit_insn (gen_nonlocal_goto_receiver ());
3204#endif
3205
28d81abb
RK
3206 /* The handler expects the desired label address in the static chain
3207 register. It tests the address and does an appropriate jump
3208 to whatever label is desired. */
3209 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3210 /* Skip any labels we shouldn't be able to jump to from here. */
3211 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3212 {
3213 rtx not_this = gen_label_rtx ();
3214 rtx this = gen_label_rtx ();
3215 do_jump_if_equal (static_chain_rtx,
3216 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3217 this, 0);
3218 emit_jump (not_this);
3219 emit_label (this);
3220 expand_goto (TREE_VALUE (link));
3221 emit_label (not_this);
3222 }
3223 /* If label is not recognized, abort. */
3224 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3225 VOIDmode, 0);
a3fd7507 3226 emit_barrier ();
28d81abb
RK
3227 emit_label (afterward);
3228 }
3229
3230 /* Don't allow jumping into a block that has cleanups or a stack level. */
3231 if (dont_jump_in
3232 || thisblock->data.block.stack_level != 0
3233 || thisblock->data.block.cleanups != 0)
3234 {
3235 struct label_chain *chain;
3236
3237 /* Any labels in this block are no longer valid to go to.
3238 Mark them to cause an error message. */
3239 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3240 {
3241 DECL_TOO_LATE (chain->label) = 1;
3242 /* If any goto without a fixup came to this label,
3243 that must be an error, because gotos without fixups
3244 come from outside all saved stack-levels and all cleanups. */
3245 if (TREE_ADDRESSABLE (chain->label))
3246 error_with_decl (chain->label,
3247 "label `%s' used before containing binding contour");
3248 }
3249 }
3250
3251 /* Restore stack level in effect before the block
3252 (only if variable-size objects allocated). */
3253 /* Perform any cleanups associated with the block. */
3254
3255 if (thisblock->data.block.stack_level != 0
3256 || thisblock->data.block.cleanups != 0)
3257 {
413ec213 3258 /* Only clean up here if this point can actually be reached. */
50d1b7a1 3259 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
28d81abb 3260
50d1b7a1
MS
3261 /* Don't let cleanups affect ({...}) constructs. */
3262 int old_expr_stmts_for_value = expr_stmts_for_value;
3263 rtx old_last_expr_value = last_expr_value;
3264 tree old_last_expr_type = last_expr_type;
3265 expr_stmts_for_value = 0;
28d81abb 3266
50d1b7a1
MS
3267 /* Do the cleanups. */
3268 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3269 if (reachable)
3270 do_pending_stack_adjust ();
28d81abb 3271
50d1b7a1
MS
3272 expr_stmts_for_value = old_expr_stmts_for_value;
3273 last_expr_value = old_last_expr_value;
3274 last_expr_type = old_last_expr_type;
3275
3276 /* Restore the stack level. */
3277
3278 if (reachable && thisblock->data.block.stack_level != 0)
3279 {
3280 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3281 thisblock->data.block.stack_level, NULL_RTX);
3282 if (nonlocal_goto_handler_slot != 0)
3283 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3284 NULL_RTX);
28d81abb
RK
3285 }
3286
3287 /* Any gotos out of this block must also do these things.
59257ff7
RK
3288 Also report any gotos with fixups that came to labels in this
3289 level. */
28d81abb
RK
3290 fixup_gotos (thisblock,
3291 thisblock->data.block.stack_level,
3292 thisblock->data.block.cleanups,
3293 thisblock->data.block.first_insn,
3294 dont_jump_in);
3295 }
3296
c7d2d61d
RS
3297 /* Mark the beginning and end of the scope if requested.
3298 We do this now, after running cleanups on the variables
3299 just going out of scope, so they are in scope for their cleanups. */
3300
3301 if (mark_ends)
7629c936 3302 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
c7d2d61d
RS
3303 else
3304 /* Get rid of the beginning-mark if we don't make an end-mark. */
3305 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3306
28d81abb
RK
3307 /* If doing stupid register allocation, make sure lives of all
3308 register variables declared here extend thru end of scope. */
3309
3310 if (obey_regdecls)
3311 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3312 {
3313 rtx rtl = DECL_RTL (decl);
3314 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3315 use_variable (rtl);
3316 }
3317
3318 /* Restore block_stack level for containing block. */
3319
3320 stack_block_stack = thisblock->data.block.innermost_stack_block;
3321 POPSTACK (block_stack);
3322
3323 /* Pop the stack slot nesting and free any slots at this level. */
3324 pop_temp_slots ();
3325}
ca695ac9
JB
3326
3327
3328/* End a binding contour.
3329 VARS is the chain of VAR_DECL nodes for the variables bound
3330 in this contour. MARK_ENDS is nonzero if we should put a note
3331 at the beginning and end of this binding contour.
3332 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3333 contour. */
3334
704f4dca 3335static void
ca695ac9
JB
3336bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3337 tree vars;
3338 int mark_ends;
3339 int dont_jump_in;
3340{
3341 struct nesting *thisbind = nesting_stack;
3342 tree decl;
3343
3344 if (warn_unused)
3345 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3346 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3347 warning_with_decl (decl, "unused variable `%s'");
3348
8e2b13c3
RK
3349 if (thisbind->exit_label)
3350 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
ca695ac9
JB
3351
3352 /* Pop block/bindings off stack */
ca695ac9
JB
3353 POPSTACK (block_stack);
3354}
28d81abb
RK
3355\f
3356/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3357 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3358
3359void
3360expand_decl (decl)
3361 register tree decl;
3362{
3363 struct nesting *thisblock = block_stack;
ca695ac9
JB
3364 tree type;
3365
3366 if (output_bytecode)
3367 {
3368 bc_expand_decl (decl, 0);
3369 return;
3370 }
3371
3372 type = TREE_TYPE (decl);
28d81abb
RK
3373
3374 /* Only automatic variables need any expansion done.
3375 Static and external variables, and external functions,
3376 will be handled by `assemble_variable' (called from finish_decl).
3377 TYPE_DECL and CONST_DECL require nothing.
3378 PARM_DECLs are handled in `assign_parms'. */
3379
3380 if (TREE_CODE (decl) != VAR_DECL)
3381 return;
44fe2e80 3382 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3383 return;
3384
3385 /* Create the RTL representation for the variable. */
3386
3387 if (type == error_mark_node)
3388 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3389 else if (DECL_SIZE (decl) == 0)
3390 /* Variable with incomplete type. */
3391 {
3392 if (DECL_INITIAL (decl) == 0)
3393 /* Error message was already done; now avoid a crash. */
3394 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3395 else
3396 /* An initializer is going to decide the size of this array.
3397 Until we know the size, represent its address with a reg. */
3398 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3668e76e 3399 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
28d81abb
RK
3400 }
3401 else if (DECL_MODE (decl) != BLKmode
3402 /* If -ffloat-store, don't put explicit float vars
3403 into regs. */
3404 && !(flag_float_store
3405 && TREE_CODE (type) == REAL_TYPE)
3406 && ! TREE_THIS_VOLATILE (decl)
3407 && ! TREE_ADDRESSABLE (decl)
44fe2e80 3408 && (DECL_REGISTER (decl) || ! obey_regdecls))
28d81abb
RK
3409 {
3410 /* Automatic variable that can go in a register. */
98f3b471 3411 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
3412 enum machine_mode reg_mode
3413 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3414
5998c7dc
RS
3415 if (TREE_CODE (type) == COMPLEX_TYPE)
3416 {
3417 rtx realpart, imagpart;
3418 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3419
3420 /* For a complex type variable, make a CONCAT of two pseudos
3421 so that the real and imaginary parts
3422 can be allocated separately. */
3423 realpart = gen_reg_rtx (partmode);
3424 REG_USERVAR_P (realpart) = 1;
3425 imagpart = gen_reg_rtx (partmode);
3426 REG_USERVAR_P (imagpart) = 1;
3427 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3428 }
3429 else
3430 {
3431 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3432 if (TREE_CODE (type) == POINTER_TYPE)
d902f80a
RK
3433 mark_reg_pointer (DECL_RTL (decl),
3434 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3435 / BITS_PER_UNIT));
5998c7dc
RS
3436 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3437 }
28d81abb
RK
3438 }
3439 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3440 {
3441 /* Variable of fixed size that goes on the stack. */
3442 rtx oldaddr = 0;
3443 rtx addr;
3444
3445 /* If we previously made RTL for this decl, it must be an array
3446 whose size was determined by the initializer.
3447 The old address was a register; set that register now
3448 to the proper address. */
3449 if (DECL_RTL (decl) != 0)
3450 {
3451 if (GET_CODE (DECL_RTL (decl)) != MEM
3452 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3453 abort ();
3454 oldaddr = XEXP (DECL_RTL (decl), 0);
3455 }
3456
3457 DECL_RTL (decl)
3458 = assign_stack_temp (DECL_MODE (decl),
3459 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3460 + BITS_PER_UNIT - 1)
3461 / BITS_PER_UNIT),
3462 1);
3668e76e 3463 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
28d81abb
RK
3464
3465 /* Set alignment we actually gave this decl. */
3466 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3467 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3468
3469 if (oldaddr)
3470 {
3471 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3472 if (addr != oldaddr)
3473 emit_move_insn (oldaddr, addr);
3474 }
3475
3476 /* If this is a memory ref that contains aggregate components,
3477 mark it as such for cse and loop optimize. */
05e3bdb9 3478 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
28d81abb
RK
3479#if 0
3480 /* If this is in memory because of -ffloat-store,
3481 set the volatile bit, to prevent optimizations from
3482 undoing the effects. */
3483 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3484 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3485#endif
3486 }
3487 else
3488 /* Dynamic-size object: must push space on the stack. */
3489 {
3490 rtx address, size;
3491
3492 /* Record the stack pointer on entry to block, if we have
3493 not already done so. */
3494 if (thisblock->data.block.stack_level == 0)
3495 {
3496 do_pending_stack_adjust ();
59257ff7
RK
3497 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3498 &thisblock->data.block.stack_level,
3499 thisblock->data.block.first_insn);
28d81abb
RK
3500 stack_block_stack = thisblock;
3501 }
3502
3503 /* Compute the variable's size, in bytes. */
3504 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3505 DECL_SIZE (decl),
3506 size_int (BITS_PER_UNIT)),
37366632 3507 NULL_RTX, VOIDmode, 0);
28d81abb
RK
3508 free_temp_slots ();
3509
ff91ad08
RK
3510 /* Allocate space on the stack for the variable. Note that
3511 DECL_ALIGN says how the variable is to be aligned and we
3512 cannot use it to conclude anything about the alignment of
3513 the size. */
37366632 3514 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3515 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3516
28d81abb
RK
3517 /* Reference the variable indirect through that rtx. */
3518 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3519
2207e295
RS
3520 /* If this is a memory ref that contains aggregate components,
3521 mark it as such for cse and loop optimize. */
05e3bdb9 3522 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
2207e295 3523
28d81abb
RK
3524 /* Indicate the alignment we actually gave this variable. */
3525#ifdef STACK_BOUNDARY
3526 DECL_ALIGN (decl) = STACK_BOUNDARY;
3527#else
3528 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3529#endif
3530 }
3531
3532 if (TREE_THIS_VOLATILE (decl))
3533 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
b4bf13a8
RS
3534#if 0 /* A variable is not necessarily unchanging
3535 just because it is const. RTX_UNCHANGING_P
3536 means no change in the function,
3537 not merely no change in the variable's scope.
3538 It is correct to set RTX_UNCHANGING_P if the variable's scope
3539 is the whole function. There's no convenient way to test that. */
28d81abb
RK
3540 if (TREE_READONLY (decl))
3541 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
b4bf13a8 3542#endif
28d81abb
RK
3543
3544 /* If doing stupid register allocation, make sure life of any
3545 register variable starts here, at the start of its scope. */
3546
3547 if (obey_regdecls)
3548 use_variable (DECL_RTL (decl));
3549}
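
/* Worked note, not part of the original file: in the dynamic-size branch
   above, a DECL_SIZE of 37 bits yields size = (37 + 8 - 1) / 8 = 5 bytes
   through the CEIL_DIV_EXPR, assuming BITS_PER_UNIT is 8.  The alignment
   passed to allocate_dynamic_stack_space comes from TYPE_ALIGN; as noted
   above, DECL_ALIGN cannot be used to conclude anything about the
   alignment of the size.  */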
ca695ac9
JB
3550
3551
3552/* Generate code for the automatic variable declaration DECL. For
3553 most variables this just means we give it a stack offset. The
3554 compiler sometimes emits cleanups without variables and we will
3555 have to deal with those too. */
3556
704f4dca 3557static void
ca695ac9
JB
3558bc_expand_decl (decl, cleanup)
3559 tree decl;
3560 tree cleanup;
3561{
3562 tree type;
3563
3564 if (!decl)
3565 {
3566 /* A cleanup with no variable. */
3567 if (!cleanup)
3568 abort ();
3569
3570 return;
3571 }
3572
3573 /* Only auto variables need any work. */
3574 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3575 return;
3576
3577 type = TREE_TYPE (decl);
3578
3579 if (type == error_mark_node)
3580 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3581
3582 else if (DECL_SIZE (decl) == 0)
3583
3584 /* Variable with incomplete type. The stack offset herein will be
3585 fixed later in expand_decl_init (). */
3586 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3587
3588 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3589 {
3590 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3591 DECL_ALIGN (decl));
3592 }
3593 else
3594 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3595}
28d81abb
RK
3596\f
3597/* Emit code to perform the initialization of a declaration DECL. */
3598
3599void
3600expand_decl_init (decl)
3601 tree decl;
3602{
b4ac57ab
RS
3603 int was_used = TREE_USED (decl);
3604
704f4dca
RK
3605 if (output_bytecode)
3606 {
3607 bc_expand_decl_init (decl);
3608 return;
3609 }
3610
3564e40e
RK
3611 /* If this is a CONST_DECL, we don't have to generate any code, but
3612 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3613 to be set while in the obstack containing the constant. If we don't
3614 do this, we can lose if we have functions nested three deep and the middle
3615 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3616 the innermost function is the first to expand that STRING_CST. */
3617 if (TREE_CODE (decl) == CONST_DECL)
3618 {
3619 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3620 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3621 EXPAND_INITIALIZER);
3622 return;
3623 }
3624
28d81abb
RK
3625 if (TREE_STATIC (decl))
3626 return;
3627
3628 /* Compute and store the initial value now. */
3629
3630 if (DECL_INITIAL (decl) == error_mark_node)
3631 {
3632 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3633 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3634 || code == POINTER_TYPE)
3635 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3636 0, 0);
3637 emit_queue ();
3638 }
3639 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3640 {
3641 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3642 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3643 emit_queue ();
3644 }
3645
b4ac57ab
RS
3646 /* Don't let the initialization count as "using" the variable. */
3647 TREE_USED (decl) = was_used;
3648
28d81abb 3649 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 3650 preserve_temp_slots (NULL_RTX);
28d81abb
RK
3651 free_temp_slots ();
3652}
3653
ca695ac9
JB
3654/* Expand initialization for variable-sized types. Allocate array
3655 using newlocalSI and set the local variable, which is a pointer to the
0f41302f 3656 storage. */
ca695ac9 3657
704f4dca 3658static void
ca695ac9
JB
3659bc_expand_variable_local_init (decl)
3660 tree decl;
3661{
3662 /* Evaluate size expression and coerce to SI */
3663 bc_expand_expr (DECL_SIZE (decl));
3664
3665 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3666 no coercion is necessary (?) */
3667
3668/* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3669 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3670
3671 /* Emit code to allocate array */
3672 bc_emit_instruction (newlocalSI);
3673
3674 /* Store array pointer in local variable. This is the only instance
3675 where we actually want the address of the pointer to the
3676 variable-size block, rather than the pointer itself. We avoid
3677 using expand_address() since that would cause the pointer to be
3678 pushed rather than its address. Hence the hard-coded reference;
3679 notice also that the variable is always local (no global
0f41302f 3680 variable-size type variables). */
ca695ac9
JB
3681
3682 bc_load_localaddr (DECL_RTL (decl));
3683 bc_emit_instruction (storeP);
3684}
3685
3686
3687/* Emit code to initialize a declaration. */
704f4dca
RK
3688
3689static void
ca695ac9
JB
3690bc_expand_decl_init (decl)
3691 tree decl;
3692{
3693 int org_stack_depth;
3694
3695 /* Static initializers are handled elsewhere. */
3696
3697 if (TREE_STATIC (decl))
3698 return;
3699
3700 /* Remember the original stack depth. */
3701 org_stack_depth = stack_depth;
3702
3703 /* If the type is variable-size, we first create its space (we ASSUME
3704 it CAN'T be static). We do this regardless of whether there's an
0f41302f 3705 initializer assignment or not. */
ca695ac9
JB
3706
3707 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3708 bc_expand_variable_local_init (decl);
3709
3710 /* Expand initializer assignment */
3711 if (DECL_INITIAL (decl) == error_mark_node)
3712 {
3713 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3714
3715 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3716 || code == POINTER_TYPE)
3717
e81d77b5 3718 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
ca695ac9
JB
3719 }
3720 else if (DECL_INITIAL (decl))
e81d77b5 3721 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
ca695ac9
JB
3722
3723 /* Restore stack depth */
3724 if (org_stack_depth > stack_depth)
3725 abort ();
3726
3727 bc_adjust_stack (stack_depth - org_stack_depth);
3728}
3729
3730
28d81abb
RK
3731/* CLEANUP is an expression to be executed at exit from this binding contour;
3732 for example, in C++, it might call the destructor for this variable.
3733
4847c938
MS
3734 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3735 CLEANUP multiple times, and have the correct semantics. This
3736 happens in exception handling, and for non-local gotos.
28d81abb
RK
3737
3738 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3739 that is not associated with any particular variable. */
3740
3741int
3742expand_decl_cleanup (decl, cleanup)
3743 tree decl, cleanup;
3744{
3745 struct nesting *thisblock = block_stack;
3746
3747 /* Error if we are not in any block. */
3748 if (thisblock == 0)
3749 return 0;
3750
3751 /* Record the cleanup if there is one. */
3752
3753 if (cleanup != 0)
3754 {
4847c938
MS
3755 cleanup = unsave_expr (cleanup);
3756
28d81abb
RK
3757 thisblock->data.block.cleanups
3758 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3759 /* If this block has a cleanup, it belongs in stack_block_stack. */
3760 stack_block_stack = thisblock;
6adb4e3a 3761 expand_eh_region_start ();
28d81abb
RK
3762 }
3763 return 1;
3764}
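
/* A minimal usage sketch, not part of the original file: a front end
   recording a cleanup for DECL at declaration time.  Here
   `build_cleanup_call' is a hypothetical language hook that builds the
   tree for, say, a destructor call.  */

static void
example_register_cleanup (decl)
     tree decl;
{
  tree cleanup = build_cleanup_call (decl);	/* hypothetical hook */

  /* expand_decl_cleanup returns zero only when there is no
     enclosing binding contour to attach the cleanup to.  */
  if (! expand_decl_cleanup (decl, cleanup))
    error_with_decl (decl, "cleanup for `%s' outside of any block");
}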
3765\f
3766/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3767 DECL_ELTS is the list of elements that belong to DECL's type.
3768 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3769
3770void
3771expand_anon_union_decl (decl, cleanup, decl_elts)
3772 tree decl, cleanup, decl_elts;
3773{
3774 struct nesting *thisblock = block_stack;
3775 rtx x;
3776
ec5cd386
RK
3777 expand_decl (decl);
3778 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
3779 x = DECL_RTL (decl);
3780
3781 while (decl_elts)
3782 {
3783 tree decl_elt = TREE_VALUE (decl_elts);
3784 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3785 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3786
7b9032dd
JM
3787 /* Propagate the union's alignment to the elements. */
3788 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3789
3790 /* If the element has BLKmode and the union doesn't, the union is
3791 aligned such that the element doesn't need to have BLKmode, so
3792 change the element's mode to the appropriate one for its size. */
3793 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3794 DECL_MODE (decl_elt) = mode
3795 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3796 MODE_INT, 1);
3797
28d81abb
RK
3798 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3799 instead create a new MEM rtx with the proper mode. */
3800 if (GET_CODE (x) == MEM)
3801 {
3802 if (mode == GET_MODE (x))
3803 DECL_RTL (decl_elt) = x;
3804 else
3805 {
3806 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3807 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3808 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3809 }
3810 }
3811 else if (GET_CODE (x) == REG)
3812 {
3813 if (mode == GET_MODE (x))
3814 DECL_RTL (decl_elt) = x;
3815 else
3816 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3817 }
3818 else
3819 abort ();
3820
3821 /* Record the cleanup if there is one. */
3822
3823 if (cleanup != 0)
3824 thisblock->data.block.cleanups
3825 = temp_tree_cons (decl_elt, cleanup_elt,
3826 thisblock->data.block.cleanups);
3827
3828 decl_elts = TREE_CHAIN (decl_elts);
3829 }
3830}
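
/* A minimal sketch, not part of the original file: how a front end might
   build the DECL_ELTS list consumed above.  U, A, B and the two cleanup
   trees are hypothetical; tree_cons places each cleanup in TREE_PURPOSE
   and each member VAR_DECL in TREE_VALUE, as documented before
   expand_anon_union_decl.  */

static void
example_expand_anon_union (u, a, cleanup_a, b, cleanup_b)
     tree u, a, cleanup_a, b, cleanup_b;
{
  tree elts = tree_cons (cleanup_a, a,
			 tree_cons (cleanup_b, b, NULL_TREE));

  expand_anon_union_decl (u, NULL_TREE, elts);
}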
3831\f
3832/* Expand a list of cleanups LIST.
3833 Elements may be expressions or may be nested lists.
3834
3835 If DONT_DO is nonnull, then any list-element
3836 whose TREE_PURPOSE matches DONT_DO is omitted.
3837 This is sometimes used to avoid a cleanup associated with
4e44807b
MS
3838 a value that is being returned out of the scope.
3839
3840 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
50d1b7a1
MS
3841 goto and handle protection regions specially in that case.
3842
3843 If REACHABLE, we emit code, otherwise just inform the exception handling
3844 code about this finalization. */
28d81abb
RK
3845
3846static void
50d1b7a1 3847expand_cleanups (list, dont_do, in_fixup, reachable)
28d81abb
RK
3848 tree list;
3849 tree dont_do;
4e44807b 3850 int in_fixup;
50d1b7a1 3851 int reachable;
28d81abb
RK
3852{
3853 tree tail;
3854 for (tail = list; tail; tail = TREE_CHAIN (tail))
3855 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3856 {
3857 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
50d1b7a1 3858 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
28d81abb
RK
3859 else
3860 {
4e44807b 3861 if (! in_fixup)
6adb4e3a 3862 expand_eh_region_end (TREE_VALUE (tail));
61d6b1cc 3863
50d1b7a1
MS
3864 if (reachable)
3865 {
3866 /* Cleanups may be run multiple times. For example,
3867 when exiting a binding contour, we expand the
3868 cleanups associated with that contour. When a goto
3869 within that binding contour has a target outside that
3870 contour, it will expand all cleanups from its scope to
3871 the target. Though the cleanups are expanded multiple
3872 times, the control paths are non-overlapping so the
3873 cleanups will not be executed twice. */
3874 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3875 free_temp_slots ();
3876 }
28d81abb
RK
3877 }
3878 }
3879}
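
/* Illustrative source-level sketch, not from the original file, of the
   "expanded multiple times, executed once" point above.  If `x' has a
   cleanup, that cleanup is expanded both along the goto path and at the
   normal end of the block, yet any single execution takes only one of
   the two paths (T and `cond' are hypothetical):

	{
	  T x;			-- has a cleanup
	  if (cond)
	    goto out;		-- cleanup expanded along this path...
	}			-- ...and expanded again here
      out: ;
*/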
3880
28d81abb
RK
3881/* Move all cleanups from the current block_stack
3882 to the containing block_stack, where they are assumed to
3883 have been created. If anything can cause a temporary to
3884 be created, but not expanded for more than one level of
3885 block_stacks, then this code will have to change. */
3886
3887void
3888move_cleanups_up ()
3889{
3890 struct nesting *block = block_stack;
3891 struct nesting *outer = block->next;
3892
3893 outer->data.block.cleanups
3894 = chainon (block->data.block.cleanups,
3895 outer->data.block.cleanups);
3896 block->data.block.cleanups = 0;
3897}
3898
3899tree
3900last_cleanup_this_contour ()
3901{
3902 if (block_stack == 0)
3903 return 0;
3904
3905 return block_stack->data.block.cleanups;
3906}
3907
3908/* Return 1 if there are any pending cleanups at this point.
3909 If THIS_CONTOUR is nonzero, check the current contour as well.
3910 Otherwise, look only at the contours that enclose this one. */
3911
3912int
3913any_pending_cleanups (this_contour)
3914 int this_contour;
3915{
3916 struct nesting *block;
3917
3918 if (block_stack == 0)
3919 return 0;
3920
3921 if (this_contour && block_stack->data.block.cleanups != NULL)
3922 return 1;
3923 if (block_stack->data.block.cleanups == 0
3924 && (block_stack->data.block.outer_cleanups == 0
3925#if 0
3926 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3927#endif
3928 ))
3929 return 0;
3930
3931 for (block = block_stack->next; block; block = block->next)
3932 if (block->data.block.cleanups != 0)
3933 return 1;
3934
3935 return 0;
3936}
3937\f
3938/* Enter a case (Pascal) or switch (C) statement.
3939 Push a block onto case_stack and nesting_stack
3940 to accumulate the case-labels that are seen
3941 and to record the labels generated for the statement.
3942
3943 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3944 Otherwise, this construct is transparent for `exit_something'.
3945
3946 EXPR is the index-expression to be dispatched on.
3947 TYPE is its nominal type. We could simply convert EXPR to this type,
3948 but instead we take short cuts. */
3949
3950void
3951expand_start_case (exit_flag, expr, type, printname)
3952 int exit_flag;
3953 tree expr;
3954 tree type;
3955 char *printname;
3956{
3957 register struct nesting *thiscase = ALLOC_NESTING ();
3958
3959 /* Make an entry on case_stack for the case we are entering. */
3960
3961 thiscase->next = case_stack;
3962 thiscase->all = nesting_stack;
3963 thiscase->depth = ++nesting_depth;
3964 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3965 thiscase->data.case_stmt.case_list = 0;
3966 thiscase->data.case_stmt.index_expr = expr;
3967 thiscase->data.case_stmt.nominal_type = type;
3968 thiscase->data.case_stmt.default_label = 0;
3969 thiscase->data.case_stmt.num_ranges = 0;
3970 thiscase->data.case_stmt.printname = printname;
3971 thiscase->data.case_stmt.seenlabel = 0;
3972 case_stack = thiscase;
3973 nesting_stack = thiscase;
3974
ca695ac9
JB
3975 if (output_bytecode)
3976 {
3977 bc_expand_start_case (thiscase, expr, type, printname);
3978 return;
3979 }
3980
28d81abb
RK
3981 do_pending_stack_adjust ();
3982
3983 /* Make sure case_stmt.start points to something that won't
3984 need any transformation before expand_end_case. */
3985 if (GET_CODE (get_last_insn ()) != NOTE)
37366632 3986 emit_note (NULL_PTR, NOTE_INSN_DELETED);
28d81abb
RK
3987
3988 thiscase->data.case_stmt.start = get_last_insn ();
3989}
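
/* Sketch of the call protocol, not part of the original file: for
   `switch (expr) { case 1: ...; default: ...; }' a parser roughly
   issues

	expand_start_case (1, expr, type, "switch statement");
	  ... expand the body, calling pushcase / pushcase_range
	      for each label as it is parsed ...
	expand_end_case (expr);

   where `expr' and `type' are the index expression and its type.  */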
3990
ca695ac9
JB
3991
3992/* Enter a case statement. It is assumed that the caller has pushed
0f41302f 3993 the current context onto the case stack. */
704f4dca
RK
3994
3995static void
ca695ac9
JB
3996bc_expand_start_case (thiscase, expr, type, printname)
3997 struct nesting *thiscase;
3998 tree expr;
3999 tree type;
4000 char *printname;
4001{
4002 bc_expand_expr (expr);
4003 bc_expand_conversion (TREE_TYPE (expr), type);
4004
4005 /* For cases, the skip is a place we jump to that's emitted after
4006 the size of the jump table is known. */
4007
4008 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4009 bc_emit_bytecode (jump);
c53e9440 4010 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
ca695ac9
JB
4011
4012#ifdef DEBUG_PRINT_CODE
4013 fputc ('\n', stderr);
4014#endif
4015}
4016
4017
28d81abb
RK
4018/* Start a "dummy case statement" within which case labels are invalid
4019 and are not connected to any larger real case statement.
4020 This can be used if you don't want to let a case statement jump
4021 into the middle of certain kinds of constructs. */
4022
4023void
4024expand_start_case_dummy ()
4025{
4026 register struct nesting *thiscase = ALLOC_NESTING ();
4027
4028 /* Make an entry on case_stack for the dummy. */
4029
4030 thiscase->next = case_stack;
4031 thiscase->all = nesting_stack;
4032 thiscase->depth = ++nesting_depth;
4033 thiscase->exit_label = 0;
4034 thiscase->data.case_stmt.case_list = 0;
4035 thiscase->data.case_stmt.start = 0;
4036 thiscase->data.case_stmt.nominal_type = 0;
4037 thiscase->data.case_stmt.default_label = 0;
4038 thiscase->data.case_stmt.num_ranges = 0;
4039 case_stack = thiscase;
4040 nesting_stack = thiscase;
4041}
4042
4043/* End a dummy case statement. */
4044
4045void
4046expand_end_case_dummy ()
4047{
4048 POPSTACK (case_stack);
4049}
4050
4051/* Return the data type of the index-expression
4052 of the innermost case statement, or null if none. */
4053
4054tree
4055case_index_expr_type ()
4056{
4057 if (case_stack)
4058 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4059 return 0;
4060}
4061\f
4062/* Accumulate one case or default label inside a case or switch statement.
4063 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4064 The function CONVERTER, when applied to arguments T and V,
4065 converts the value V to the type T.
28d81abb
RK
4066
4067 If not currently inside a case or switch statement, return 1 and do
4068 nothing. The caller will print a language-specific error message.
4069 If VALUE is a duplicate or overlaps, return 2 and do nothing
4070 except store the (first) duplicate node in *DUPLICATE.
4071 If VALUE is out of range, return 3 and do nothing.
4072 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4073 Return 0 on success.
4074
4075 Extended to handle range statements. */
4076
4077int
f52fba84 4078pushcase (value, converter, label, duplicate)
28d81abb 4079 register tree value;
f52fba84 4080 tree (*converter) PROTO((tree, tree));
28d81abb
RK
4081 register tree label;
4082 tree *duplicate;
4083{
4084 register struct case_node **l;
4085 register struct case_node *n;
4086 tree index_type;
4087 tree nominal_type;
4088
ca695ac9
JB
4089 if (output_bytecode)
4090 return bc_pushcase (value, label);
4091
28d81abb
RK
4092 /* Fail if not inside a real case statement. */
4093 if (! (case_stack && case_stack->data.case_stmt.start))
4094 return 1;
4095
4096 if (stack_block_stack
4097 && stack_block_stack->depth > case_stack->depth)
4098 return 5;
4099
4100 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4101 nominal_type = case_stack->data.case_stmt.nominal_type;
4102
4103 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4104 if (index_type == error_mark_node)
4105 return 0;
4106
4107 /* Convert VALUE to the type in which the comparisons are nominally done. */
4108 if (value != 0)
f52fba84 4109 value = (*converter) (nominal_type, value);
28d81abb
RK
4110
4111 /* If this is the first label, warn if any insns have been emitted. */
4112 if (case_stack->data.case_stmt.seenlabel == 0)
4113 {
4114 rtx insn;
4115 for (insn = case_stack->data.case_stmt.start;
4116 insn;
4117 insn = NEXT_INSN (insn))
4118 {
4119 if (GET_CODE (insn) == CODE_LABEL)
4120 break;
4121 if (GET_CODE (insn) != NOTE
4122 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4123 {
4124 warning ("unreachable code at beginning of %s",
4125 case_stack->data.case_stmt.printname);
4126 break;
4127 }
4128 }
4129 }
4130 case_stack->data.case_stmt.seenlabel = 1;
4131
4132 /* Fail if this value is out of range for the actual type of the index
4133 (which may be narrower than NOMINAL_TYPE). */
4134 if (value != 0 && ! int_fits_type_p (value, index_type))
4135 return 3;
4136
4137 /* Fail if this is a duplicate or overlaps another entry. */
4138 if (value == 0)
4139 {
4140 if (case_stack->data.case_stmt.default_label != 0)
4141 {
4142 *duplicate = case_stack->data.case_stmt.default_label;
4143 return 2;
4144 }
4145 case_stack->data.case_stmt.default_label = label;
4146 }
4147 else
57641239 4148 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4149
4150 expand_label (label);
4151 return 0;
4152}
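
/* A minimal sketch, not part of the original file: how a C-like front
   end might map the pushcase return codes documented above onto
   diagnostics.  `convert' is the usual tree conversion routine; the
   message wording is hypothetical.  */

static void
example_do_case_label (value, label)
     tree value, label;
{
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:
      error_with_decl (duplicate, "duplicate case value");
      break;
    case 3:
      error ("case label value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
}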
4153
4154/* Like pushcase but this case applies to all values
4155 between VALUE1 and VALUE2 (inclusive).
4156 The return value is the same as that of pushcase
4157 but there is one additional error code:
4158 4 means the specified range was empty. */
4159
4160int
f52fba84 4161pushcase_range (value1, value2, converter, label, duplicate)
28d81abb 4162 register tree value1, value2;
f52fba84 4163 tree (*converter) PROTO((tree, tree));
28d81abb
RK
4164 register tree label;
4165 tree *duplicate;
4166{
4167 register struct case_node **l;
4168 register struct case_node *n;
4169 tree index_type;
4170 tree nominal_type;
4171
4172 /* Fail if not inside a real case statement. */
4173 if (! (case_stack && case_stack->data.case_stmt.start))
4174 return 1;
4175
4176 if (stack_block_stack
4177 && stack_block_stack->depth > case_stack->depth)
4178 return 5;
4179
4180 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4181 nominal_type = case_stack->data.case_stmt.nominal_type;
4182
4183 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4184 if (index_type == error_mark_node)
4185 return 0;
4186
4187 /* If this is the first label, warn if any insns have been emitted. */
4188 if (case_stack->data.case_stmt.seenlabel == 0)
4189 {
4190 rtx insn;
4191 for (insn = case_stack->data.case_stmt.start;
4192 insn;
4193 insn = NEXT_INSN (insn))
4194 {
4195 if (GET_CODE (insn) == CODE_LABEL)
4196 break;
4197 if (GET_CODE (insn) != NOTE
4198 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4199 {
4200 warning ("unreachable code at beginning of %s",
4201 case_stack->data.case_stmt.printname);
4202 break;
4203 }
4204 }
4205 }
4206 case_stack->data.case_stmt.seenlabel = 1;
4207
4208 /* Convert VALUEs to type in which the comparisons are nominally done. */
0f41302f 4209 if (value1 == 0) /* Negative infinity. */
28d81abb 4210 value1 = TYPE_MIN_VALUE(index_type);
f52fba84 4211 value1 = (*converter) (nominal_type, value1);
28d81abb 4212
0f41302f 4213 if (value2 == 0) /* Positive infinity. */
28d81abb 4214 value2 = TYPE_MAX_VALUE(index_type);
f52fba84 4215 value2 = (*converter) (nominal_type, value2);
28d81abb
RK
4216
4217 /* Fail if these values are out of range. */
4218 if (! int_fits_type_p (value1, index_type))
4219 return 3;
4220
4221 if (! int_fits_type_p (value2, index_type))
4222 return 3;
4223
4224 /* Fail if the range is empty. */
4225 if (tree_int_cst_lt (value2, value1))
4226 return 4;
4227
57641239
RK
4228 return add_case_node (value1, value2, label, duplicate);
4229}
4230
4231/* Do the actual insertion of a case label for pushcase and pushcase_range
4232 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4233 slowdown for large switch statements. */
4234
4235static int
4236add_case_node (low, high, label, duplicate)
4237 tree low, high;
4238 tree label;
4239 tree *duplicate;
4240{
4241 struct case_node *p, **q, *r;
4242
4243 q = &case_stack->data.case_stmt.case_list;
4244 p = *q;
4245
4246 while (r = *q)
28d81abb 4247 {
57641239
RK
4248 p = r;
4249
4250 /* Keep going past elements distinctly greater than HIGH. */
4251 if (tree_int_cst_lt (high, p->low))
4252 q = &p->left;
4253
4254 /* or distinctly less than LOW. */
4255 else if (tree_int_cst_lt (p->high, low))
4256 q = &p->right;
4257
4258 else
28d81abb 4259 {
57641239
RK
4260 /* We have an overlap; this is an error. */
4261 *duplicate = p->code_label;
28d81abb
RK
4262 return 2;
4263 }
4264 }
4265
4266 /* Add this label to the chain, and succeed.
57641239 4267 Copy LOW, HIGH so they are on temporary rather than momentary
28d81abb
RK
4268 obstack and will thus survive till the end of the case statement. */
4269
57641239
RK
4270 r = (struct case_node *) oballoc (sizeof (struct case_node));
4271 r->low = copy_node (low);
28d81abb 4272
57641239
RK
4273 /* If the bounds are equal, turn this into the one-value case. */
4274
4275 if (tree_int_cst_equal (low, high))
4276 r->high = r->low;
4277 else
4278 {
4279 r->high = copy_node (high);
4280 case_stack->data.case_stmt.num_ranges++;
4281 }
4282
4283 r->code_label = label;
28d81abb
RK
4284 expand_label (label);
4285
57641239
RK
4286 *q = r;
4287 r->parent = p;
4288 r->left = 0;
4289 r->right = 0;
4290 r->balance = 0;
4291
4292 while (p)
4293 {
4294 struct case_node *s;
4295
4296 if (r == p->left)
4297 {
4298 int b;
4299
4300 if (! (b = p->balance))
4301 /* Growth propagation from left side. */
4302 p->balance = -1;
4303 else if (b < 0)
4304 {
4305 if (r->balance < 0)
4306 {
4307 /* R-Rotation */
4308 if (p->left = s = r->right)
4309 s->parent = p;
4310
4311 r->right = p;
4312 p->balance = 0;
4313 r->balance = 0;
4314 s = p->parent;
4315 p->parent = r;
4316
4317 if (r->parent = s)
4318 {
4319 if (s->left == p)
4320 s->left = r;
4321 else
4322 s->right = r;
4323 }
4324 else
4325 case_stack->data.case_stmt.case_list = r;
4326 }
4327 else
4328 /* r->balance == +1 */
4329 {
5720c7e7
RK
4330 /* LR-Rotation */
4331
57641239
RK
4332 int b2;
4333 struct case_node *t = r->right;
4334
4335 if (p->left = s = t->right)
4336 s->parent = p;
4337
4338 t->right = p;
4339 if (r->right = s = t->left)
4340 s->parent = r;
4341
4342 t->left = r;
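		  /* Recompute balance factors: the new subtree root T
		     always ends up balanced; P becomes +1 if T was
		     left-heavy, R becomes -1 if T was right-heavy,
		     and both are 0 otherwise.  */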
4343 b = t->balance;
4344 b2 = b < 0;
4345 p->balance = b2;
4346 b2 = -b2 - b;
4347 r->balance = b2;
4348 t->balance = 0;
4349 s = p->parent;
4350 p->parent = t;
4351 r->parent = t;
4352
4353 if (t->parent = s)
4354 {
4355 if (s->left == p)
4356 s->left = t;
4357 else
4358 s->right = t;
4359 }
4360 else
4361 case_stack->data.case_stmt.case_list = t;
4362 }
4363 break;
4364 }
4365
4366 else
4367 {
4368 /* p->balance == +1; growth of left side balances the node. */
4369 p->balance = 0;
4370 break;
4371 }
4372 }
4373 else
4374 /* r == p->right */
4375 {
4376 int b;
4377
4378 if (! (b = p->balance))
4379 /* Growth propagation from right side. */
4380 p->balance++;
4381 else if (b > 0)
4382 {
4383 if (r->balance > 0)
4384 {
4385 /* L-Rotation */
4386
4387 if (p->right = s = r->left)
4388 s->parent = p;
4389
4390 r->left = p;
4391 p->balance = 0;
4392 r->balance = 0;
4393 s = p->parent;
4394 p->parent = r;
4395 if (r->parent = s)
4396 {
4397 if (s->left == p)
4398 s->left = r;
4399 else
4400 s->right = r;
4401 }
4402
4403 else
4404 case_stack->data.case_stmt.case_list = r;
4405 }
4406
4407 else
4408 /* r->balance == -1 */
4409 {
4410 /* RL-Rotation */
4411 int b2;
4412 struct case_node *t = r->left;
4413
4414 if (p->right = s = t->left)
4415 s->parent = p;
4416
4417 t->left = p;
4418
4419 if (r->left = s = t->right)
4420 s->parent = r;
4421
4422 t->right = r;
4423 b = t->balance;
4424 b2 = b < 0;
4425 r->balance = b2;
4426 b2 = -b2 - b;
4427 p->balance = b2;
4428 t->balance = 0;
4429 s = p->parent;
4430 p->parent = t;
4431 r->parent = t;
4432
4433 if (t->parent = s)
4434 {
4435 if (s->left == p)
4436 s->left = t;
4437 else
4438 s->right = t;
4439 }
4440
4441 else
4442 case_stack->data.case_stmt.case_list = t;
4443 }
4444 break;
4445 }
4446 else
4447 {
4448 /* p->balance == -1; growth of right side balances the node. */
4449 p->balance = 0;
4450 break;
4451 }
4452 }
4453
4454 r = p;
4455 p = p->parent;
4456 }
28d81abb
RK
4457
4458 return 0;
4459}
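
/* A minimal standalone sketch, not part of the original file: the
   range-overlap search that add_case_node performs on the tree above,
   using plain ints in place of tree constants.  The struct and
   function names are hypothetical.  */

struct range_node { int lo, hi; struct range_node *left, *right; };

static struct range_node *
example_find_overlap (root, lo, hi)
     struct range_node *root;
     int lo, hi;
{
  while (root)
    {
      if (hi < root->lo)	/* Wholly below this node's range.  */
	root = root->left;
      else if (root->hi < lo)	/* Wholly above this node's range.  */
	root = root->right;
      else
	return root;		/* Overlap: a duplicate case value.  */
    }
  return 0;			/* No overlap; insertion may proceed.  */
}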
ca695ac9 4460
ca695ac9
JB
4461/* Accumulate one case or default label; VALUE is the value of the
4462 case, or nil for a default label. If not currently inside a case,
4463 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4464 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4465 Return 0 on success. This function is a leftover from the earlier
4466 bytecode compiler, which was based on gcc 1.37. It should be
0f41302f 4467 merged into pushcase. */
ca695ac9 4468
704f4dca 4469static int
ca695ac9
JB
4470bc_pushcase (value, label)
4471 tree value;
4472 tree label;
4473{
4474 struct nesting *thiscase = case_stack;
4475 struct case_node *case_label, *new_label;
4476
4477 if (! thiscase)
4478 return 1;
4479
4480 /* Fail if duplicate, overlap, or out of type range. */
4481 if (value)
4482 {
4483 value = convert (thiscase->data.case_stmt.nominal_type, value);
4484 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4485 return 3;
4486
4487 for (case_label = thiscase->data.case_stmt.case_list;
4488 case_label->left; case_label = case_label->left)
4489 if (! tree_int_cst_lt (case_label->left->high, value))
4490 break;
4491
4492 if ((case_label != thiscase->data.case_stmt.case_list
4493 && ! tree_int_cst_lt (case_label->high, value))
abf7b40a 4494 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
ca695ac9
JB
4495 return 2;
4496
4497 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4498 new_label->low = new_label->high = copy_node (value);
4499 new_label->code_label = label;
4500 new_label->left = case_label->left;
4501
4502 case_label->left = new_label;
4503 thiscase->data.case_stmt.num_ranges++;
4504 }
4505 else
4506 {
4507 if (thiscase->data.case_stmt.default_label)
4508 return 2;
4509 thiscase->data.case_stmt.default_label = label;
4510 }
4511
4512 expand_label (label);
4513 return 0;
4514}
28d81abb 4515\f
94d6511c
PB
4516/* Returns the number of possible values of TYPE.
4517 Returns -1 if the number is unknown or variable.
4518 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4519 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4520 do not increase monotonically (there may be duplicates);
4521 to 1 if the values increase monotonically, but not always by 1;
4522 otherwise sets it to 0. */
4523
4524HOST_WIDE_INT
4525all_cases_count (type, spareness)
4526 tree type;
4527 int *spareness;
4528{
4529 HOST_WIDE_INT count, count_high = 0;
4530 *spareness = 0;
4531
4532 switch (TREE_CODE (type))
4533 {
4534 tree t;
4535 case BOOLEAN_TYPE:
4536 count = 2;
4537 break;
4538 case CHAR_TYPE:
4539 count = 1 << BITS_PER_UNIT;
4540 break;
4541 default:
4542 case INTEGER_TYPE:
4543 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
c02aebe2 4544 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
94d6511c
PB
4545 return -1;
4546 else
4547 {
4548 /* count
4549 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4550 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
0f41302f 4551 but with overflow checking. */
94d6511c
PB
4552 tree mint = TYPE_MIN_VALUE (type);
4553 tree maxt = TYPE_MAX_VALUE (type);
4554 HOST_WIDE_INT lo, hi;
4555 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4556 &lo, &hi);
4557 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4558 lo, hi, &lo, &hi);
4559 add_double (lo, hi, 1, 0, &lo, &hi);
4560 if (hi != 0 || lo < 0)
4561 return -2;
4562 count = lo;
4563 }
4564 break;
4565 case ENUMERAL_TYPE:
4566 count = 0;
4567 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4568 {
4569 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4570 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4571 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4572 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4573 *spareness = 1;
4574 count++;
4575 }
4576 if (*spareness == 1)
4577 {
4578 tree prev = TREE_VALUE (TYPE_VALUES (type));
4579 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4580 {
4581 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4582 {
4583 *spareness = 2;
4584 break;
4585 }
4586 prev = TREE_VALUE (t);
4587 }
4588
4589 }
4590 }
4591 return count;
4592}
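
/* A worked sketch, not part of the original file: the overflow-checked
   span computation used in the INTEGER_TYPE arm above, on explicit
   (low, high) double-word pairs.  For min = -5, max = 9 it returns 15;
   it returns -2 when the count does not fit in a HOST_WIDE_INT, just
   as all_cases_count does.  */

static HOST_WIDE_INT
example_case_span (minlo, minhi, maxlo, maxhi)
     HOST_WIDE_INT minlo, minhi, maxlo, maxhi;
{
  HOST_WIDE_INT lo, hi;

  neg_double (minlo, minhi, &lo, &hi);		/* lo:hi = -min */
  add_double (maxlo, maxhi, lo, hi, &lo, &hi);	/* lo:hi = max - min */
  add_double (lo, hi, 1, 0, &lo, &hi);		/* lo:hi = max - min + 1 */
  return (hi != 0 || lo < 0) ? -2 : lo;
}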
4593
4594
4595#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4596 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4597 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4598#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4599 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4600 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
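
/* A minimal usage sketch of the two macros above, not part of the
   original file; `seen' and COUNT are hypothetical, and COUNT is
   assumed to exceed the offsets tested.  The allocation mirrors the
   bytes_needed computation in check_for_full_enumeration_handling
   below.  */

static int
example_bitarray (count)
     long count;
{
  long bytes_needed = (count + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
  unsigned char *seen = (unsigned char *) malloc (bytes_needed);
  int was_set;

  bzero (seen, bytes_needed);
  BITARRAY_SET (seen, 42);		/* Mark case offset 42 as seen.  */
  was_set = BITARRAY_TEST (seen, 42) != 0;
  free (seen);
  return was_set;			/* 1: the bit was recorded.  */
}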
94d6511c
PB
4601
4602/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4603 with the case values we have seen, assuming the case expression
4604 has the given TYPE.
4605 SPARSENESS is as determined by all_cases_count.
4606
9faa82d8 4607 The time needed is proportional to COUNT, unless
94d6511c
PB
4608 SPARSENESS is 2, in which case quadratic time is needed. */
4609
4610void
4611mark_seen_cases (type, cases_seen, count, sparseness)
4612 tree type;
4613 unsigned char *cases_seen;
4614 long count;
4615 int sparseness;
4616{
4617 long i;
4618
4619 tree next_node_to_try = NULL_TREE;
4620 long next_node_offset = 0;
4621
5720c7e7 4622 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c
PB
4623 tree val = make_node (INTEGER_CST);
4624 TREE_TYPE (val) = type;
5720c7e7
RK
4625 if (! root)
4626 ; /* Do nothing */
4627 else if (sparseness == 2)
94d6511c 4628 {
5720c7e7
RK
4629 tree t;
4630 HOST_WIDE_INT xlo;
4631
4632 /* This less efficient loop is only needed to handle
4633 duplicate case values (multiple enum constants
4634 with the same value). */
4635 TREE_TYPE (val) = TREE_TYPE (root->low);
4636 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4637 t = TREE_CHAIN (t), xlo++)
94d6511c 4638 {
5720c7e7
RK
4639 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4640 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4641 n = root;
4642 do
94d6511c 4643 {
5720c7e7
RK
4644 /* Keep going past elements distinctly greater than VAL. */
4645 if (tree_int_cst_lt (val, n->low))
4646 n = n->left;
4647
4648 /* or distinctly less than VAL. */
4649 else if (tree_int_cst_lt (n->high, val))
4650 n = n->right;
4651
4652 else
94d6511c 4653 {
5720c7e7
RK
4654 /* We have found a matching range. */
4655 BITARRAY_SET (cases_seen, xlo);
4656 break;
94d6511c
PB
4657 }
4658 }
5720c7e7
RK
4659 while (n);
4660 }
4661 }
4662 else
4663 {
4664 if (root->left)
4665 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4666 for (n = root; n; n = n->right)
4667 {
4668 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4669 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4670 while ( ! tree_int_cst_lt (n->high, val))
94d6511c 4671 {
5720c7e7
RK
4672 /* Calculate (into xlo) the "offset" of the integer (val).
4673 The element with lowest value has offset 0, the next smallest
4674 element has offset 1, etc. */
4675
4676 HOST_WIDE_INT xlo, xhi;
4677 tree t;
94d6511c
PB
4678 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4679 {
4680 /* The TYPE_VALUES will be in increasing order, so
4681 starting searching where we last ended. */
4682 t = next_node_to_try;
4683 xlo = next_node_offset;
4684 xhi = 0;
4685 for (;;)
4686 {
4687 if (t == NULL_TREE)
4688 {
4689 t = TYPE_VALUES (type);
4690 xlo = 0;
4691 }
4692 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4693 {
4694 next_node_to_try = TREE_CHAIN (t);
4695 next_node_offset = xlo + 1;
4696 break;
4697 }
4698 xlo++;
4699 t = TREE_CHAIN (t);
4700 if (t == next_node_to_try)
5720c7e7
RK
4701 {
4702 xlo = -1;
4703 break;
4704 }
94d6511c
PB
4705 }
4706 }
4707 else
4708 {
4709 t = TYPE_MIN_VALUE (type);
4710 if (t)
4711 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4712 &xlo, &xhi);
4713 else
4714 xlo = xhi = 0;
4715 add_double (xlo, xhi,
4716 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4717 &xlo, &xhi);
4718 }
4719
9dd53f1e 4720 if (xhi == 0 && xlo >= 0 && xlo < count)
94d6511c 4721 BITARRAY_SET (cases_seen, xlo);
5720c7e7
RK
4722 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4723 1, 0,
4724 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 4725 }
94d6511c
PB
4726 }
4727 }
4728}
4729
28d81abb
RK
4730/* Called when the index of a switch statement is an enumerated type
4731 and there is no default label.
4732
4733 Checks that all enumeration literals are covered by the case
4734 expressions of a switch. Also, warn if there are any extra
4735 switch cases that are *not* elements of the enumerated type.
4736
4737 If all enumeration literals were covered by the case expressions,
4738 turn one of the expressions into the default expression since it should
4739 not be possible to fall through such a switch. */
4740
4741void
4742check_for_full_enumeration_handling (type)
4743 tree type;
4744{
4745 register struct case_node *n;
4746 register struct case_node **l;
4747 register tree chain;
4748 int all_values = 1;
4749
0f41302f 4750 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
4751 int sparseness = 0;
4752
0f41302f 4753 /* The number of possible selector values. */
94d6511c
PB
4754 HOST_WIDE_INT size;
4755
4756 /* For each possible selector value, a one iff it has been matched
0f41302f 4757 by a case value alternative. */
94d6511c
PB
4758 unsigned char *cases_seen;
4759
0f41302f 4760 /* The allocated size of cases_seen, in chars. */
94d6511c
PB
4761 long bytes_needed;
4762 tree t;
4763
ca695ac9
JB
4764 if (output_bytecode)
4765 {
4766 bc_check_for_full_enumeration_handling (type);
4767 return;
4768 }
4769
94d6511c
PB
4770 if (! warn_switch)
4771 return;
4772
4773 size = all_cases_count (type, &sparseness);
4774 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 4775
94d6511c 4776 if (size > 0 && size < 600000
0f41302f 4777 /* We deliberately use malloc here - not xmalloc. */
ad03007a 4778 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
28d81abb 4779 {
94d6511c
PB
4780 long i;
4781 tree v = TYPE_VALUES (type);
4782 bzero (cases_seen, bytes_needed);
28d81abb 4783
94d6511c
PB
4784 /* The time complexity of this code is normally O(N), where
4785 N is the number of members in the enumerated type.
4786 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 4787 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
4788
4789 mark_seen_cases (type, cases_seen, size, sparseness);
4790
4791 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
28d81abb 4792 {
94d6511c 4793 if (BITARRAY_TEST(cases_seen, i) == 0)
1ddde1cd 4794 warning ("enumeration value `%s' not handled in switch",
94d6511c 4795 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
28d81abb 4796 }
94d6511c
PB
4797
4798 free (cases_seen);
28d81abb
RK
4799 }
4800
4801 /* Now we go the other way around; we warn if there are case
ac2a9454 4802 expressions that don't correspond to enumerators. This can
28d81abb 4803 occur since C and C++ don't enforce type-checking of
0f41302f 4804 assignments to enumeration variables. */
28d81abb 4805
5720c7e7
RK
4806 if (case_stack->data.case_stmt.case_list
4807 && case_stack->data.case_stmt.case_list->left)
4808 case_stack->data.case_stmt.case_list
4809 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
28d81abb
RK
4810 if (warn_switch)
4811 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4812 {
4813 for (chain = TYPE_VALUES (type);
4814 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4815 chain = TREE_CHAIN (chain))
4816 ;
4817
4818 if (!chain)
3b24f55b
RS
4819 {
4820 if (TYPE_NAME (type) == 0)
4821 warning ("case value `%d' not in enumerated type",
4822 TREE_INT_CST_LOW (n->low));
4823 else
4824 warning ("case value `%d' not in enumerated type `%s'",
4825 TREE_INT_CST_LOW (n->low),
4826 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4827 == IDENTIFIER_NODE)
4828 ? TYPE_NAME (type)
4829 : DECL_NAME (TYPE_NAME (type))));
4830 }
1ddde1cd
RS
4831 if (!tree_int_cst_equal (n->low, n->high))
4832 {
4833 for (chain = TYPE_VALUES (type);
4834 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4835 chain = TREE_CHAIN (chain))
4836 ;
4837
4838 if (!chain)
3b24f55b
RS
4839 {
4840 if (TYPE_NAME (type) == 0)
4841 warning ("case value `%d' not in enumerated type",
4842 TREE_INT_CST_LOW (n->high));
4843 else
4844 warning ("case value `%d' not in enumerated type `%s'",
4845 TREE_INT_CST_LOW (n->high),
4846 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4847 == IDENTIFIER_NODE)
4848 ? TYPE_NAME (type)
4849 : DECL_NAME (TYPE_NAME (type))));
4850 }
1ddde1cd 4851 }
28d81abb
RK
4852 }
4853
ae8cb346
RS
4854#if 0
4855 /* ??? This optimization is disabled because it causes valid programs to
4856 fail. ANSI C does not guarantee that an expression with enum type
9faa82d8 4857 will have a value that is the same as one of the enumeration literals. */
ae8cb346 4858
28d81abb
RK
4859 /* If all values were found as case labels, make one of them the default
4860 label. Thus, this switch will never fall through. We arbitrarily pick
4861 the last one to make the default since this is likely the most
4862 efficient choice. */
4863
4864 if (all_values)
4865 {
4866 for (l = &case_stack->data.case_stmt.case_list;
4867 (*l)->right != 0;
4868 l = &(*l)->right)
4869 ;
4870
4871 case_stack->data.case_stmt.default_label = (*l)->code_label;
4872 *l = 0;
4873 }
ae8cb346 4874#endif /* 0 */
28d81abb 4875}
ca695ac9
JB
4876
4877
4878/* Check that all enumeration literals are covered by the case
4879 expressions of a switch. Also warn if there are any cases
4880 that are not elements of the enumerated type. */
704f4dca
RK
4881
4882static void
ca695ac9
JB
4883bc_check_for_full_enumeration_handling (type)
4884 tree type;
4885{
4886 struct nesting *thiscase = case_stack;
4887 struct case_node *c;
4888 tree e;
4889
4890 /* Check for enums not handled. */
4891 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4892 {
4893 for (c = thiscase->data.case_stmt.case_list->left;
4894 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4895 c = c->left)
4896 ;
4897 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4898 warning ("enumerated value `%s' not handled in switch",
4899 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4900 }
4901
4902 /* Check for cases not in the enumeration. */
4903 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4904 {
4905 for (e = TYPE_VALUES (type);
4906 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4907 e = TREE_CHAIN (e))
4908 ;
4909 if (! e)
4910 warning ("case value `%d' not in enumerated type `%s'",
4911 TREE_INT_CST_LOW (c->low),
4912 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4913 ? TYPE_NAME (type)
4914 : DECL_NAME (TYPE_NAME (type))));
4915 }
4916}
28d81abb
RK
4917\f
4918/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 4919 in which ORIG_INDEX is the expression to be tested.
28d81abb
RK
4920 Generate the code to test it and jump to the right place. */
4921
4922void
4923expand_end_case (orig_index)
4924 tree orig_index;
4925{
3474db0e 4926 tree minval, maxval, range, orig_minval;
28d81abb
RK
4927 rtx default_label = 0;
4928 register struct case_node *n;
4929 int count;
4930 rtx index;
ca695ac9 4931 rtx table_label;
28d81abb
RK
4932 int ncases;
4933 rtx *labelvec;
4934 register int i;
4935 rtx before_case;
4936 register struct nesting *thiscase = case_stack;
1b0cb6fc 4937 tree index_expr, index_type;
ca695ac9
JB
4938 int unsignedp;
4939
4940 if (output_bytecode)
4941 {
4942 bc_expand_end_case (orig_index);
4943 return;
4944 }
4945
4946 table_label = gen_label_rtx ();
4947 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
4948 index_type = TREE_TYPE (index_expr);
4949 unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
4950
4951 do_pending_stack_adjust ();
4952
4953 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 4954 if (index_type != error_mark_node)
28d81abb
RK
4955 {
4956 /* If switch expression was an enumerated type, check that all
4957 enumeration literals are covered by the cases.
4958 No sense trying this if there's a default case, however. */
4959
4960 if (!thiscase->data.case_stmt.default_label
4961 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4962 && TREE_CODE (index_expr) != INTEGER_CST)
4963 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4964
4965 /* If this is the first label, warn if any insns have been emitted. */
4966 if (thiscase->data.case_stmt.seenlabel == 0)
4967 {
4968 rtx insn;
4969 for (insn = get_last_insn ();
4970 insn != case_stack->data.case_stmt.start;
4971 insn = PREV_INSN (insn))
4972 if (GET_CODE (insn) != NOTE
4973 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4974 {
4975 warning ("unreachable code at beginning of %s",
4976 case_stack->data.case_stmt.printname);
4977 break;
4978 }
4979 }
4980
4981 /* If we don't have a default-label, create one here,
4982 after the body of the switch. */
4983 if (thiscase->data.case_stmt.default_label == 0)
4984 {
4985 thiscase->data.case_stmt.default_label
4986 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4987 expand_label (thiscase->data.case_stmt.default_label);
4988 }
4989 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4990
4991 before_case = get_last_insn ();
4992
5720c7e7
RK
4993 if (thiscase->data.case_stmt.case_list
4994 && thiscase->data.case_stmt.case_list->left)
b059139c
RK
4995 thiscase->data.case_stmt.case_list
4996 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4997
28d81abb
RK
4998 /* Simplify the case-list before we count it. */
4999 group_case_nodes (thiscase->data.case_stmt.case_list);
5000
      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      if (count == 0)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

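      /* Illustrative example (not part of the original source): for
	 `switch (i) { case 1: case 2: case 3: case 1000: ... }' the count
	 is 4 but the span of values is 999; since 999 > 10 * 4, the
	 conditional-branch strategy below is chosen rather than a dispatch
	 table with roughly a thousand entries.  */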
      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char for which we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */
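	      /* Illustrative example (not part of the original source):
		 for case values 1, 3, 5, 7 and 9 the ordered list is split
		 at the pivot 5, with 1 and 3 forming the left branch and
		 7 and 9 the right, so any index value is decided in a few
		 comparisons rather than up to five.  */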

	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);
	      rtx op1, op2;
	      enum machine_mode op_mode;

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (index_type);
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, index_type,
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
		  emit_jump_insn (gen_bltu (default_label));
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (index_type) != index_mode)
		    {
		      index_expr = convert (type_for_size (index_bits, 0),
					    index_expr);
		      index_type = TREE_TYPE (index_expr);
		    }

		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
		  (index, op_mode))
		index = copy_to_mode_reg (op_mode, index);

	      op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
		  (op1, op_mode))
		op1 = copy_to_mode_reg (op_mode, op1);

	      op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
		  (op2, op_mode))
		op2 = copy_to_mode_reg (op_mode, op2);

	      emit_jump_insn (gen_casesi (index, op1, op2,
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR, index_type,
						 index_expr, minval)));
	      index_type = TREE_TYPE (index_expr);
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (index_type),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero ((char *) labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

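	  /* Illustrative example (not part of the original source): with
	     `case 2:' and `case 4 ... 6:', orig_minval is 2 and ncases is 5;
	     the loop above fills slots 0 and 2..4 of LABELVEC, and the gap
	     at slot 1 (index value 3) gets the default label below.  */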
	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
	  if (
#ifdef CASE_VECTOR_PC_RELATIVE
	      1 ||
#endif
	      flag_pic)
	    emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				     gen_rtx (LABEL_REF, Pmode, table_label),
				     gen_rtvec_v (ncases, labelvec)));
	  else
	    emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				     gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}

/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

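/* Illustrative example (not part of the original source): a tree whose root
   is the case node for 4, with left child 2 and right child 6, comes back
   as the list 2 -> 4 -> 6, each node's left field cleared.  */
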
static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left) != 0)
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}

/* Terminate a case statement.  EXPR is the original index
   expression.  */

static void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instruction's literal arguments, in order.
     In addition to the value on the stack, it uses:
     1. The address of the jump table.
     2. The size of the jump table.
     3. The default label.  */

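  /* Illustrative note (not part of the original source): the jump table
     emitted below is a sequence of (low, high, label) entries, one per
     case range; e.g. `case 1:' and `case 4 ... 6:' would yield the
     entries (1, 1, L1) and (4, 6, L2).  */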
  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
			  sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
  else
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	opcode = TREE_INT_CST_LOW (c->low);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	opcode = TREE_INT_CST_LOW (c->high);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	bc_emit_bytecode_DI_const (c->low);
	bc_emit_bytecode_DI_const (c->high);

	bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else
    /* Bad mode.  */
    abort ();

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST
      && warn_switch)
    check_for_full_enumeration_handling (TREE_TYPE (expr));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}

/* Return a unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
\f
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

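/* Illustrative example (not part of the original source): a switch whose
   cases are 'a' .. 'z', '0' .. '9' and '\n' passes the test below and is
   cost-weighted as text; one containing the value 500, or a control
   character such as '\003', is rejected and costed uniformly.  */
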
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
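
/* Illustrative example (not part of the original source): the ordered list
   1 -> 2 -> 3 -> 4 -> 5 is split at the pivot 3, giving {1, 2} as the left
   branch and {4, 5} as the right; each branch is then balanced in the same
   way.  */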

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost.
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
\f
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

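/* Illustrative example (not part of the original source): if this node's
   low bound is 5 and a parent node already tested against a high bound of
   4, control can only reach this node with index >= 5, so no low-bound
   test need be emitted here.  */
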
static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
\f
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
\f
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}