/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
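
/* For illustration (a sketch, not code in this file), the calls made
   while parsing `if (cond) stmt;' follow this pattern:

     expand_start_cond (cond, 0);
     ... expand_ calls for STMT ...
     expand_end_cond ();

   where the second argument of expand_start_cond is the EXITFLAG
   described with the `nesting' stack below.  */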

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
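
/* For example, in the GNU C statement expression (a sketch only)

     z = ({ foo (); bar (); });

   the value of the last expr-stmt, `bar ()', becomes the value of the
   whole construct, so a value must be computed for every expr-stmt.  */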

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

extern void (*interim_eh_hook) PROTO((tree));
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
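
/* For example (an illustrative sketch), the GNU C switch

     switch (i) { case 1: ... case 4 ... 6: ... default: ... }

   yields one case node with LOW == HIGH == 1 and one "range" node with
   LOW == 4, HIGH == 6.  Dense case values would be dispatched through a
   branch table; sparse ones through a tree of compares and jumps.  */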

struct case_node
{
  struct case_node *left;     /* Left son in binary tree */
  struct case_node *right;    /* Right son in binary tree; also node chain */
  struct case_node *parent;   /* Parent of node in binary tree */
  tree low;                   /* Lowest index value for this label */
  tree high;                  /* Highest index value for this label */
  tree code_label;            /* Label to jump to when node matches */
  int balance;                /* AVL balance factor for this node */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

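/* As an illustration (not code in this file): while expanding the body
   of `while (c) { if (p) ... }', the innermost entry on `nesting_stack'
   describes the if, and its `all' link leads to the loop's entry;
   `cond_stack' and `loop_stack' each point to the entry of their own
   kind.  Ending a construct pops it from both chains.  */
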
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
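
/* For instance (a sketch; POPSTACK is used only within this file), an
   `expand_end_WHATEVER' function ending a loop can discard the loop's
   nesting object, and anything still nested inside it, with

     POPSTACK (loop_stack);

   which also unchains those entries from cond_stack, block_stack, etc.  */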
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

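/* A sketch of a goto that needs a fixup (illustrative only):

     { int v[n];		-- binding contour with a stack level
       ...
       goto out;		-- `out' not defined yet: fixup recorded
       ...
     }
   out: ;

   The jump insn is emitted at the `goto', but the code restoring the
   stack level can only be inserted once the contour is exited.  */
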
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow.  */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch.  */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled.  */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack.  */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};

static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || (GET_CODE (last_insn) == NOTE
		  && prev_real_insn (last_insn) == 0)))
	emit_insn (gen_nop ());
    }
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
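
/* This expands the GNU C computed goto (an illustrative sketch):

     void *p = &&lab;
     ...
     goto *p;		-- the front end calls expand_computed_goto
   lab: ;

   EXP is the tree for `p'; its value is the address of the label.  */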
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
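
/* A nonlocal goto arises with GNU C nested functions, as in this
   sketch (adapted from the GCC manual, not code in this file):

     void outer ()
     {
       __label__ failed;
       void inner () { ... goto failed; ... }
       ...
     failed: ...
     }

   Expanding the goto in `inner' takes the branch above that restores
   OUTER's frame and stack pointers before jumping to its handler.  */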

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)
	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump.  */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list.  */
    bc_expand_fixup (opcode, label, stack_level);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
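
/* For example (a C++-flavored sketch, not code in this file):

     { T obj;			-- block with a cleanup: obj's destructor
       goto done;		-- `done' not yet seen, so expand_fixup
       ...			   records the cleanup list and a BLOCK
     }
   done: ;

   When the block ends, fixup_gotos (below) inserts the destructor call
   and any needed stack restore just before the recorded jump insn.  */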

/* Generate a bytecode jump with OPCODE to a fixup routine that links to
   LABEL.  Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code.  */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      f->before_jump
		= emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue.  */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
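
/* This handles only the simple form (sketch for illustration)

     asm ("nop");

   where BODY is the STRING_CST "nop".  The extended form with operands
   is handled by expand_asm_operands below.  */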
1344
1345/* Generate RTL for an asm statement with arguments.
1346 STRING is the instruction template.
1347 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1348 Each output or input has an expression in the TREE_VALUE and
1349 a constraint-string in the TREE_PURPOSE.
1350 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1351 that is clobbered by this insn.
1352
1353 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1354 Some elements of OUTPUTS may be replaced with trees representing temporary
1355 values. The caller should copy those temporary values to the originally
1356 specified lvalues.
1357
1358 VOL nonzero means the insn is volatile; don't optimize it. */
1359
1360void
1361expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1362 tree string, outputs, inputs, clobbers;
1363 int vol;
1364 char *filename;
1365 int line;
1366{
1367 rtvec argvec, constraints;
1368 rtx body;
1369 int ninputs = list_length (inputs);
1370 int noutputs = list_length (outputs);
b4ccaa16 1371 int nclobbers;
28d81abb
RK
1372 tree tail;
1373 register int i;
1374 /* Vector of RTX's of evaluated output operands. */
1375 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1376 /* The insn we have emitted. */
1377 rtx insn;
1378
ca695ac9
JB
1379 if (output_bytecode)
1380 {
e3da71ef 1381 error ("`asm' is invalid when generating bytecode");
ca695ac9
JB
1382 return;
1383 }
1384
b4ccaa16
RS
1385 /* Count the number of meaningful clobbered registers, ignoring what
1386 we would ignore later. */
1387 nclobbers = 0;
1388 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1389 {
1390 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
c09e6498
RS
1391 i = decode_reg_name (regname);
1392 if (i >= 0 || i == -4)
b4ccaa16 1393 ++nclobbers;
7859e3ac
DE
1394 else if (i == -2)
1395 error ("unknown register name `%s' in `asm'", regname);
b4ccaa16
RS
1396 }
1397
28d81abb
RK
1398 last_expr_type = 0;
1399
1400 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1401 {
1402 tree val = TREE_VALUE (tail);
b50a024d 1403 tree type = TREE_TYPE (val);
28d81abb
RK
1404 tree val1;
1405 int j;
d09a75ae
RK
1406 int found_equal = 0;
1407 int allows_reg = 0;
28d81abb
RK
1408
1409 /* If there's an erroneous arg, emit no insn. */
1410 if (TREE_TYPE (val) == error_mark_node)
1411 return;
1412
d09a75ae
RK
1413 /* Make sure constraint has `=' and does not have `+'. Also, see
1414 if it allows any register. Be liberal on the latter test, since
1415 the worst that happens if we get it wrong is we issue an error
1416 message. */
28d81abb 1417
d09a75ae
RK
1418 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1419 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1420 {
1421 case '+':
1422 error ("output operand constraint contains `+'");
1423 return;
1424
1425 case '=':
28d81abb 1426 found_equal = 1;
d09a75ae
RK
1427 break;
1428
1429 case '?': case '!': case '*': case '%': case '&':
d09a75ae
RK
1430 case 'V': case 'm': case 'o': case '<': case '>':
1431 case 'E': case 'F': case 'G': case 'H': case 'X':
1432 case 's': case 'i': case 'n':
1433 case 'I': case 'J': case 'K': case 'L': case 'M':
1434 case 'N': case 'O': case 'P': case ',':
1435#ifdef EXTRA_CONSTRAINT
1436 case 'Q': case 'R': case 'S': case 'T': case 'U':
1437#endif
1438 break;
1439
1440 case 'p': case 'g': case 'r':
7b7a33b3
JW
1441 /* Whether or not a numeric constraint allows a register is
1442 decided by the matching constraint, and so there is no need
1443 to do anything special with them. We must handle them in
1444 the default case, so that we don't unnecessarily force
1445 operands to memory. */
1446 case '0': case '1': case '2': case '3': case '4':
d09a75ae
RK
1447 default:
1448 allows_reg = 1;
1449 break;
1450 }
1451
28d81abb
RK
1452 if (! found_equal)
1453 {
1454 error ("output operand constraint lacks `='");
1455 return;
1456 }
1457
d09a75ae
RK
1458 /* If an output operand is not a decl or indirect ref and our constraint
1459 allows a register, make a temporary to act as an intermediate.
1460 Make the asm insn write into that, then our caller will copy it to
1461 the real output operand. Likewise for promoted variables. */
28d81abb 1462
b50a024d
RK
1463 if (TREE_CODE (val) == INDIRECT_REF
1464 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1465 && ! (GET_CODE (DECL_RTL (val)) == REG
d09a75ae
RK
1466 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1467 || ! allows_reg)
1468 {
1469 if (! allows_reg)
1470 mark_addressable (TREE_VALUE (tail));
1471
1472 output_rtx[i]
1473 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1474
1475 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1476 error ("output number %d not directly addressable", i);
1477 }
b50a024d 1478 else
e619bb8d 1479 {
6e81958a 1480 output_rtx[i] = assign_temp (type, 0, 0, 0);
b50a024d
RK
1481 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1482 }
28d81abb
RK
1483 }
1484
1485 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1486 {
1487 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1488 return;
1489 }
1490
1491 /* Make vectors for the expression-rtx and constraint strings. */
1492
1493 argvec = rtvec_alloc (ninputs);
1494 constraints = rtvec_alloc (ninputs);
1495
1496 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1497 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1498 filename, line);
1499 MEM_VOLATILE_P (body) = vol;
1500
1501 /* Eval the inputs and put them into ARGVEC.
1502 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1503
1504 i = 0;
1505 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1506 {
1507 int j;
65fed0cb 1508 int allows_reg = 0;
28d81abb
RK
1509
1510 /* If there's an erroneous arg, emit no insn,
1511 because the ASM_INPUT would get VOIDmode
1512 and that could cause a crash in reload. */
1513 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1514 return;
1515 if (TREE_PURPOSE (tail) == NULL_TREE)
1516 {
1517 error ("hard register `%s' listed as input operand to `asm'",
1518 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1519 return;
1520 }
1521
1522 /* Make sure constraint has neither `=' nor `+'. */
1523
65fed0cb
RK
1524 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1525 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
28d81abb 1526 {
65fed0cb 1527 case '+': case '=':
28d81abb
RK
1528 error ("input operand constraint contains `%c'",
1529 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1530 return;
65fed0cb
RK
1531
1532 case '?': case '!': case '*': case '%': case '&':
65fed0cb
RK
1533 case 'V': case 'm': case 'o': case '<': case '>':
1534 case 'E': case 'F': case 'G': case 'H': case 'X':
1535 case 's': case 'i': case 'n':
1536 case 'I': case 'J': case 'K': case 'L': case 'M':
1537 case 'N': case 'O': case 'P': case ',':
1538#ifdef EXTRA_CONSTRAINT
1539 case 'Q': case 'R': case 'S': case 'T': case 'U':
1540#endif
1541 break;
1542
1543 case 'p': case 'g': case 'r':
7b7a33b3
JW
1544 /* Whether or not a numeric constraint allows a register is
1545 decided by the matching constraint, and so there is no need
1546 to do anything special with them. We must handle them in
1547 the default case, so that we don't unnecessarily force
1548 operands to memory. */
1549 case '0': case '1': case '2': case '3': case '4':
65fed0cb
RK
1550 default:
1551 allows_reg = 1;
1552 break;
28d81abb
RK
1553 }
1554
65fed0cb
RK
1555 if (! allows_reg)
1556 mark_addressable (TREE_VALUE (tail));
1557
28d81abb 1558 XVECEXP (body, 3, i) /* argvec */
37366632 1559 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
76ebc969
RK
1560 if (CONSTANT_P (XVECEXP (body, 3, i))
1561 && ! general_operand (XVECEXP (body, 3, i),
1562 TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
65fed0cb
RK
1563 {
1564 if (allows_reg)
1565 XVECEXP (body, 3, i)
1566 = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1567 XVECEXP (body, 3, i));
1568 else
1569 XVECEXP (body, 3, i)
1570 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1571 XVECEXP (body, 3, i));
1572 }
1573
1574 if (! allows_reg
1575 && (GET_CODE (XVECEXP (body, 3, i)) == REG
1576 || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
1577 || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
1578 {
1579 tree type = TREE_TYPE (TREE_VALUE (tail));
1580 rtx memloc = assign_temp (type, 1, 1, 1);
1581
1582 emit_move_insn (memloc, XVECEXP (body, 3, i));
1583 XVECEXP (body, 3, i) = memloc;
1584 }
1585
1586 XVECEXP (body, 4, i) /* constraints */
1587 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1588 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1589 i++;
1590 }
1591
1592 /* Protect all the operands from the queue,
1593 now that they have all been evaluated. */
1594
1595 for (i = 0; i < ninputs; i++)
1596 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1597
1598 for (i = 0; i < noutputs; i++)
1599 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1600
1601 /* Now, for each output, construct an rtx
1602 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1603 ARGVEC CONSTRAINTS))
1604 If there is more than one, put them inside a PARALLEL. */
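/* Illustrative sketch (not in the original source): for an asm with two
   outputs and a register clobber, say
   `asm ("..." : "=r" (x), "=r" (y) : : "ax")' on a machine where `ax'
   decodes to hard register 0, the code below builds roughly

     (parallel [(set (reg x) (asm_operands "..." "=r" 0 argvec constraints))
                (set (reg y) (asm_operands "..." "=r" 1 argvec constraints))
                (clobber (reg:QI 0))])

   with both ASM_OPERANDS sharing the same ARGVEC and CONSTRAINTS vectors. */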
1605
1606 if (noutputs == 1 && nclobbers == 0)
1607 {
1608 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1609 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1610 }
1611 else if (noutputs == 0 && nclobbers == 0)
1612 {
1613 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1614 insn = emit_insn (body);
1615 }
1616 else
1617 {
1618 rtx obody = body;
1619 int num = noutputs;
1620 if (num == 0) num = 1;
1621 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1622
1623 /* For each output operand, store a SET. */
1624
1625 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1626 {
1627 XVECEXP (body, 0, i)
1628 = gen_rtx (SET, VOIDmode,
1629 output_rtx[i],
1630 gen_rtx (ASM_OPERANDS, VOIDmode,
1631 TREE_STRING_POINTER (string),
1632 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1633 i, argvec, constraints,
1634 filename, line));
1635 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1636 }
1637
1638 /* If there are no outputs (but there are some clobbers)
1639 store the bare ASM_OPERANDS into the PARALLEL. */
1640
1641 if (i == 0)
1642 XVECEXP (body, 0, i++) = obody;
1643
1644 /* Store (clobber REG) for each clobbered register specified. */
1645
1646 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1647 {
1648 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1649 int j = decode_reg_name (regname);
1650
1651 if (j < 0)
1652 {
1653 if (j == -3) /* `cc', which is not a register */
1654 continue;
1655
1656 if (j == -4) /* `memory', don't cache memory across asm */
1657 {
1658 XVECEXP (body, 0, i++)
1659 = gen_rtx (CLOBBER, VOIDmode,
1660 gen_rtx (MEM, BLKmode,
1661 gen_rtx (SCRATCH, VOIDmode, 0)));
1662 continue;
1663 }
1664
1665 /* Ignore unknown register, error already signalled. */
1666 continue;
1667 }
1668
1669 /* Use QImode since that's guaranteed to clobber just one reg. */
1670 XVECEXP (body, 0, i++)
1671 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1672 }
1673
1674 insn = emit_insn (body);
1675 }
1676
1677 free_temp_slots ();
1678}
1679\f
1680/* Generate RTL to evaluate the expression EXP
1681 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1682
1683void
1684expand_expr_stmt (exp)
1685 tree exp;
1686{
1687 if (output_bytecode)
1688 {
1689 int org_stack_depth = stack_depth;
1690
1691 bc_expand_expr (exp);
1692
1693 /* Restore stack depth */
1694 if (stack_depth < org_stack_depth)
1695 abort ();
1696
1697 bc_emit_instruction (drop);
1698
1699 last_expr_type = TREE_TYPE (exp);
1700 return;
1701 }
1702
1703 /* If -W, warn about statements with no side effects,
1704 except for an explicit cast to void (e.g. for assert()), and
1705 except inside a ({...}) where they may be useful. */
1706 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1707 {
1708 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1709 && !(TREE_CODE (exp) == CONVERT_EXPR
1710 && TREE_TYPE (exp) == void_type_node))
1711 warning_with_file_and_line (emit_filename, emit_lineno,
1712 "statement with no effect");
1713 else if (warn_unused)
1714 warn_if_unused_value (exp);
1715 }
1716
1717 /* If EXP is of function type and we are expanding statements for
1718 value, convert it to pointer-to-function. */
1719 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1720 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1721
1722 last_expr_type = TREE_TYPE (exp);
1723 if (! flag_syntax_only)
1724 last_expr_value = expand_expr (exp,
1725 (expr_stmts_for_value
1726 ? NULL_RTX : const0_rtx),
1727 VOIDmode, 0);
1728
1729 /* If all we do is reference a volatile value in memory,
1730 copy it to a register to be sure it is actually touched. */
1731 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1732 && TREE_THIS_VOLATILE (exp))
1733 {
1734 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1735 ;
1736 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1737 copy_to_reg (last_expr_value);
1738 else
1739 {
1740 rtx lab = gen_label_rtx ();
1741
1742 /* Compare the value with itself to reference it. */
1743 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1744 expand_expr (TYPE_SIZE (last_expr_type),
1745 NULL_RTX, VOIDmode, 0),
1746 BLKmode, 0,
1747 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1748 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1749 emit_label (lab);
1750 }
1751 }
1752
1753 /* If this expression is part of a ({...}) and is in memory, we may have
1754 to preserve temporaries. */
1755 preserve_temp_slots (last_expr_value);
1756
1757 /* Free any temporaries used to evaluate this expression. Any temporary
1758 used as a result of this expression will already have been preserved
1759 above. */
1760 free_temp_slots ();
1761
1762 emit_queue ();
1763}
1764
1765/* Warn if EXP contains any computations whose results are not used.
1766 Return 1 if a warning is printed; 0 otherwise. */
1767
1768 int
1769warn_if_unused_value (exp)
1770 tree exp;
1771{
1772 if (TREE_USED (exp))
1773 return 0;
1774
1775 switch (TREE_CODE (exp))
1776 {
1777 case PREINCREMENT_EXPR:
1778 case POSTINCREMENT_EXPR:
1779 case PREDECREMENT_EXPR:
1780 case POSTDECREMENT_EXPR:
1781 case MODIFY_EXPR:
1782 case INIT_EXPR:
1783 case TARGET_EXPR:
1784 case CALL_EXPR:
1785 case METHOD_CALL_EXPR:
1786 case RTL_EXPR:
1787 case WITH_CLEANUP_EXPR:
1788 case EXIT_EXPR:
1789 /* We don't warn about COND_EXPR because it may be a useful
1790 construct if either arm contains a side effect. */
1791 case COND_EXPR:
1792 return 0;
1793
1794 case BIND_EXPR:
1795 /* For a binding, warn if no side effect within it. */
1796 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1797
1798 case SAVE_EXPR:
1799 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1800
1801 case TRUTH_ORIF_EXPR:
1802 case TRUTH_ANDIF_EXPR:
1803 /* In && or ||, warn if 2nd operand has no side effect. */
1804 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1805
1806 case COMPOUND_EXPR:
1807 if (TREE_NO_UNUSED_WARNING (exp))
1808 return 0;
1809 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1810 return 1;
1811 /* Let people do `(foo (), 0)' without a warning. */
1812 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1813 return 0;
1814 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1815
1816 case NOP_EXPR:
1817 case CONVERT_EXPR:
1818 case NON_LVALUE_EXPR:
1819 /* Don't warn about values cast to void. */
1820 if (TREE_TYPE (exp) == void_type_node)
1821 return 0;
1822 /* Don't warn about conversions not explicit in the user's program. */
1823 if (TREE_NO_UNUSED_WARNING (exp))
1824 return 0;
1825 /* Assignment to a cast usually results in a cast of a modify.
1826 Don't complain about that. There can be an arbitrary number of
1827 casts before the modify, so we must loop until we find the first
1828 non-cast expression and then test to see if that is a modify. */
1829 {
1830 tree tem = TREE_OPERAND (exp, 0);
1831
1832 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1833 tem = TREE_OPERAND (tem, 0);
1834
1835 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1836 || TREE_CODE (tem) == CALL_EXPR)
1837 return 0;
1838 }
1839 goto warn;
1840
1841 case INDIRECT_REF:
1842 /* Don't warn about automatic dereferencing of references, since
1843 the user cannot control it. */
1844 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1845 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1846 /* ... fall through ... */
1847
1848 default:
1849 /* Referencing a volatile value is a side effect, so don't warn. */
1850 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1851 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1852 && TREE_THIS_VOLATILE (exp))
1853 return 0;
1854 warn:
1855 warning_with_file_and_line (emit_filename, emit_lineno,
1856 "value computed is not used");
1857 return 1;
1858 }
1859}
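/* Illustrative examples (not in the original source), for C input:
   `x + 1;' reaches the default case and draws "value computed is not
   used"; `(void) f ();' is a conversion to void and is silent; and
   `(foo (), 0)' is silent because the second operand of the
   COMPOUND_EXPR is constant. */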
1860
1861/* Clear out the memory of the last expression evaluated. */
1862
1863void
1864clear_last_expr ()
1865{
1866 last_expr_type = 0;
1867}
1868
1869/* Begin a statement which will return a value.
1870 Return the RTL_EXPR for this statement expr.
1871 The caller must save that value and pass it to expand_end_stmt_expr. */
1872
1873tree
1874expand_start_stmt_expr ()
1875{
1876 int momentary;
1877 tree t;
1878
1879 /* When generating bytecode just note down the stack depth */
1880 if (output_bytecode)
1881 return (build_int_2 (stack_depth, 0));
1882
1883 /* Make the RTL_EXPR node temporary, not momentary,
1884 so that rtl_expr_chain doesn't become garbage. */
1885 momentary = suspend_momentary ();
1886 t = make_node (RTL_EXPR);
1887 resume_momentary (momentary);
1888 do_pending_stack_adjust ();
1889 start_sequence_for_rtl_expr (t);
1890 NO_DEFER_POP;
1891 expr_stmts_for_value++;
1892 return t;
1893}
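/* Usage sketch (not in the original source): to expand the statement
   expression `({ f (); 23; })' a front end would do, approximately:

     tree t = expand_start_stmt_expr ();
     expand_expr_stmt (call_to_f);
     expand_expr_stmt (integer_23);
     t = expand_end_stmt_expr (t);

   where `call_to_f' and `integer_23' stand for the parsed subtrees;
   the returned T then carries the value's type and RTL. */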
1894
1895/* Restore the previous state at the end of a statement that returns a value.
1896 Returns a tree node representing the statement's value and the
1897 insns to compute the value.
1898
1899 The nodes of that expression have been freed by now, so we cannot use them.
1900 But we don't want to do that anyway; the expression has already been
1901 evaluated and now we just want to use the value. So generate a RTL_EXPR
1902 with the proper type and RTL value.
1903
1904 If the last substatement was not an expression,
1905 return something with type `void'. */
1906
1907tree
1908expand_end_stmt_expr (t)
1909 tree t;
1910{
1911 if (output_bytecode)
1912 {
1913 int i;
1914 tree t;
1915
1916
1917 /* At this point, all expressions have been evaluated in order.
1918 However, all expression values have been popped when evaluated,
1919 which means we have to recover the last expression value. This is
1920 the last value removed by means of a `drop' instruction. Instead
1921 of adding code to inhibit dropping the last expression value, it
1922 is here recovered by undoing the `drop'. Since `drop' is
1923 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1924 [-1]'. */
1925
1926 bc_adjust_stack (-1);
1927
1928 if (!last_expr_type)
1929 last_expr_type = void_type_node;
1930
1931 t = make_node (RTL_EXPR);
1932 TREE_TYPE (t) = last_expr_type;
1933 RTL_EXPR_RTL (t) = NULL;
1934 RTL_EXPR_SEQUENCE (t) = NULL;
1935
1936 /* Don't consider deleting this expr or containing exprs at tree level. */
1937 TREE_THIS_VOLATILE (t) = 1;
1938
1939 last_expr_type = 0;
1940 return t;
1941 }
1942
1943 OK_DEFER_POP;
1944
1945 if (last_expr_type == 0)
1946 {
1947 last_expr_type = void_type_node;
1948 last_expr_value = const0_rtx;
1949 }
1950 else if (last_expr_value == 0)
1951 /* There are some cases where this can happen, such as when the
1952 statement is void type. */
1953 last_expr_value = const0_rtx;
1954 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1955 /* Remove any possible QUEUED. */
1956 last_expr_value = protect_from_queue (last_expr_value, 0);
1957
1958 emit_queue ();
1959
1960 TREE_TYPE (t) = last_expr_type;
1961 RTL_EXPR_RTL (t) = last_expr_value;
1962 RTL_EXPR_SEQUENCE (t) = get_insns ();
1963
1964 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1965
1966 end_sequence ();
1967
1968 /* Don't consider deleting this expr or containing exprs at tree level. */
1969 TREE_SIDE_EFFECTS (t) = 1;
1970 /* Propagate volatility of the actual RTL expr. */
1971 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1972
1973 last_expr_type = 0;
1974 expr_stmts_for_value--;
1975
1976 return t;
1977}
1978\f
1979/* Generate RTL for the start of an if-then. COND is the expression
1980 whose truth should be tested.
1981
1982 If EXITFLAG is nonzero, this conditional is visible to
1983 `exit_something'. */
1984
1985void
1986expand_start_cond (cond, exitflag)
1987 tree cond;
1988 int exitflag;
1989{
1990 struct nesting *thiscond = ALLOC_NESTING ();
1991
1992 /* Make an entry on cond_stack for the cond we are entering. */
1993
1994 thiscond->next = cond_stack;
1995 thiscond->all = nesting_stack;
1996 thiscond->depth = ++nesting_depth;
1997 thiscond->data.cond.next_label = gen_label_rtx ();
1998 /* Before we encounter an `else', we don't need a separate exit label
1999 unless there are supposed to be exit statements
2000 to exit this conditional. */
2001 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2002 thiscond->data.cond.endif_label = thiscond->exit_label;
2003 cond_stack = thiscond;
2004 nesting_stack = thiscond;
2005
2006 if (output_bytecode)
2007 bc_expand_start_cond (cond, exitflag);
2008 else
2009 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2010}
2011
2012/* Generate RTL between then-clause and the elseif-clause
2013 of an if-then-elseif-.... */
2014
2015void
2016expand_start_elseif (cond)
2017 tree cond;
2018{
2019 if (cond_stack->data.cond.endif_label == 0)
2020 cond_stack->data.cond.endif_label = gen_label_rtx ();
2021 emit_jump (cond_stack->data.cond.endif_label);
2022 emit_label (cond_stack->data.cond.next_label);
2023 cond_stack->data.cond.next_label = gen_label_rtx ();
2024 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2025}
2026
2027/* Generate RTL between the then-clause and the else-clause
2028 of an if-then-else. */
2029
2030void
2031expand_start_else ()
2032{
2033 if (cond_stack->data.cond.endif_label == 0)
2034 cond_stack->data.cond.endif_label = gen_label_rtx ();
2035
2036 if (output_bytecode)
2037 {
2038 bc_expand_start_else ();
2039 return;
2040 }
2041
2042 emit_jump (cond_stack->data.cond.endif_label);
2043 emit_label (cond_stack->data.cond.next_label);
2044 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2045}
2046
2047/* After calling expand_start_else, turn this "else" into an "else if"
2048 by providing another condition. */
2049
2050void
2051expand_elseif (cond)
2052 tree cond;
2053{
2054 cond_stack->data.cond.next_label = gen_label_rtx ();
2055 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2056}
2057
2058/* Generate RTL for the end of an if-then.
2059 Pop the record for it off of cond_stack. */
2060
2061void
2062expand_end_cond ()
2063{
2064 struct nesting *thiscond = cond_stack;
2065
2066 if (output_bytecode)
2067 bc_expand_end_cond ();
2068 else
2069 {
2070 do_pending_stack_adjust ();
2071 if (thiscond->data.cond.next_label)
2072 emit_label (thiscond->data.cond.next_label);
2073 if (thiscond->data.cond.endif_label)
2074 emit_label (thiscond->data.cond.endif_label);
2075 }
2076
2077 POPSTACK (cond_stack);
2078 last_expr_type = 0;
2079}
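/* Usage sketch (not in the original source): a front end expanding
   `if (a) s1; else if (b) s2; else s3;' calls, in order:

     expand_start_cond (a, 0);      then expands s1,
     expand_start_elseif (b);       then expands s2,
     expand_start_else ();          then expands s3,
     expand_end_cond ();

   where `a', `b', `s1', `s2' and `s3' stand for the parsed subtrees. */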
2080
2081
2082/* Generate code for the start of an if-then. COND is the expression
2083 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2084 is to be visible to exit_something. It is assumed that the caller
2085 has pushed the previous context on the cond stack. */
2086
2087static void
2088bc_expand_start_cond (cond, exitflag)
2089 tree cond;
2090 int exitflag;
2091{
2092 struct nesting *thiscond = cond_stack;
2093
2094 thiscond->data.case_stmt.nominal_type = cond;
2095 if (! exitflag)
2096 thiscond->exit_label = gen_label_rtx ();
2097 bc_expand_expr (cond);
2098 bc_emit_bytecode (xjumpifnot);
2099 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2100
2101#ifdef DEBUG_PRINT_CODE
2102 fputc ('\n', stderr);
2103#endif
2104}
2105
2106/* Generate the label for the end of an if with
2107 no else-clause. */
2108
2109static void
2110bc_expand_end_cond ()
2111{
2112 struct nesting *thiscond = cond_stack;
2113
2114 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2115}
2116
2117/* Generate code for the start of the else-clause of
2118 an if-then-else. */
2119
2120static void
2121bc_expand_start_else ()
2122{
2123 struct nesting *thiscond = cond_stack;
2124
2125 thiscond->data.cond.endif_label = thiscond->exit_label;
2126 thiscond->exit_label = gen_label_rtx ();
2127 bc_emit_bytecode (jump);
2128 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2129
2130#ifdef DEBUG_PRINT_CODE
2131 fputc ('\n', stderr);
2132#endif
2133
2134 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2135}
2136\f
2137/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2138 loop should be exited by `exit_something'. This is a loop for which
2139 `expand_continue' will jump to the top of the loop.
2140
2141 Make an entry on loop_stack to record the labels associated with
2142 this loop. */
2143
2144struct nesting *
2145expand_start_loop (exit_flag)
2146 int exit_flag;
2147{
2148 register struct nesting *thisloop = ALLOC_NESTING ();
2149
2150 /* Make an entry on loop_stack for the loop we are entering. */
2151
2152 thisloop->next = loop_stack;
2153 thisloop->all = nesting_stack;
2154 thisloop->depth = ++nesting_depth;
2155 thisloop->data.loop.start_label = gen_label_rtx ();
2156 thisloop->data.loop.end_label = gen_label_rtx ();
2157 thisloop->data.loop.alt_end_label = 0;
2158 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2159 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2160 loop_stack = thisloop;
2161 nesting_stack = thisloop;
2162
2163 if (output_bytecode)
2164 {
2165 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2166 return thisloop;
2167 }
2168
2169 do_pending_stack_adjust ();
2170 emit_queue ();
2171 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2172 emit_label (thisloop->data.loop.start_label);
2173
2174 return thisloop;
2175}
2176
2177/* Like expand_start_loop but for a loop where the continuation point
2178 (for expand_continue_loop) will be specified explicitly. */
2179
2180struct nesting *
2181expand_start_loop_continue_elsewhere (exit_flag)
2182 int exit_flag;
2183{
2184 struct nesting *thisloop = expand_start_loop (exit_flag);
2185 loop_stack->data.loop.continue_label = gen_label_rtx ();
2186 return thisloop;
2187}
2188
2189/* Specify the continuation point for a loop started with
2190 expand_start_loop_continue_elsewhere.
2191 Use this at the point in the code to which a continue statement
2192 should jump. */
2193
2194void
2195expand_loop_continue_here ()
2196{
2197 if (output_bytecode)
2198 {
2199 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2200 return;
2201 }
2202 do_pending_stack_adjust ();
2203 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2204 emit_label (loop_stack->data.loop.continue_label);
2205}
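/* Usage sketch (not in the original source): a front end expanding
   `for (init; cond; incr) body' might emit:

     ... expand init ...
     whichloop = expand_start_loop_continue_elsewhere (1);
     expand_exit_loop_if_false (whichloop, cond);
     ... expand body ...
     expand_loop_continue_here ();
     ... expand incr ...
     expand_end_loop ();

   so that `continue' lands just before the increment code. */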
2206
2207/* End a loop. */
2208
2209static void
2210bc_expand_end_loop ()
2211{
2212 struct nesting *thisloop = loop_stack;
2213
2214 bc_emit_bytecode (jump);
2215 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2216
2217#ifdef DEBUG_PRINT_CODE
2218 fputc ('\n', stderr);
2219#endif
2220
2221 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2222 POPSTACK (loop_stack);
2223 last_expr_type = 0;
2224}
2225
2226
2227/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2228 Pop the block off of loop_stack. */
2229
2230void
2231expand_end_loop ()
2232{
2233 register rtx insn;
2234 register rtx start_label;
2235 rtx last_test_insn = 0;
2236 int num_insns = 0;
2237
2238 if (output_bytecode)
2239 {
2240 bc_expand_end_loop ();
2241 return;
2242 }
2243
2244 insn = get_last_insn ();
2245 start_label = loop_stack->data.loop.start_label;
2246
2247 /* Mark the continue-point at the top of the loop if none elsewhere. */
2248 if (start_label == loop_stack->data.loop.continue_label)
2249 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2250
2251 do_pending_stack_adjust ();
2252
2253 /* If optimizing, perhaps reorder the loop. If the loop
2254 starts with a conditional exit, roll that to the end
2255 where it will optimize together with the jump back.
2256
2257 We look for the last conditional branch to the exit that we encounter
2258 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2259 branch to the exit first, use it.
2260
2261 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2262 because moving them is not valid. */
2263
2264 if (optimize
2265 &&
2266 ! (GET_CODE (insn) == JUMP_INSN
2267 && GET_CODE (PATTERN (insn)) == SET
2268 && SET_DEST (PATTERN (insn)) == pc_rtx
2269 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2270 {
2271 /* Scan insns from the top of the loop looking for a qualified
2272 conditional exit. */
2273 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2274 insn = NEXT_INSN (insn))
2275 {
2276 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2277 break;
2278
2279 if (GET_CODE (insn) == NOTE
2280 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2281 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2282 break;
2283
2284 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2285 num_insns++;
2286
2287 if (last_test_insn && num_insns > 30)
2288 break;
2289
2290 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2291 && SET_DEST (PATTERN (insn)) == pc_rtx
2292 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2293 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2294 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2295 == loop_stack->data.loop.end_label)
2296 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2297 == loop_stack->data.loop.alt_end_label)))
2298 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2299 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2300 == loop_stack->data.loop.end_label)
2301 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2302 == loop_stack->data.loop.alt_end_label)))))
2303 last_test_insn = insn;
2304
2305 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2306 && GET_CODE (PATTERN (insn)) == SET
2307 && SET_DEST (PATTERN (insn)) == pc_rtx
2308 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2309 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2310 == loop_stack->data.loop.end_label)
2311 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2312 == loop_stack->data.loop.alt_end_label)))
2313 /* Include BARRIER. */
2314 last_test_insn = NEXT_INSN (insn);
2315 }
2316
2317 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2318 {
2319 /* We found one. Move everything from there up
2320 to the end of the loop, and add a jump into the loop
2321 to jump to there. */
2322 register rtx newstart_label = gen_label_rtx ();
2323 register rtx start_move = start_label;
2324
2325 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2326 then we want to move this note also. */
2327 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2328 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2329 == NOTE_INSN_LOOP_CONT))
2330 start_move = PREV_INSN (start_move);
2331
2332 emit_label_after (newstart_label, PREV_INSN (start_move));
2333 reorder_insns (start_move, last_test_insn, get_last_insn ());
2334 emit_jump_insn_after (gen_jump (start_label),
2335 PREV_INSN (newstart_label));
2336 emit_barrier_after (PREV_INSN (newstart_label));
2337 start_label = newstart_label;
2338 }
2339 }
2340
2341 emit_jump (start_label);
2342 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2343 emit_label (loop_stack->data.loop.end_label);
2344
2345 POPSTACK (loop_stack);
2346
2347 last_expr_type = 0;
2348}
2349
2350/* Generate a jump to the current loop's continue-point.
2351 This is usually the top of the loop, but may be specified
2352 explicitly elsewhere. If not currently inside a loop,
2353 return 0 and do nothing; caller will print an error message. */
2354
2355int
2356expand_continue_loop (whichloop)
2357 struct nesting *whichloop;
2358{
2359 last_expr_type = 0;
2360 if (whichloop == 0)
2361 whichloop = loop_stack;
2362 if (whichloop == 0)
2363 return 0;
2364 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2365 NULL_RTX);
2366 return 1;
2367}
2368
2369/* Generate a jump to exit the current loop. If not currently inside a loop,
2370 return 0 and do nothing; caller will print an error message. */
2371
2372int
2373expand_exit_loop (whichloop)
2374 struct nesting *whichloop;
2375{
2376 last_expr_type = 0;
2377 if (whichloop == 0)
2378 whichloop = loop_stack;
2379 if (whichloop == 0)
2380 return 0;
2381 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2382 return 1;
2383}
2384
2385/* Generate a conditional jump to exit the current loop if COND
2386 evaluates to zero. If not currently inside a loop,
2387 return 0 and do nothing; caller will print an error message. */
2388
2389int
2390expand_exit_loop_if_false (whichloop, cond)
2391 struct nesting *whichloop;
2392 tree cond;
2393{
2394 last_expr_type = 0;
2395 if (whichloop == 0)
2396 whichloop = loop_stack;
2397 if (whichloop == 0)
2398 return 0;
2399 if (output_bytecode)
2400 {
2401 bc_expand_expr (cond);
2402 bc_expand_goto_internal (xjumpifnot,
2403 BYTECODE_BC_LABEL (whichloop->exit_label),
2404 NULL_TREE);
2405 }
2406 else
2407 {
2408 /* In order to handle fixups, we actually create a conditional jump
2409 around an unconditional branch to exit the loop. If fixups are
2410 necessary, they go before the unconditional branch. */
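/* Shape of the emitted RTL (illustrative, not in the original source):

     if (cond) goto around;
     ... fixup code, if any ...
     goto end_label;
   around:

   The do_jump below branches to `around' when COND is true, so only
   a false COND reaches the unconditional exit branch. */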
2411
2412 rtx label = gen_label_rtx ();
2413 rtx last_insn;
2414
2415 do_jump (cond, NULL_RTX, label);
2416 last_insn = get_last_insn ();
2417 if (GET_CODE (last_insn) == CODE_LABEL)
2418 whichloop->data.loop.alt_end_label = last_insn;
2419 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2420 NULL_RTX);
2421 emit_label (label);
2422 }
2423
2424 return 1;
2425}
2426
2427/* Return non-zero if we should preserve sub-expressions as separate
2428 pseudos. We never do so if we aren't optimizing. We always do so
2429 if -fexpensive-optimizations.
2430
2431 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2432 the loop may still be a small one. */
2433
2434int
2435preserve_subexpressions_p ()
2436{
2437 rtx insn;
2438
2439 if (flag_expensive_optimizations)
2440 return 1;
2441
2442 if (optimize == 0 || loop_stack == 0)
2443 return 0;
2444
2445 insn = get_last_insn_anywhere ();
2446
2447 return (insn
2448 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2449 < n_non_fixed_regs * 3));
2450
2451}
2452
2453/* Generate a jump to exit the current loop, conditional, binding contour
2454 or case statement. Not all such constructs are visible to this function,
2455 only those started with EXIT_FLAG nonzero. Individual languages use
2456 the EXIT_FLAG parameter to control which kinds of constructs you can
2457 exit this way.
2458
2459 If not currently inside anything that can be exited,
2460 return 0 and do nothing; caller will print an error message. */
2461
2462int
2463expand_exit_something ()
2464{
2465 struct nesting *n;
2466 last_expr_type = 0;
2467 for (n = nesting_stack; n; n = n->all)
2468 if (n->exit_label != 0)
2469 {
2470 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2471 return 1;
2472 }
2473
2474 return 0;
2475}
2476\f
2477/* Generate RTL to return from the current function, with no value.
2478 (That is, we do not do anything about returning any value.) */
2479
2480void
2481expand_null_return ()
2482{
2483 struct nesting *block = block_stack;
2484 rtx last_insn = 0;
2485
2486 if (output_bytecode)
2487 {
2488 bc_emit_instruction (ret);
2489 return;
2490 }
2491
2492 /* Does any pending block have cleanups? */
2493
2494 while (block && block->data.block.cleanups == 0)
2495 block = block->next;
2496
2497 /* If yes, use a goto to return, since that runs cleanups. */
2498
2499 expand_null_return_1 (last_insn, block != 0);
2500}
2501
2502/* Generate RTL to return from the current function, with value VAL. */
2503
2504 static void
2505expand_value_return (val)
2506 rtx val;
2507{
2508 struct nesting *block = block_stack;
2509 rtx last_insn = get_last_insn ();
2510 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2511
2512 /* Copy the value to the return location
2513 unless it's already there. */
2514
2515 if (return_reg != val)
2516 {
2517#ifdef PROMOTE_FUNCTION_RETURN
2518 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2519 int unsignedp = TREE_UNSIGNED (type);
2520 enum machine_mode mode
2521 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2522 &unsignedp, 1);
2523
2524 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2525 convert_move (return_reg, val, unsignedp);
2526 else
2527#endif
2528 emit_move_insn (return_reg, val);
2529 }
2530 if (GET_CODE (return_reg) == REG
2531 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2532 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2533 /* Handle calls that return values in multiple non-contiguous locations.
2534 The Irix 6 ABI has examples of this. */
2535 else if (GET_CODE (return_reg) == PARALLEL)
2536 {
2537 int i;
2538
2539 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2540 {
2541 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2542
2543 if (GET_CODE (x) == REG
2544 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2545 emit_insn (gen_rtx (USE, VOIDmode, x));
2546 }
2547 }
2548
2549 /* Does any pending block have cleanups? */
2550
2551 while (block && block->data.block.cleanups == 0)
2552 block = block->next;
2553
2554 /* If yes, use a goto to return, since that runs cleanups.
2555 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2556
2557 expand_null_return_1 (last_insn, block != 0);
2558}
2559
2560/* Output a return with no value. If LAST_INSN is nonzero,
2561 pretend that the return takes place after LAST_INSN.
2562 If USE_GOTO is nonzero then don't use a return instruction;
2563 go to the return label instead. This causes any cleanups
2564 of pending blocks to be executed normally. */
2565
2566static void
2567expand_null_return_1 (last_insn, use_goto)
2568 rtx last_insn;
2569 int use_goto;
2570{
2571 rtx end_label = cleanup_label ? cleanup_label : return_label;
2572
2573 clear_pending_stack_adjust ();
2574 do_pending_stack_adjust ();
2575 last_expr_type = 0;
2576
2577 /* PCC-struct return always uses an epilogue. */
2578 if (current_function_returns_pcc_struct || use_goto)
2579 {
2580 if (end_label == 0)
2581 end_label = return_label = gen_label_rtx ();
2582 expand_goto_internal (NULL_TREE, end_label, last_insn);
2583 return;
2584 }
2585
2586 /* Otherwise output a simple return-insn if one is available,
2587 unless it won't do the job. */
2588#ifdef HAVE_return
2589 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2590 {
2591 emit_jump_insn (gen_return ());
2592 emit_barrier ();
2593 return;
2594 }
2595#endif
2596
2597 /* Otherwise jump to the epilogue. */
2598 expand_goto_internal (NULL_TREE, end_label, last_insn);
2599}
2600\f
2601/* Generate RTL to evaluate the expression RETVAL and return it
2602 from the current function. */
2603
2604void
2605expand_return (retval)
2606 tree retval;
2607{
2608 /* If there are any cleanups to be performed, then they will
2609 be inserted following LAST_INSN. It is desirable
2610 that the last_insn, for such purposes, should be the
2611 last insn before computing the return value. Otherwise, cleanups
2612 which call functions can clobber the return value. */
2613 /* ??? rms: I think that is erroneous, because in C++ it would
2614 run destructors on variables that might be used in the subsequent
2615 computation of the return value. */
2616 rtx last_insn = 0;
2617 register rtx val = 0;
2618 register rtx op0;
2619 tree retval_rhs;
2620 int cleanups;
2621 struct nesting *block;
2622
2623 /* Bytecode returns are quite simple, just leave the result on the
2624 arithmetic stack. */
2625 if (output_bytecode)
2626 {
2627 bc_expand_expr (retval);
2628 bc_emit_instruction (ret);
2629 return;
2630 }
2631
2632 /* If function wants no value, give it none. */
2633 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2634 {
2635 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2636 emit_queue ();
2637 expand_null_return ();
2638 return;
2639 }
2640
2641 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2642 /* This is not sufficient. We also need to watch for cleanups of the
2643 expression we are about to expand. Unfortunately, we cannot know
2644 if it has cleanups until we expand it, and we want to change how we
2645 expand it depending upon if we need cleanups. We can't win. */
2646#if 0
2647 cleanups = any_pending_cleanups (1);
2648#else
2649 cleanups = 1;
2650#endif
2651
2652 if (TREE_CODE (retval) == RESULT_DECL)
2653 retval_rhs = retval;
2654 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2655 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2656 retval_rhs = TREE_OPERAND (retval, 1);
2657 else if (TREE_TYPE (retval) == void_type_node)
2658 /* Recognize tail-recursive call to void function. */
2659 retval_rhs = retval;
2660 else
2661 retval_rhs = NULL_TREE;
2662
2663 /* Only use `last_insn' if there are cleanups which must be run. */
2664 if (cleanups || cleanup_label != 0)
2665 last_insn = get_last_insn ();
2666
2667 /* Distribute return down conditional expr if either of the sides
2668 may involve tail recursion (see test below). This enhances the number
2669 of tail recursions we see. Don't do this always since it can produce
2670 sub-optimal code in some cases and we distribute assignments into
2671 conditional expressions when it would help. */
2672
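/* For instance (illustrative, not in the original source):
   `return p ? f () : g ();' is treated here as if it were
   `if (p) return f (); else return g ();', giving each arm its own
   chance to become a tail-recursive jump below. */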
2673 if (optimize && retval_rhs != 0
2674 && frame_offset == 0
2675 && TREE_CODE (retval_rhs) == COND_EXPR
2676 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2677 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2678 {
2679 rtx label = gen_label_rtx ();
2680 tree expr;
2681
2682 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2683 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2684 DECL_RESULT (current_function_decl),
2685 TREE_OPERAND (retval_rhs, 1));
2686 TREE_SIDE_EFFECTS (expr) = 1;
2687 expand_return (expr);
2688 emit_label (label);
2689
2690 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2691 DECL_RESULT (current_function_decl),
2692 TREE_OPERAND (retval_rhs, 2));
2693 TREE_SIDE_EFFECTS (expr) = 1;
2694 expand_return (expr);
2695 return;
2696 }
2697
2698 /* For tail-recursive call to current function,
2699 just jump back to the beginning.
2700 It's unsafe if any auto variable in this function
2701 has its address taken; for simplicity,
2702 require stack frame to be empty. */
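/* For example (illustrative, not in the original source): in
   `int f (int n) { ... return f (n - 1); }' with no addressable
   locals, the code below reloads the formal `n' with n - 1 and jumps
   to tail_recursion_label instead of emitting a real call. */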
2703 if (optimize && retval_rhs != 0
2704 && frame_offset == 0
2705 && TREE_CODE (retval_rhs) == CALL_EXPR
2706 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2707 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2708 /* Finish checking validity, and if valid emit code
2709 to set the argument variables for the new call. */
2710 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2711 DECL_ARGUMENTS (current_function_decl)))
2712 {
2713 if (tail_recursion_label == 0)
2714 {
2715 tail_recursion_label = gen_label_rtx ();
2716 emit_label_after (tail_recursion_label,
2717 tail_recursion_reentry);
2718 }
2719 emit_queue ();
2720 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2721 emit_barrier ();
2722 return;
2723 }
2724#ifdef HAVE_return
2725 /* This optimization is safe if there are local cleanups
2726 because expand_null_return takes care of them.
2727 ??? I think it should also be safe when there is a cleanup label,
2728 because expand_null_return takes care of them, too.
2729 Any reason why not? */
2730 if (HAVE_return && cleanup_label == 0
2731 && ! current_function_returns_pcc_struct
2732 && BRANCH_COST <= 1)
2733 {
2734 /* If this is return x == y; then generate
2735 if (x == y) return 1; else return 0;
2736 if we can do it with explicit return insns and
2737 branches are cheap. */
2738 if (retval_rhs)
2739 switch (TREE_CODE (retval_rhs))
2740 {
2741 case EQ_EXPR:
2742 case NE_EXPR:
2743 case GT_EXPR:
2744 case GE_EXPR:
2745 case LT_EXPR:
2746 case LE_EXPR:
2747 case TRUTH_ANDIF_EXPR:
2748 case TRUTH_ORIF_EXPR:
2749 case TRUTH_AND_EXPR:
2750 case TRUTH_OR_EXPR:
2751 case TRUTH_NOT_EXPR:
2752 case TRUTH_XOR_EXPR:
2753 op0 = gen_label_rtx ();
2754 jumpifnot (retval_rhs, op0);
2755 expand_value_return (const1_rtx);
2756 emit_label (op0);
2757 expand_value_return (const0_rtx);
2758 return;
2759 }
2760 }
2761#endif /* HAVE_return */
2762
2763 /* If the result is an aggregate that is being returned in one (or more)
2764 registers, load the registers here. The compiler currently can't handle
2765 copying a BLKmode value into registers. We could put this code in a
2766 more general area (for use by everyone instead of just function
2767 call/return), but until this feature is generally usable it is kept here
2768 (and in expand_call). The value must go into a pseudo in case there
2769 are cleanups that will clobber the real return register. */
2770
2771 if (retval_rhs != 0
2772 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2773 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2774 {
2775 int i, bitpos, xbitpos;
2776 int big_endian_correction = 0;
2777 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2778 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2779 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2780 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2781 rtx result_reg, src, dst;
2782 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2783 enum machine_mode tmpmode, result_reg_mode;
2784
2785 /* Structures whose size is not a multiple of a word are aligned
2786 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2787 machine, this means we must skip the empty high order bytes when
2788 calculating the bit offset. */
2789 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2790 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2791 * BITS_PER_UNIT));
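/* Worked example (illustrative, not in the original source): for a
   6-byte structure with UNITS_PER_WORD == 4 and BITS_PER_WORD == 32,
   bytes % UNITS_PER_WORD is 2, so the correction is
   32 - 2 * 8 == 16 bits and the copy starts 16 bits into the first
   destination word, right-justifying the value. */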
2792
2793 /* Copy the structure BITSIZE bits at a time. */
2794 for (bitpos = 0, xbitpos = big_endian_correction;
2795 bitpos < bytes * BITS_PER_UNIT;
2796 bitpos += bitsize, xbitpos += bitsize)
2797 {
2798 /* We need a new destination pseudo each time xbitpos is
2799 on a word boundary and when xbitpos == big_endian_correction
2800 (the first time through). */
2801 if (xbitpos % BITS_PER_WORD == 0
2802 || xbitpos == big_endian_correction)
2803 {
2804 /* Generate an appropriate register. */
2805 dst = gen_reg_rtx (word_mode);
2806 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2807
2808 /* Clobber the destination before we move anything into it. */
2809 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2810 }
2811
2812 /* We need a new source operand each time bitpos is on a word
2813 boundary. */
2814 if (bitpos % BITS_PER_WORD == 0)
2815 src = operand_subword_force (result_val,
2816 bitpos / BITS_PER_WORD,
2817 BLKmode);
2818
2819 /* Use bitpos for the source extraction (left justified) and
2820 xbitpos for the destination store (right justified). */
2821 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2822 extract_bit_field (src, bitsize,
2823 bitpos % BITS_PER_WORD, 1,
2824 NULL_RTX, word_mode,
2825 word_mode,
2826 bitsize / BITS_PER_UNIT,
2827 BITS_PER_WORD),
2828 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2829 }
2830
2831 /* Find the smallest integer mode large enough to hold the
2832 entire structure and use that mode instead of BLKmode
2833 on the USE insn for the return register. */
2834 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2835 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2836 tmpmode != MAX_MACHINE_MODE;
2837 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2838 {
2839 /* Have we found a large enough mode? */
2840 if (GET_MODE_SIZE (tmpmode) >= bytes)
2841 break;
2842 }
2843
2844 /* No suitable mode found. */
2845 if (tmpmode == MAX_MACHINE_MODE)
2846 abort ();
2847
2848 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2849
2850 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2851 result_reg_mode = word_mode;
2852 else
2853 result_reg_mode = tmpmode;
2854 result_reg = gen_reg_rtx (result_reg_mode);
2855
2856 /* Now that the value is in pseudos, copy it to the result reg(s). */
2857 emit_queue ();
2858 free_temp_slots ();
2859 for (i = 0; i < n_regs; i++)
2860 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2861 result_pseudos[i]);
2862
2863 if (tmpmode != result_reg_mode)
2864 result_reg = gen_lowpart (tmpmode, result_reg);
2865
2866 expand_value_return (result_reg);
2867 }
2868 else if (cleanups
2869 && retval_rhs != 0
2870 && TREE_TYPE (retval_rhs) != void_type_node
2871 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2872 {
2873 /* Calculate the return value into a pseudo reg. */
2874 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2875 emit_queue ();
2876 /* All temporaries have now been used. */
2877 free_temp_slots ();
2878 /* Return the calculated value, doing cleanups first. */
2879 expand_value_return (val);
2880 }
2881 else
2882 {
2883 /* No cleanups or no hard reg used;
2884 calculate value into hard return reg. */
2885 expand_expr (retval, const0_rtx, VOIDmode, 0);
2886 emit_queue ();
2887 free_temp_slots ();
2888 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2889 }
2890}
2891
2892/* Return 1 if the end of the generated RTX is not a barrier.
2893 This means code already compiled can drop through. */
2894
2895int
2896drop_through_at_end_p ()
2897{
2898 rtx insn = get_last_insn ();
2899 while (insn && GET_CODE (insn) == NOTE)
2900 insn = PREV_INSN (insn);
2901 return insn && GET_CODE (insn) != BARRIER;
2902}
2903\f
2904/* Emit code to alter this function's formal parms for a tail-recursive call.
2905 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2906 FORMALS is the chain of decls of formals.
2907 Return 1 if this can be done;
2908 otherwise return 0 and do not emit any code. */
2909
2910static int
2911tail_recursion_args (actuals, formals)
2912 tree actuals, formals;
2913{
2914 register tree a = actuals, f = formals;
2915 register int i;
2916 register rtx *argvec;
2917
2918 /* Check that number and types of actuals are compatible
2919 with the formals. This is not always true in valid C code.
2920 Also check that no formal needs to be addressable
2921 and that all formals are scalars. */
2922
2923 /* Also count the args. */
2924
2925 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2926 {
2927 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2928 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2929 return 0;
2930 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2931 return 0;
2932 }
2933 if (a != 0 || f != 0)
2934 return 0;
2935
2936 /* Compute all the actuals. */
2937
2938 argvec = (rtx *) alloca (i * sizeof (rtx));
2939
2940 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2941 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2942
2943 /* Find which actual values refer to current values of previous formals.
2944 Copy each of them now, before any formal is changed. */
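/* E.g. (illustrative, not in the original source): for the tail call
   `return f (y, x);' inside `f (x, y)', the second actual mentions
   the register of the first formal, so `x' is copied to a fresh
   register before `y' is stored into that formal. */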
2945
2946 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2947 {
2948 int copy = 0;
2949 register int j;
2950 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2951 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2952 { copy = 1; break; }
2953 if (copy)
2954 argvec[i] = copy_to_reg (argvec[i]);
2955 }
2956
2957 /* Store the values of the actuals into the formals. */
2958
2959 for (f = formals, a = actuals, i = 0; f;
2960 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2961 {
2962 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2963 emit_move_insn (DECL_RTL (f), argvec[i]);
2964 else
2965 convert_move (DECL_RTL (f), argvec[i],
2966 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2967 }
2968
2969 free_temp_slots ();
2970 return 1;
2971}
2972\f
2973/* Generate the RTL code for entering a binding contour.
2974 The variables are declared one by one, by calls to `expand_decl'.
2975
2976 EXIT_FLAG is nonzero if this construct should be visible to
2977 `exit_something'. */
2978
2979void
2980expand_start_bindings (exit_flag)
2981 int exit_flag;
2982{
2983 struct nesting *thisblock = ALLOC_NESTING ();
2984 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2985
2986 /* Make an entry on block_stack for the block we are entering. */
2987
2988 thisblock->next = block_stack;
2989 thisblock->all = nesting_stack;
2990 thisblock->depth = ++nesting_depth;
2991 thisblock->data.block.stack_level = 0;
2992 thisblock->data.block.cleanups = 0;
2993 thisblock->data.block.function_call_count = 0;
2994#if 0
2995 if (block_stack)
2996 {
2997 if (block_stack->data.block.cleanups == NULL_TREE
2998 && (block_stack->data.block.outer_cleanups == NULL_TREE
2999 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3000 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3001 else
3002 thisblock->data.block.outer_cleanups
3003 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3004 block_stack->data.block.outer_cleanups);
3005 }
3006 else
3007 thisblock->data.block.outer_cleanups = 0;
3008#endif
3009#if 1
3010 if (block_stack
3011 && !(block_stack->data.block.cleanups == NULL_TREE
3012 && block_stack->data.block.outer_cleanups == NULL_TREE))
3013 thisblock->data.block.outer_cleanups
3014 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3015 block_stack->data.block.outer_cleanups);
3016 else
3017 thisblock->data.block.outer_cleanups = 0;
3018#endif
3019 thisblock->data.block.label_chain = 0;
3020 thisblock->data.block.innermost_stack_block = stack_block_stack;
3021 thisblock->data.block.first_insn = note;
3022 thisblock->data.block.block_start_count = ++block_start_count;
3023 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3024 block_stack = thisblock;
3025 nesting_stack = thisblock;
3026
3027 if (!output_bytecode)
3028 {
3029 /* Make a new level for allocating stack slots. */
3030 push_temp_slots ();
3031 }
3032}
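/* Usage sketch (not in the original source): for a C compound
   statement `{ int v; ... }' a front end brackets the body with

     expand_start_bindings (0);
     ... expand_decl and statement expansion for the block ...
     expand_end_bindings (decls, 1, 0);

   where `decls' stands for the front end's chain of VAR_DECLs for
   the block (named here only for illustration). */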
3033
3034/* Given a pointer to a BLOCK node, save a pointer to the most recently
3035 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3036 BLOCK node. */
3037
3038void
3039remember_end_note (block)
3040 register tree block;
3041{
3042 BLOCK_END_NOTE (block) = last_block_end_note;
3043 last_block_end_note = NULL_RTX;
3044}
3045
3046/* Generate RTL code to terminate a binding contour.
3047 VARS is the chain of VAR_DECL nodes
3048 for the variables bound in this contour.
3049 MARK_ENDS is nonzero if we should put a note at the beginning
3050 and end of this binding contour.
3051
3052 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3053 (That is true automatically if the contour has a saved stack level.) */
3054
3055void
3056expand_end_bindings (vars, mark_ends, dont_jump_in)
3057 tree vars;
3058 int mark_ends;
3059 int dont_jump_in;
3060{
3061 register struct nesting *thisblock = block_stack;
3062 register tree decl;
3063
3064 if (output_bytecode)
3065 {
3066 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3067 return;
3068 }
3069
3070 if (warn_unused)
3071 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3072 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3073 && ! DECL_IN_SYSTEM_HEADER (decl))
3074 warning_with_decl (decl, "unused variable `%s'");
3075
3076 if (thisblock->exit_label)
3077 {
3078 do_pending_stack_adjust ();
3079 emit_label (thisblock->exit_label);
3080 }
3081
3082 /* If necessary, make a handler for nonlocal gotos taking
3083 place in the function calls in this block. */
3084 if (function_call_count != thisblock->data.block.function_call_count
3085 && nonlocal_labels
3086 /* Make handler for outermost block
3087 if there were any nonlocal gotos to this function. */
3088 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3089 /* Make handler for inner block if it has something
3090 special to do when you jump out of it. */
3091 : (thisblock->data.block.cleanups != 0
3092 || thisblock->data.block.stack_level != 0)))
3093 {
3094 tree link;
3095 rtx afterward = gen_label_rtx ();
3096 rtx handler_label = gen_label_rtx ();
3097 rtx save_receiver = gen_reg_rtx (Pmode);
3098 rtx insns;
3099
3100 /* Don't let jump_optimize delete the handler. */
3101 LABEL_PRESERVE_P (handler_label) = 1;
3102
3103 /* Record the handler address in the stack slot for that purpose,
3104 during this block, saving and restoring the outer value. */
3105 if (thisblock->next != 0)
3106 {
3107 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3108
3109 start_sequence ();
3110 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3111 insns = get_insns ();
3112 end_sequence ();
3113 emit_insns_before (insns, thisblock->data.block.first_insn);
3114 }
3115
3116 start_sequence ();
3117 emit_move_insn (nonlocal_goto_handler_slot,
3118 gen_rtx (LABEL_REF, Pmode, handler_label));
3119 insns = get_insns ();
3120 end_sequence ();
3121 emit_insns_before (insns, thisblock->data.block.first_insn);
28d81abb
RK
3122
3123 /* Jump around the handler; it runs only when specially invoked. */
3124 emit_jump (afterward);
3125 emit_label (handler_label);
3126
3127#ifdef HAVE_nonlocal_goto
3128 if (! HAVE_nonlocal_goto)
3129#endif
3130 /* First adjust our frame pointer to its actual value. It was
3131 previously set to the start of the virtual area corresponding to
3132 the stacked variables when we branched here and now needs to be
3133 adjusted to the actual hardware fp value.
3134
3135 Assignments to virtual registers are converted by
3136 instantiate_virtual_regs into the corresponding assignment
3137 to the underlying register (fp in this case) that makes
3138 the original assignment true.
3139 So the following insn will actually be
3140 decrementing fp by STARTING_FRAME_OFFSET. */
3141 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3142
3143#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3144 if (fixed_regs[ARG_POINTER_REGNUM])
3145 {
3146#ifdef ELIMINABLE_REGS
3147 /* If the argument pointer can be eliminated in favor of the
3148 frame pointer, we don't need to restore it. We assume here
3149 that if such an elimination is present, it can always be used.
3150 This is the case on all known machines; if we don't make this
3151 assumption, we do unnecessary saving on many machines. */
3152 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3153 int i;
3154
3155 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3156 if (elim_regs[i].from == ARG_POINTER_REGNUM
3157 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3158 break;
3159
3160 if (i == sizeof elim_regs / sizeof elim_regs [0])
3161#endif
3162 {
3163 /* Now restore our arg pointer from the address at which it
3164 was saved in our stack frame.
3165 If there hasn't been space allocated for it yet, make
3166 some now. */
3167 if (arg_pointer_save_area == 0)
3168 arg_pointer_save_area
3169 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3170 emit_move_insn (virtual_incoming_args_rtx,
3171 /* We need a pseudo here, or else
3172 instantiate_virtual_regs_1 complains. */
3173 copy_to_reg (arg_pointer_save_area));
3174 }
3175 }
3176#endif
3177
3178 /* The handler expects the desired label address in the static chain
3179 register. It tests the address and does an appropriate jump
3180 to whatever label is desired. */
3181 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3182 /* Skip any labels we shouldn't be able to jump to from here. */
3183 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3184 {
3185 rtx not_this = gen_label_rtx ();
3186 rtx this = gen_label_rtx ();
3187 do_jump_if_equal (static_chain_rtx,
3188 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3189 this, 0);
3190 emit_jump (not_this);
3191 emit_label (this);
3192 expand_goto (TREE_VALUE (link));
3193 emit_label (not_this);
3194 }
3195 /* If label is not recognized, abort. */
3196 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3197 VOIDmode, 0);
3198 emit_barrier ();
3199 emit_label (afterward);
3200 }
3201
3202 /* Don't allow jumping into a block that has cleanups or a stack level. */
3203 if (dont_jump_in
3204 || thisblock->data.block.stack_level != 0
3205 || thisblock->data.block.cleanups != 0)
3206 {
3207 struct label_chain *chain;
3208
3209 /* Any labels in this block are no longer valid to go to.
3210 Mark them to cause an error message. */
3211 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3212 {
3213 DECL_TOO_LATE (chain->label) = 1;
3214 /* If any goto without a fixup came to this label,
3215 that must be an error, because gotos without fixups
3216 come from outside all saved stack-levels and all cleanups. */
3217 if (TREE_ADDRESSABLE (chain->label))
3218 error_with_decl (chain->label,
3219 "label `%s' used before containing binding contour");
3220 }
3221 }
3222
3223 /* Restore stack level in effect before the block
3224 (only if variable-size objects allocated). */
3225 /* Perform any cleanups associated with the block. */
3226
3227 if (thisblock->data.block.stack_level != 0
3228 || thisblock->data.block.cleanups != 0)
3229 {
413ec213 3230 /* Only clean up here if this point can actually be reached. */
50d1b7a1 3231 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
28d81abb 3232
50d1b7a1
MS
3233 /* Don't let cleanups affect ({...}) constructs. */
3234 int old_expr_stmts_for_value = expr_stmts_for_value;
3235 rtx old_last_expr_value = last_expr_value;
3236 tree old_last_expr_type = last_expr_type;
3237 expr_stmts_for_value = 0;
28d81abb 3238
50d1b7a1
MS
3239 /* Do the cleanups. */
3240 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3241 if (reachable)
3242 do_pending_stack_adjust ();
28d81abb 3243
50d1b7a1
MS
3244 expr_stmts_for_value = old_expr_stmts_for_value;
3245 last_expr_value = old_last_expr_value;
3246 last_expr_type = old_last_expr_type;
3247
3248 /* Restore the stack level. */
3249
3250 if (reachable && thisblock->data.block.stack_level != 0)
3251 {
3252 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3253 thisblock->data.block.stack_level, NULL_RTX);
3254 if (nonlocal_goto_handler_slot != 0)
3255 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3256 NULL_RTX);
28d81abb
RK
3257 }
3258
3259 /* Any gotos out of this block must also do these things.
59257ff7
RK
3260 Also report any gotos with fixups that came to labels in this
3261 level. */
28d81abb
RK
3262 fixup_gotos (thisblock,
3263 thisblock->data.block.stack_level,
3264 thisblock->data.block.cleanups,
3265 thisblock->data.block.first_insn,
3266 dont_jump_in);
3267 }
3268
c7d2d61d
RS
3269 /* Mark the beginning and end of the scope if requested.
3270 We do this now, after running cleanups on the variables
3271 just going out of scope, so they are in scope for their cleanups. */
3272
3273 if (mark_ends)
7629c936 3274 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
c7d2d61d
RS
3275 else
3276 /* Get rid of the beginning-mark if we don't make an end-mark. */
3277 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3278
28d81abb
RK
3279 /* If doing stupid register allocation, make sure lives of all
3280 register variables declared here extend thru end of scope. */
3281
3282 if (obey_regdecls)
3283 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3284 {
3285 rtx rtl = DECL_RTL (decl);
3286 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3287 use_variable (rtl);
3288 }
3289
3290 /* Restore block_stack level for containing block. */
3291
3292 stack_block_stack = thisblock->data.block.innermost_stack_block;
3293 POPSTACK (block_stack);
3294
3295 /* Pop the stack slot nesting and free any slots at this level. */
3296 pop_temp_slots ();
3297}
ca695ac9
JB
3298
3299
3300/* End a binding contour.
3301 VARS is the chain of VAR_DECL nodes for the variables bound
3302 in this contour. MARK_ENDS is nonzero if we should put a note
3303 at the beginning and end of this binding contour.
3304 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3305 contour. */
3306
704f4dca 3307static void
ca695ac9
JB
3308bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3309 tree vars;
3310 int mark_ends;
3311 int dont_jump_in;
3312{
3313 struct nesting *thisbind = nesting_stack;
3314 tree decl;
3315
3316 if (warn_unused)
3317 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3318 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3319 warning_with_decl (decl, "unused variable `%s'");
3320
8e2b13c3
RK
3321 if (thisbind->exit_label)
3322 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
ca695ac9
JB
3323
3324 /* Pop block/bindings off stack */
ca695ac9
JB
3325 POPSTACK (block_stack);
3326}
28d81abb
RK
3327\f
3328/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3329 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3330
3331void
3332expand_decl (decl)
3333 register tree decl;
3334{
3335 struct nesting *thisblock = block_stack;
ca695ac9
JB
3336 tree type;
3337
3338 if (output_bytecode)
3339 {
3340 bc_expand_decl (decl, 0);
3341 return;
3342 }
3343
3344 type = TREE_TYPE (decl);
28d81abb
RK
3345
3346 /* Only automatic variables need any expansion done.
3347 Static and external variables, and external functions,
3348 will be handled by `assemble_variable' (called from finish_decl).
3349 TYPE_DECL and CONST_DECL require nothing.
3350 PARM_DECLs are handled in `assign_parms'. */
3351
3352 if (TREE_CODE (decl) != VAR_DECL)
3353 return;
44fe2e80 3354 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3355 return;
3356
3357 /* Create the RTL representation for the variable. */
3358
3359 if (type == error_mark_node)
3360 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3361 else if (DECL_SIZE (decl) == 0)
3362 /* Variable with incomplete type. */
3363 {
3364 if (DECL_INITIAL (decl) == 0)
3365 /* Error message was already done; now avoid a crash. */
3366 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3367 else
3368 /* An initializer is going to decide the size of this array.
3369 Until we know the size, represent its address with a reg. */
3370 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3668e76e 3371 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
28d81abb
RK
3372 }
3373 else if (DECL_MODE (decl) != BLKmode
3374 /* If -ffloat-store, don't put explicit float vars
3375 into regs. */
3376 && !(flag_float_store
3377 && TREE_CODE (type) == REAL_TYPE)
3378 && ! TREE_THIS_VOLATILE (decl)
3379 && ! TREE_ADDRESSABLE (decl)
44fe2e80 3380 && (DECL_REGISTER (decl) || ! obey_regdecls))
28d81abb
RK
3381 {
3382 /* Automatic variable that can go in a register. */
98f3b471 3383 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
3384 enum machine_mode reg_mode
3385 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3386
5998c7dc
RS
3387 if (TREE_CODE (type) == COMPLEX_TYPE)
3388 {
3389 rtx realpart, imagpart;
3390 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3391
3392 /* For a complex type variable, make a CONCAT of two pseudos
3393 so that the real and imaginary parts
3394 can be allocated separately. */
3395 realpart = gen_reg_rtx (partmode);
3396 REG_USERVAR_P (realpart) = 1;
3397 imagpart = gen_reg_rtx (partmode);
3398 REG_USERVAR_P (imagpart) = 1;
3399 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3400 }
3401 else
3402 {
3403 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3404 if (TREE_CODE (type) == POINTER_TYPE)
d902f80a
RK
3405 mark_reg_pointer (DECL_RTL (decl),
3406 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3407 / BITS_PER_UNIT));
5998c7dc
RS
3408 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3409 }
28d81abb
RK
3410 }
3411 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3412 {
3413 /* Variable of fixed size that goes on the stack. */
3414 rtx oldaddr = 0;
3415 rtx addr;
3416
3417 /* If we previously made RTL for this decl, it must be an array
3418 whose size was determined by the initializer.
3419 The old address was a register; set that register now
3420 to the proper address. */
3421 if (DECL_RTL (decl) != 0)
3422 {
3423 if (GET_CODE (DECL_RTL (decl)) != MEM
3424 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3425 abort ();
3426 oldaddr = XEXP (DECL_RTL (decl), 0);
3427 }
3428
3429 DECL_RTL (decl)
3430 = assign_stack_temp (DECL_MODE (decl),
3431 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3432 + BITS_PER_UNIT - 1)
3433 / BITS_PER_UNIT),
3434 1);
3668e76e 3435 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
28d81abb
RK
3436
3437 /* Set alignment we actually gave this decl. */
3438 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3439 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3440
3441 if (oldaddr)
3442 {
3443 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3444 if (addr != oldaddr)
3445 emit_move_insn (oldaddr, addr);
3446 }
3447
3448 /* If this is a memory ref that contains aggregate components,
3449 mark it as such for cse and loop optimize. */
05e3bdb9 3450 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
28d81abb
RK
3451#if 0
3452 /* If this is in memory because of -ffloat-store,
3453 set the volatile bit, to prevent optimizations from
3454 undoing the effects. */
3455 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3456 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3457#endif
3458 }
3459 else
3460 /* Dynamic-size object: must push space on the stack. */
3461 {
3462 rtx address, size;
3463
3464 /* Record the stack pointer on entry to block, if have
3465 not already done so. */
3466 if (thisblock->data.block.stack_level == 0)
3467 {
3468 do_pending_stack_adjust ();
59257ff7
RK
3469 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3470 &thisblock->data.block.stack_level,
3471 thisblock->data.block.first_insn);
28d81abb
RK
3472 stack_block_stack = thisblock;
3473 }
3474
3475 /* Compute the variable's size, in bytes. */
3476 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3477 DECL_SIZE (decl),
3478 size_int (BITS_PER_UNIT)),
37366632 3479 NULL_RTX, VOIDmode, 0);
28d81abb
RK
3480 free_temp_slots ();
3481
ff91ad08
RK
3482 /* Allocate space on the stack for the variable. Note that
3483 DECL_ALIGN says how the variable is to be aligned and we
3484 cannot use it to conclude anything about the alignment of
3485 the size. */
37366632 3486 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3487 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3488
28d81abb
RK
3489 /* Reference the variable indirect through that rtx. */
3490 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3491
2207e295
RS
3492 /* If this is a memory ref that contains aggregate components,
3493 mark it as such for cse and loop optimize. */
05e3bdb9 3494 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
2207e295 3495
28d81abb
RK
3496 /* Indicate the alignment we actually gave this variable. */
3497#ifdef STACK_BOUNDARY
3498 DECL_ALIGN (decl) = STACK_BOUNDARY;
3499#else
3500 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3501#endif
3502 }
3503
3504 if (TREE_THIS_VOLATILE (decl))
3505 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
b4bf13a8
RS
3506#if 0 /* A variable is not necessarily unchanging
3507 just because it is const. RTX_UNCHANGING_P
3508 means no change in the function,
3509 not merely no change in the variable's scope.
3510 It is correct to set RTX_UNCHANGING_P if the variable's scope
3511 is the whole function. There's no convenient way to test that. */
28d81abb
RK
3512 if (TREE_READONLY (decl))
3513 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
b4bf13a8 3514#endif
28d81abb
RK
3515
3516 /* If doing stupid register allocation, make sure life of any
3517 register variable starts here, at the start of its scope. */
3518
3519 if (obey_regdecls)
3520 use_variable (DECL_RTL (decl));
3521}
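/* Editor's summary (hedged sketch, not original code): the dispatch above
   can be seen from a hypothetical caller's side.  For the GNU C fragment
   below, expand_decl would give `c' a CONCAT of two pseudo registers
   (when register allocation is allowed), `buf' a fixed stack slot from
   assign_stack_temp, and `vla' stack space pushed at run time by
   allocate_dynamic_stack_space, with the block's entry stack pointer
   saved via emit_stack_save.  */
#if 0
void
example (int n)
{
  __complex__ double c;		/* register case: CONCAT (real, imag) */
  char buf[32];			/* fixed size: stack slot */
  char vla[n];			/* dynamic size: space pushed at run time */
}
#endif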
ca695ac9
JB
3522
3523
3524/* Generate code for the automatic variable declaration DECL. For
3525 most variables this just means we give it a stack offset. The
3526 compiler sometimes emits cleanups without variables and we will
3527 have to deal with those too. */
3528
704f4dca 3529static void
ca695ac9
JB
3530bc_expand_decl (decl, cleanup)
3531 tree decl;
3532 tree cleanup;
3533{
3534 tree type;
3535
3536 if (!decl)
3537 {
3538 /* A cleanup with no variable. */
3539 if (!cleanup)
3540 abort ();
3541
3542 return;
3543 }
3544
3545 /* Only auto variables need any work. */
3546 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3547 return;
3548
3549 type = TREE_TYPE (decl);
3550
3551 if (type == error_mark_node)
3552 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3553
3554 else if (DECL_SIZE (decl) == 0)
3555
3556 /* Variable with incomplete type. The stack offset herein will be
3557 fixed later in expand_decl_init (). */
3558 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3559
3560 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3561 {
3562 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3563 DECL_ALIGN (decl));
3564 }
3565 else
3566 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3567}
28d81abb
RK
3568\f
3569/* Emit code to perform the initialization of a declaration DECL. */
3570
3571void
3572expand_decl_init (decl)
3573 tree decl;
3574{
b4ac57ab
RS
3575 int was_used = TREE_USED (decl);
3576
704f4dca
RK
3577 if (output_bytecode)
3578 {
3579 bc_expand_decl_init (decl);
3580 return;
3581 }
3582
3564e40e
RK
3583 /* If this is a CONST_DECL, we don't have to generate any code, but
3584 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3585 to be set while in the obstack containing the constant. If we don't
3586 do this, we can lose if we have functions nested three deep and the middle
3587 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3588 the innermost function is the first to expand that STRING_CST. */
3589 if (TREE_CODE (decl) == CONST_DECL)
3590 {
3591 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3592 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3593 EXPAND_INITIALIZER);
3594 return;
3595 }
3596
28d81abb
RK
3597 if (TREE_STATIC (decl))
3598 return;
3599
3600 /* Compute and store the initial value now. */
3601
3602 if (DECL_INITIAL (decl) == error_mark_node)
3603 {
3604 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3605 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3606 || code == POINTER_TYPE)
3607 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3608 0, 0);
3609 emit_queue ();
3610 }
3611 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3612 {
3613 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3614 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3615 emit_queue ();
3616 }
3617
b4ac57ab
RS
3618 /* Don't let the initialization count as "using" the variable. */
3619 TREE_USED (decl) = was_used;
3620
28d81abb 3621 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 3622 preserve_temp_slots (NULL_RTX);
28d81abb
RK
3623 free_temp_slots ();
3624}
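/* Editor's illustration (hedged sketch; `x_decl' is a hypothetical name):
   a front end pairs the two entry points, e.g. for `int x = f ();' it
   would do roughly the following; note the initialization does not mark
   `x' as used.  */
#if 0
  expand_decl (x_decl);		/* allocate a pseudo or stack slot */
  expand_decl_init (x_decl);	/* emit the assignment x = f () */
#endif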
3625
ca695ac9
JB
3626/* Expand initialization for variable-sized types. Allocate array
3627 using newlocalSI and set the local variable, which is a pointer to the
0f41302f 3628 storage. */
ca695ac9 3629
704f4dca 3630static void
ca695ac9
JB
3631bc_expand_variable_local_init (decl)
3632 tree decl;
3633{
3634 /* Evaluate size expression and coerce to SI */
3635 bc_expand_expr (DECL_SIZE (decl));
3636
3637 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3638 no coercion is necessary (?) */
3639
3640/* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3641 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3642
3643 /* Emit code to allocate array */
3644 bc_emit_instruction (newlocalSI);
3645
3646 /* Store array pointer in local variable. This is the only instance
3647 where we actually want the address of the pointer to the
3648 variable-size block, rather than the pointer itself. We avoid
3649 using expand_address() since that would cause the pointer to be
3650 pushed rather than its address. Hence the hard-coded reference;
3651 notice also that the variable is always local (no global
0f41302f 3652 variable-size type variables). */
ca695ac9
JB
3653
3654 bc_load_localaddr (DECL_RTL (decl));
3655 bc_emit_instruction (storeP);
3656}
3657
3658
3659/* Emit code to initialize a declaration. */
704f4dca
RK
3660
3661static void
ca695ac9
JB
3662bc_expand_decl_init (decl)
3663 tree decl;
3664{
3665 int org_stack_depth;
3666
3667 /* Static initializers are handled elsewhere. */
3668
3669 if (TREE_STATIC (decl))
3670 return;
3671
3672 /* Memorize original stack depth. */
3673 org_stack_depth = stack_depth;
3674
3675 /* If the type is variable-size, we first create its space (we ASSUME
3676 it CAN'T be static). We do this regardless of whether there's an
0f41302f 3677 initializer assignment or not. */
ca695ac9
JB
3678
3679 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3680 bc_expand_variable_local_init (decl);
3681
3682 /* Expand initializer assignment */
3683 if (DECL_INITIAL (decl) == error_mark_node)
3684 {
3685 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3686
3687 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3688 || code == POINTER_TYPE)
3689
e81d77b5 3690 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
ca695ac9
JB
3691 }
3692 else if (DECL_INITIAL (decl))
e81d77b5 3693 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
ca695ac9
JB
3694
3695 /* Restore stack depth */
3696 if (org_stack_depth > stack_depth)
3697 abort ();
3698
3699 bc_adjust_stack (stack_depth - org_stack_depth);
3700}
3701
3702
28d81abb
RK
3703/* CLEANUP is an expression to be executed at exit from this binding contour;
3704 for example, in C++, it might call the destructor for this variable.
3705
4847c938
MS
3706 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3707 CLEANUP multiple times, and have the correct semantics. This
3708 happens in exception handling, and for non-local gotos.
28d81abb
RK
3709
3710 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3711 that is not associated with any particular variable. */
3712
3713int
3714expand_decl_cleanup (decl, cleanup)
3715 tree decl, cleanup;
3716{
3717 struct nesting *thisblock = block_stack;
3718
3719 /* Error if we are not in any block. */
3720 if (thisblock == 0)
3721 return 0;
3722
3723 /* Record the cleanup if there is one. */
3724
3725 if (cleanup != 0)
3726 {
4847c938
MS
3727 cleanup = unsave_expr (cleanup);
3728
28d81abb
RK
3729 thisblock->data.block.cleanups
3730 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3731 /* If this block has a cleanup, it belongs in stack_block_stack. */
3732 stack_block_stack = thisblock;
61d6b1cc 3733 (*interim_eh_hook) (NULL_TREE);
28d81abb
RK
3734 }
3735 return 1;
3736}
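/* Editor's sketch (hypothetical usage, not original code): a C++ front end
   registers a destructor as a cleanup roughly like this, where
   `build_destructor_call' stands in for whatever the front end actually
   uses to build the call.  */
#if 0
  tree cleanup = build_destructor_call (decl);	/* hypothetical helper */
  expand_decl (decl);
  if (! expand_decl_cleanup (decl, cleanup))
    error ("cleanup registered outside any binding contour");
#endif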
3737\f
3738/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3739 DECL_ELTS is the list of elements that belong to DECL's type.
3740 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3741
3742void
3743expand_anon_union_decl (decl, cleanup, decl_elts)
3744 tree decl, cleanup, decl_elts;
3745{
3746 struct nesting *thisblock = block_stack;
3747 rtx x;
3748
ec5cd386
RK
3749 expand_decl (decl);
3750 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
3751 x = DECL_RTL (decl);
3752
3753 while (decl_elts)
3754 {
3755 tree decl_elt = TREE_VALUE (decl_elts);
3756 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3757 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3758
7b9032dd
JM
3759 /* Propagate the union's alignment to the elements. */
3760 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3761
3762 /* If the element has BLKmode and the union doesn't, the union is
3763 aligned such that the element doesn't need to have BLKmode, so
3764 change the element's mode to the appropriate one for its size. */
3765 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3766 DECL_MODE (decl_elt) = mode
3767 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3768 MODE_INT, 1);
3769
28d81abb
RK
3770 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3771 instead create a new MEM rtx with the proper mode. */
3772 if (GET_CODE (x) == MEM)
3773 {
3774 if (mode == GET_MODE (x))
3775 DECL_RTL (decl_elt) = x;
3776 else
3777 {
3778 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3779 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3780 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3781 }
3782 }
3783 else if (GET_CODE (x) == REG)
3784 {
3785 if (mode == GET_MODE (x))
3786 DECL_RTL (decl_elt) = x;
3787 else
3788 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3789 }
3790 else
3791 abort ();
3792
3793 /* Record the cleanup if there is one. */
3794
3795 if (cleanup != 0)
3796 thisblock->data.block.cleanups
3797 = temp_tree_cons (decl_elt, cleanup_elt,
3798 thisblock->data.block.cleanups);
3799
3800 decl_elts = TREE_CHAIN (decl_elts);
3801 }
3802}
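/* Editor's illustration (hedged): for a C++ anonymous union such as

       union { int i; float f; };

   the front end calls this function once; `i' and `f' then get DECL_RTL
   referring to the union's own storage, retyped when their modes differ
   (a fresh MEM for a memory union, a SUBREG for a register union).  */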
3803\f
3804/* Expand a list of cleanups LIST.
3805 Elements may be expressions or may be nested lists.
3806
3807 If DONT_DO is nonnull, then any list-element
3808 whose TREE_PURPOSE matches DONT_DO is omitted.
3809 This is sometimes used to avoid a cleanup associated with
4e44807b
MS
3810 a value that is being returned out of the scope.
3811
3812 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
50d1b7a1
MS
3813 goto and handle protection regions specially in that case.
3814
3815 If REACHABLE, we emit code, otherwise just inform the exception handling
3816 code about this finalization. */
28d81abb
RK
3817
3818static void
50d1b7a1 3819expand_cleanups (list, dont_do, in_fixup, reachable)
28d81abb
RK
3820 tree list;
3821 tree dont_do;
4e44807b 3822 int in_fixup;
50d1b7a1 3823 int reachable;
28d81abb
RK
3824{
3825 tree tail;
3826 for (tail = list; tail; tail = TREE_CHAIN (tail))
3827 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3828 {
3829 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
50d1b7a1 3830 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
28d81abb
RK
3831 else
3832 {
4e44807b
MS
3833 if (! in_fixup)
3834 (*interim_eh_hook) (TREE_VALUE (tail));
61d6b1cc 3835
50d1b7a1
MS
3836 if (reachable)
3837 {
3838 /* Cleanups may be run multiple times. For example,
3839 when exiting a binding contour, we expand the
3840 cleanups associated with that contour. When a goto
3841 within that binding contour has a target outside that
3842 contour, it will expand all cleanups from its scope to
3843 the target. Though the cleanups are expanded multiple
3844 times, the control paths are non-overlapping so the
3845 cleanups will not be executed twice. */
3846 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3847 free_temp_slots ();
3848 }
28d81abb
RK
3849 }
3850 }
3851}
3852
28d81abb
RK
3853/* Move all cleanups from the current block_stack
3854 to the containing block_stack, where they are assumed to
3855 have been created. If anything can cause a temporary to
3856 be created, but not expanded for more than one level of
3857 block_stacks, then this code will have to change. */
3858
3859void
3860move_cleanups_up ()
3861{
3862 struct nesting *block = block_stack;
3863 struct nesting *outer = block->next;
3864
3865 outer->data.block.cleanups
3866 = chainon (block->data.block.cleanups,
3867 outer->data.block.cleanups);
3868 block->data.block.cleanups = 0;
3869}
3870
3871tree
3872last_cleanup_this_contour ()
3873{
3874 if (block_stack == 0)
3875 return 0;
3876
3877 return block_stack->data.block.cleanups;
3878}
3879
3880/* Return 1 if there are any pending cleanups at this point.
3881 If THIS_CONTOUR is nonzero, check the current contour as well.
3882 Otherwise, look only at the contours that enclose this one. */
3883
3884int
3885any_pending_cleanups (this_contour)
3886 int this_contour;
3887{
3888 struct nesting *block;
3889
3890 if (block_stack == 0)
3891 return 0;
3892
3893 if (this_contour && block_stack->data.block.cleanups != NULL)
3894 return 1;
3895 if (block_stack->data.block.cleanups == 0
3896 && (block_stack->data.block.outer_cleanups == 0
3897#if 0
3898 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3899#endif
3900 ))
3901 return 0;
3902
3903 for (block = block_stack->next; block; block = block->next)
3904 if (block->data.block.cleanups != 0)
3905 return 1;
3906
3907 return 0;
3908}
3909\f
3910/* Enter a case (Pascal) or switch (C) statement.
3911 Push a block onto case_stack and nesting_stack
3912 to accumulate the case-labels that are seen
3913 and to record the labels generated for the statement.
3914
3915 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3916 Otherwise, this construct is transparent for `exit_something'.
3917
3918 EXPR is the index-expression to be dispatched on.
3919 TYPE is its nominal type. We could simply convert EXPR to this type,
3920 but instead we take short cuts. */
3921
3922void
3923expand_start_case (exit_flag, expr, type, printname)
3924 int exit_flag;
3925 tree expr;
3926 tree type;
3927 char *printname;
3928{
3929 register struct nesting *thiscase = ALLOC_NESTING ();
3930
3931 /* Make an entry on case_stack for the case we are entering. */
3932
3933 thiscase->next = case_stack;
3934 thiscase->all = nesting_stack;
3935 thiscase->depth = ++nesting_depth;
3936 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3937 thiscase->data.case_stmt.case_list = 0;
3938 thiscase->data.case_stmt.index_expr = expr;
3939 thiscase->data.case_stmt.nominal_type = type;
3940 thiscase->data.case_stmt.default_label = 0;
3941 thiscase->data.case_stmt.num_ranges = 0;
3942 thiscase->data.case_stmt.printname = printname;
3943 thiscase->data.case_stmt.seenlabel = 0;
3944 case_stack = thiscase;
3945 nesting_stack = thiscase;
3946
ca695ac9
JB
3947 if (output_bytecode)
3948 {
3949 bc_expand_start_case (thiscase, expr, type, printname);
3950 return;
3951 }
3952
28d81abb
RK
3953 do_pending_stack_adjust ();
3954
3955 /* Make sure case_stmt.start points to something that won't
3956 need any transformation before expand_end_case. */
3957 if (GET_CODE (get_last_insn ()) != NOTE)
37366632 3958 emit_note (NULL_PTR, NOTE_INSN_DELETED);
28d81abb
RK
3959
3960 thiscase->data.case_stmt.start = get_last_insn ();
3961}
3962
ca695ac9
JB
3963
3964/* Enter a case statement. It is assumed that the caller has pushed
0f41302f 3965 the current context onto the case stack. */
704f4dca
RK
3966
3967static void
ca695ac9
JB
3968bc_expand_start_case (thiscase, expr, type, printname)
3969 struct nesting *thiscase;
3970 tree expr;
3971 tree type;
3972 char *printname;
3973{
3974 bc_expand_expr (expr);
3975 bc_expand_conversion (TREE_TYPE (expr), type);
3976
3977 /* For cases, the skip is a place we jump to that's emitted after
3978 the size of the jump table is known. */
3979
3980 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3981 bc_emit_bytecode (jump);
c53e9440 3982 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
ca695ac9
JB
3983
3984#ifdef DEBUG_PRINT_CODE
3985 fputc ('\n', stderr);
3986#endif
3987}
3988
3989
28d81abb
RK
3990/* Start a "dummy case statement" within which case labels are invalid
3991 and are not connected to any larger real case statement.
3992 This can be used if you don't want to let a case statement jump
3993 into the middle of certain kinds of constructs. */
3994
3995void
3996expand_start_case_dummy ()
3997{
3998 register struct nesting *thiscase = ALLOC_NESTING ();
3999
4000 /* Make an entry on case_stack for the dummy. */
4001
4002 thiscase->next = case_stack;
4003 thiscase->all = nesting_stack;
4004 thiscase->depth = ++nesting_depth;
4005 thiscase->exit_label = 0;
4006 thiscase->data.case_stmt.case_list = 0;
4007 thiscase->data.case_stmt.start = 0;
4008 thiscase->data.case_stmt.nominal_type = 0;
4009 thiscase->data.case_stmt.default_label = 0;
4010 thiscase->data.case_stmt.num_ranges = 0;
4011 case_stack = thiscase;
4012 nesting_stack = thiscase;
4013}
4014
4015/* End a dummy case statement. */
4016
4017void
4018expand_end_case_dummy ()
4019{
4020 POPSTACK (case_stack);
4021}
4022
4023/* Return the data type of the index-expression
4024 of the innermost case statement, or null if none. */
4025
4026tree
4027case_index_expr_type ()
4028{
4029 if (case_stack)
4030 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4031 return 0;
4032}
4033\f
4034/* Accumulate one case or default label inside a case or switch statement.
4035 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4036 The function CONVERTER, when applied to arguments T and V,
4037 converts the value V to the type T.
28d81abb
RK
4038
4039 If not currently inside a case or switch statement, return 1 and do
4040 nothing. The caller will print a language-specific error message.
4041 If VALUE is a duplicate or overlaps, return 2 and do nothing
4042 except store the (first) duplicate node in *DUPLICATE.
4043 If VALUE is out of range, return 3 and do nothing.
4044 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4045 Return 0 on success.
4046
4047 Extended to handle range statements. */
4048
4049int
f52fba84 4050pushcase (value, converter, label, duplicate)
28d81abb 4051 register tree value;
f52fba84 4052 tree (*converter) PROTO((tree, tree));
28d81abb
RK
4053 register tree label;
4054 tree *duplicate;
4055{
4056 register struct case_node **l;
4057 register struct case_node *n;
4058 tree index_type;
4059 tree nominal_type;
4060
ca695ac9
JB
4061 if (output_bytecode)
4062 return bc_pushcase (value, label);
4063
28d81abb
RK
4064 /* Fail if not inside a real case statement. */
4065 if (! (case_stack && case_stack->data.case_stmt.start))
4066 return 1;
4067
4068 if (stack_block_stack
4069 && stack_block_stack->depth > case_stack->depth)
4070 return 5;
4071
4072 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4073 nominal_type = case_stack->data.case_stmt.nominal_type;
4074
4075 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4076 if (index_type == error_mark_node)
4077 return 0;
4078
4079 /* Convert VALUE to the type in which the comparisons are nominally done. */
4080 if (value != 0)
f52fba84 4081 value = (*converter) (nominal_type, value);
28d81abb
RK
4082
4083 /* If this is the first label, warn if any insns have been emitted. */
4084 if (case_stack->data.case_stmt.seenlabel == 0)
4085 {
4086 rtx insn;
4087 for (insn = case_stack->data.case_stmt.start;
4088 insn;
4089 insn = NEXT_INSN (insn))
4090 {
4091 if (GET_CODE (insn) == CODE_LABEL)
4092 break;
4093 if (GET_CODE (insn) != NOTE
4094 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4095 {
4096 warning ("unreachable code at beginning of %s",
4097 case_stack->data.case_stmt.printname);
4098 break;
4099 }
4100 }
4101 }
4102 case_stack->data.case_stmt.seenlabel = 1;
4103
4104 /* Fail if this value is out of range for the actual type of the index
4105 (which may be narrower than NOMINAL_TYPE). */
4106 if (value != 0 && ! int_fits_type_p (value, index_type))
4107 return 3;
4108
4109 /* Fail if this is a duplicate or overlaps another entry. */
4110 if (value == 0)
4111 {
4112 if (case_stack->data.case_stmt.default_label != 0)
4113 {
4114 *duplicate = case_stack->data.case_stmt.default_label;
4115 return 2;
4116 }
4117 case_stack->data.case_stmt.default_label = label;
4118 }
4119 else
57641239 4120 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4121
4122 expand_label (label);
4123 return 0;
4124}
4125
4126/* Like pushcase but this case applies to all values
4127 between VALUE1 and VALUE2 (inclusive).
4128 The return value is the same as that of pushcase
4129 but there is one additional error code:
4130 4 means the specified range was empty. */
4131
4132int
f52fba84 4133pushcase_range (value1, value2, converter, label, duplicate)
28d81abb 4134 register tree value1, value2;
f52fba84 4135 tree (*converter) PROTO((tree, tree));
28d81abb
RK
4136 register tree label;
4137 tree *duplicate;
4138{
4139 register struct case_node **l;
4140 register struct case_node *n;
4141 tree index_type;
4142 tree nominal_type;
4143
4144 /* Fail if not inside a real case statement. */
4145 if (! (case_stack && case_stack->data.case_stmt.start))
4146 return 1;
4147
4148 if (stack_block_stack
4149 && stack_block_stack->depth > case_stack->depth)
4150 return 5;
4151
4152 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4153 nominal_type = case_stack->data.case_stmt.nominal_type;
4154
4155 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4156 if (index_type == error_mark_node)
4157 return 0;
4158
4159 /* If this is the first label, warn if any insns have been emitted. */
4160 if (case_stack->data.case_stmt.seenlabel == 0)
4161 {
4162 rtx insn;
4163 for (insn = case_stack->data.case_stmt.start;
4164 insn;
4165 insn = NEXT_INSN (insn))
4166 {
4167 if (GET_CODE (insn) == CODE_LABEL)
4168 break;
4169 if (GET_CODE (insn) != NOTE
4170 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4171 {
4172 warning ("unreachable code at beginning of %s",
4173 case_stack->data.case_stmt.printname);
4174 break;
4175 }
4176 }
4177 }
4178 case_stack->data.case_stmt.seenlabel = 1;
4179
4180 /* Convert VALUEs to type in which the comparisons are nominally done. */
0f41302f 4181 if (value1 == 0) /* Negative infinity. */
28d81abb 4182 value1 = TYPE_MIN_VALUE(index_type);
f52fba84 4183 value1 = (*converter) (nominal_type, value1);
28d81abb 4184
0f41302f 4185 if (value2 == 0) /* Positive infinity. */
28d81abb 4186 value2 = TYPE_MAX_VALUE(index_type);
f52fba84 4187 value2 = (*converter) (nominal_type, value2);
28d81abb
RK
4188
4189 /* Fail if these values are out of range. */
4190 if (! int_fits_type_p (value1, index_type))
4191 return 3;
4192
4193 if (! int_fits_type_p (value2, index_type))
4194 return 3;
4195
4196 /* Fail if the range is empty. */
4197 if (tree_int_cst_lt (value2, value1))
4198 return 4;
4199
57641239
RK
4200 return add_case_node (value1, value2, label, duplicate);
4201}
4202
4203/* Do the actual insertion of a case label for pushcase and pushcase_range
4204 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4205 slowdown for large switch statements. */
4206
4207static int
4208add_case_node (low, high, label, duplicate)
4209 tree low, high;
4210 tree label;
4211 tree *duplicate;
4212{
4213 struct case_node *p, **q, *r;
4214
4215 q = &case_stack->data.case_stmt.case_list;
4216 p = *q;
4217
4218 while (r = *q)
28d81abb 4219 {
57641239
RK
4220 p = r;
4221
4222 /* Keep going past elements distinctly greater than HIGH. */
4223 if (tree_int_cst_lt (high, p->low))
4224 q = &p->left;
4225
4226 /* or distinctly less than LOW. */
4227 else if (tree_int_cst_lt (p->high, low))
4228 q = &p->right;
4229
4230 else
28d81abb 4231 {
57641239
RK
4232 /* We have an overlap; this is an error. */
4233 *duplicate = p->code_label;
28d81abb
RK
4234 return 2;
4235 }
4236 }
4237
4238 /* Add this label to the chain, and succeed.
57641239 4239 Copy LOW, HIGH so they are on temporary rather than momentary
28d81abb
RK
4240 obstack and will thus survive till the end of the case statement. */
4241
57641239
RK
4242 r = (struct case_node *) oballoc (sizeof (struct case_node));
4243 r->low = copy_node (low);
28d81abb 4244
57641239
RK
4245 /* If the bounds are equal, turn this into the one-value case. */
4246
4247 if (tree_int_cst_equal (low, high))
4248 r->high = r->low;
4249 else
4250 {
4251 r->high = copy_node (high);
4252 case_stack->data.case_stmt.num_ranges++;
4253 }
4254
4255 r->code_label = label;
28d81abb
RK
4256 expand_label (label);
4257
57641239
RK
4258 *q = r;
4259 r->parent = p;
4260 r->left = 0;
4261 r->right = 0;
4262 r->balance = 0;
4263
4264 while (p)
4265 {
4266 struct case_node *s;
4267
4268 if (r == p->left)
4269 {
4270 int b;
4271
4272 if (! (b = p->balance))
4273 /* Growth propagation from left side. */
4274 p->balance = -1;
4275 else if (b < 0)
4276 {
4277 if (r->balance < 0)
4278 {
4279 /* R-Rotation */
4280 if (p->left = s = r->right)
4281 s->parent = p;
4282
4283 r->right = p;
4284 p->balance = 0;
4285 r->balance = 0;
4286 s = p->parent;
4287 p->parent = r;
4288
4289 if (r->parent = s)
4290 {
4291 if (s->left == p)
4292 s->left = r;
4293 else
4294 s->right = r;
4295 }
4296 else
4297 case_stack->data.case_stmt.case_list = r;
4298 }
4299 else
4300 /* r->balance == +1 */
4301 {
5720c7e7
RK
4302 /* LR-Rotation */
4303
57641239
RK
4304 int b2;
4305 struct case_node *t = r->right;
4306
4307 if (p->left = s = t->right)
4308 s->parent = p;
4309
4310 t->right = p;
4311 if (r->right = s = t->left)
4312 s->parent = r;
4313
4314 t->left = r;
4315 b = t->balance;
4316 b2 = b < 0;
4317 p->balance = b2;
4318 b2 = -b2 - b;
4319 r->balance = b2;
4320 t->balance = 0;
4321 s = p->parent;
4322 p->parent = t;
4323 r->parent = t;
4324
4325 if (t->parent = s)
4326 {
4327 if (s->left == p)
4328 s->left = t;
4329 else
4330 s->right = t;
4331 }
4332 else
4333 case_stack->data.case_stmt.case_list = t;
4334 }
4335 break;
4336 }
4337
4338 else
4339 {
4340 /* p->balance == +1; growth of left side balances the node. */
4341 p->balance = 0;
4342 break;
4343 }
4344 }
4345 else
4346 /* r == p->right */
4347 {
4348 int b;
4349
4350 if (! (b = p->balance))
4351 /* Growth propagation from right side. */
4352 p->balance++;
4353 else if (b > 0)
4354 {
4355 if (r->balance > 0)
4356 {
4357 /* L-Rotation */
4358
4359 if (p->right = s = r->left)
4360 s->parent = p;
4361
4362 r->left = p;
4363 p->balance = 0;
4364 r->balance = 0;
4365 s = p->parent;
4366 p->parent = r;
4367 if (r->parent = s)
4368 {
4369 if (s->left == p)
4370 s->left = r;
4371 else
4372 s->right = r;
4373 }
4374
4375 else
4376 case_stack->data.case_stmt.case_list = r;
4377 }
4378
4379 else
4380 /* r->balance == -1 */
4381 {
4382 /* RL-Rotation */
4383 int b2;
4384 struct case_node *t = r->left;
4385
4386 if (p->right = s = t->left)
4387 s->parent = p;
4388
4389 t->left = p;
4390
4391 if (r->left = s = t->right)
4392 s->parent = r;
4393
4394 t->right = r;
4395 b = t->balance;
4396 b2 = b < 0;
4397 r->balance = b2;
4398 b2 = -b2 - b;
4399 p->balance = b2;
4400 t->balance = 0;
4401 s = p->parent;
4402 p->parent = t;
4403 r->parent = t;
4404
4405 if (t->parent = s)
4406 {
4407 if (s->left == p)
4408 s->left = t;
4409 else
4410 s->right = t;
4411 }
4412
4413 else
4414 case_stack->data.case_stmt.case_list = t;
4415 }
4416 break;
4417 }
4418 else
4419 {
4420 /* p->balance == -1; growth of right side balances the node. */
4421 p->balance = 0;
4422 break;
4423 }
4424 }
4425
4426 r = p;
4427 p = p->parent;
4428 }
28d81abb
RK
4429
4430 return 0;
4431}
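/* Editor's diagram (hedged, not original code): the plain R-rotation done
   above when a left-left insertion leaves P with balance -2; the
   L-rotation is its mirror image, and the LR/RL cases combine two such
   rotations.

          p                         r
         / \                       / \
        r   C        ==>          A   p
       / \                           / \
      A   B                         B   C

   (the new node went into subtree A; afterwards both P and R are
   balanced.)  */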
ca695ac9 4432
ca695ac9
JB
4433/* Accumulate one case or default label; VALUE is the value of the
4434 case, or nil for a default label. If not currently inside a case,
4435 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4436 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4437 Return 0 on success. This function is a leftover from the earlier
4438 bytecode compiler, which was based on gcc 1.37. It should be
0f41302f 4439 merged into pushcase. */
ca695ac9 4440
704f4dca 4441static int
ca695ac9
JB
4442bc_pushcase (value, label)
4443 tree value;
4444 tree label;
4445{
4446 struct nesting *thiscase = case_stack;
4447 struct case_node *case_label, *new_label;
4448
4449 if (! thiscase)
4450 return 1;
4451
4452 /* Fail if duplicate, overlap, or out of type range. */
4453 if (value)
4454 {
4455 value = convert (thiscase->data.case_stmt.nominal_type, value);
4456 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4457 return 3;
4458
4459 for (case_label = thiscase->data.case_stmt.case_list;
4460 case_label->left; case_label = case_label->left)
4461 if (! tree_int_cst_lt (case_label->left->high, value))
4462 break;
4463
4464 if (case_label != thiscase->data.case_stmt.case_list
4465 && ! tree_int_cst_lt (case_label->high, value)
abf7b40a 4466 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
ca695ac9
JB
4467 return 2;
4468
4469 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4470 new_label->low = new_label->high = copy_node (value);
4471 new_label->code_label = label;
4472 new_label->left = case_label->left;
4473
4474 case_label->left = new_label;
4475 thiscase->data.case_stmt.num_ranges++;
4476 }
4477 else
4478 {
4479 if (thiscase->data.case_stmt.default_label)
4480 return 2;
4481 thiscase->data.case_stmt.default_label = label;
4482 }
4483
4484 expand_label (label);
4485 return 0;
4486}
28d81abb 4487\f
94d6511c
PB
4488/* Returns the number of possible values of TYPE.
4489 Returns -1 if the number is unknown or variable.
4490 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4491 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4492 do not increase monotonically (there may be duplicates);
4493 to 1 if the values increase monotonically, but not always by 1;
4494 otherwise sets it to 0. */
4495
4496HOST_WIDE_INT
4497all_cases_count (type, spareness)
4498 tree type;
4499 int *spareness;
4500{
4501 HOST_WIDE_INT count, count_high = 0;
4502 *spareness = 0;
4503
4504 switch (TREE_CODE (type))
4505 {
4506 tree t;
4507 case BOOLEAN_TYPE:
4508 count = 2;
4509 break;
4510 case CHAR_TYPE:
4511 count = 1 << BITS_PER_UNIT;
4512 break;
4513 default:
4514 case INTEGER_TYPE:
4515 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
c02aebe2 4516 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
94d6511c
PB
4517 return -1;
4518 else
4519 {
4520 /* count
4521 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4522 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
0f41302f 4523 but with overflow checking. */
94d6511c
PB
4524 tree mint = TYPE_MIN_VALUE (type);
4525 tree maxt = TYPE_MAX_VALUE (type);
4526 HOST_WIDE_INT lo, hi;
4527 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4528 &lo, &hi);
4529 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4530 lo, hi, &lo, &hi);
4531 add_double (lo, hi, 1, 0, &lo, &hi);
4532 if (hi != 0 || lo < 0)
4533 return -2;
4534 count = lo;
4535 }
4536 break;
4537 case ENUMERAL_TYPE:
4538 count = 0;
4539 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4540 {
4541 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4542 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4543 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4544 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4545 *spareness = 1;
4546 count++;
4547 }
4548 if (*spareness == 1)
4549 {
4550 tree prev = TREE_VALUE (TYPE_VALUES (type));
4551 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4552 {
4553 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4554 {
4555 *spareness = 2;
4556 break;
4557 }
4558 prev = TREE_VALUE (t);
4559 }
4560
4561 }
4562 }
4563 return count;
4564}
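/* Editor's worked example (hedged): with TYPE_MIN_VALUE == -2 and
   TYPE_MAX_VALUE == 5, the double-word arithmetic above proceeds as
       neg_double (-2)          ->  2
       add_double (5, 2)        ->  7
       add_double (7, 1)        ->  8
   leaving hi == 0 and lo == 8 >= 0, so 8 is returned.  A full-width range
   instead overflows into hi != 0 or lo < 0 and yields -2.  */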
4565
4566
4567#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4568 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4569 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4570#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4571 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4572 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
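/* Editor's usage sketch (hedged, not original code): the caller sizes the
   bitstring in chars and clears it, then marks and tests values by their
   offset from the smallest one, e.g.:  */
#if 0
  long count = 20;
  long bytes_needed = (count + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
  unsigned char *cases_seen = (unsigned char *) malloc (bytes_needed);

  bzero (cases_seen, bytes_needed);
  BITARRAY_SET (cases_seen, 5);		/* value with offset 5 matched */
  if (BITARRAY_TEST (cases_seen, 5))
    /* offset 5 was seen */ ;
#endif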
94d6511c
PB
4573
4574/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4575 with the case values we have seen, assuming the case expression
4576 has the given TYPE.
4577 SPARSENESS is as determined by all_cases_count.
4578
9faa82d8 4579 The time needed is proportional to COUNT, unless
94d6511c
PB
4580 SPARSENESS is 2, in which case quadratic time is needed. */
4581
4582void
4583mark_seen_cases (type, cases_seen, count, sparseness)
4584 tree type;
4585 unsigned char *cases_seen;
4586 long count;
4587 int sparseness;
4588{
4589 long i;
4590
4591 tree next_node_to_try = NULL_TREE;
4592 long next_node_offset = 0;
4593
5720c7e7 4594 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c
PB
4595 tree val = make_node (INTEGER_CST);
4596 TREE_TYPE (val) = type;
5720c7e7
RK
4597 if (! root)
4598 ; /* Do nothing */
4599 else if (sparseness == 2)
94d6511c 4600 {
5720c7e7
RK
4601 tree t;
4602 HOST_WIDE_INT xlo;
4603
4604 /* This less efficient loop is only needed to handle
4605 duplicate case values (multiple enum constants
4606 with the same value). */
4607 TREE_TYPE (val) = TREE_TYPE (root->low);
4608 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4609 t = TREE_CHAIN (t), xlo++)
94d6511c 4610 {
5720c7e7
RK
4611 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4612 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4613 n = root;
4614 do
94d6511c 4615 {
5720c7e7
RK
4616 /* Keep going past elements distinctly greater than VAL. */
4617 if (tree_int_cst_lt (val, n->low))
4618 n = n->left;
4619
4620 /* or distinctly less than VAL. */
4621 else if (tree_int_cst_lt (n->high, val))
4622 n = n->right;
4623
4624 else
94d6511c 4625 {
5720c7e7
RK
4626 /* We have found a matching range. */
4627 BITARRAY_SET (cases_seen, xlo);
4628 break;
94d6511c
PB
4629 }
4630 }
5720c7e7
RK
4631 while (n);
4632 }
4633 }
4634 else
4635 {
4636 if (root->left)
4637 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4638 for (n = root; n; n = n->right)
4639 {
4640 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4641 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4642 while ( ! tree_int_cst_lt (n->high, val))
94d6511c 4643 {
5720c7e7
RK
4644 /* Calculate (into xlo) the "offset" of the integer (val).
4645 The element with lowest value has offset 0, the next smallest
4646 element has offset 1, etc. */
4647
4648 HOST_WIDE_INT xlo, xhi;
4649 tree t;
94d6511c
PB
4650 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4651 {
4652 /* The TYPE_VALUES will be in increasing order, so
4653 starting searching where we last ended. */
4654 t = next_node_to_try;
4655 xlo = next_node_offset;
4656 xhi = 0;
4657 for (;;)
4658 {
4659 if (t == NULL_TREE)
4660 {
4661 t = TYPE_VALUES (type);
4662 xlo = 0;
4663 }
4664 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4665 {
4666 next_node_to_try = TREE_CHAIN (t);
4667 next_node_offset = xlo + 1;
4668 break;
4669 }
4670 xlo++;
4671 t = TREE_CHAIN (t);
4672 if (t == next_node_to_try)
5720c7e7
RK
4673 {
4674 xlo = -1;
4675 break;
4676 }
94d6511c
PB
4677 }
4678 }
4679 else
4680 {
4681 t = TYPE_MIN_VALUE (type);
4682 if (t)
4683 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4684 &xlo, &xhi);
4685 else
4686 xlo = xhi = 0;
4687 add_double (xlo, xhi,
4688 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4689 &xlo, &xhi);
4690 }
4691
9dd53f1e 4692 if (xhi == 0 && xlo >= 0 && xlo < count)
94d6511c 4693 BITARRAY_SET (cases_seen, xlo);
5720c7e7
RK
4694 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4695 1, 0,
4696 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 4697 }
94d6511c
PB
4698 }
4699 }
4700}
4701
28d81abb
RK
4702/* Called when the index of a switch statement is an enumerated type
4703 and there is no default label.
4704
4705 Checks that all enumeration literals are covered by the case
4706 expressions of a switch. Also, warn if there are any extra
4707 switch cases that are *not* elements of the enumerated type.
4708
4709 If all enumeration literals were covered by the case expressions,
4710 turn one of the expressions into the default expression since it should
4711 not be possible to fall through such a switch. */
4712
4713void
4714check_for_full_enumeration_handling (type)
4715 tree type;
4716{
4717 register struct case_node *n;
4718 register struct case_node **l;
4719 register tree chain;
4720 int all_values = 1;
4721
0f41302f 4722 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
4723 int sparseness = 0;
4724
0f41302f 4725 /* The number of possible selector values. */
94d6511c
PB
4726 HOST_WIDE_INT size;
4727
4728 /* For each possible selector value, a one iff it has been matched
0f41302f 4729 by a case value alternative. */
94d6511c
PB
4730 unsigned char *cases_seen;
4731
0f41302f 4732 /* The allocated size of cases_seen, in chars. */
94d6511c
PB
4733 long bytes_needed;
4734 tree t;
4735
ca695ac9
JB
4736 if (output_bytecode)
4737 {
4738 bc_check_for_full_enumeration_handling (type);
4739 return;
4740 }
4741
94d6511c
PB
4742 if (! warn_switch)
4743 return;
4744
4745 size = all_cases_count (type, &sparseness);
4746 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 4747
94d6511c 4748 if (size > 0 && size < 600000
0f41302f 4749 /* We deliberately use malloc here - not xmalloc. */
ad03007a 4750 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
28d81abb 4751 {
94d6511c
PB
4752 long i;
4753 tree v = TYPE_VALUES (type);
4754 bzero (cases_seen, bytes_needed);
28d81abb 4755
94d6511c
PB
4756 /* The time complexity of this code is normally O(N), where
4757 N is the number of members in the enumerated type.
4758 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 4759 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
4760
4761 mark_seen_cases (type, cases_seen, size, sparseness);
4762
4763 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
28d81abb 4764 {
94d6511c 4765 if (BITARRAY_TEST(cases_seen, i) == 0)
1ddde1cd 4766 warning ("enumeration value `%s' not handled in switch",
94d6511c 4767 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
28d81abb 4768 }
94d6511c
PB
4769
4770 free (cases_seen);
28d81abb
RK
4771 }
4772
4773 /* Now we go the other way around; we warn if there are case
ac2a9454 4774 expressions that don't correspond to enumerators. This can
28d81abb 4775 occur since C and C++ don't enforce type-checking of
0f41302f 4776 assignments to enumeration variables. */
28d81abb 4777
5720c7e7
RK
4778 if (case_stack->data.case_stmt.case_list
4779 && case_stack->data.case_stmt.case_list->left)
4780 case_stack->data.case_stmt.case_list
4781 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
28d81abb
RK
4782 if (warn_switch)
4783 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4784 {
4785 for (chain = TYPE_VALUES (type);
4786 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4787 chain = TREE_CHAIN (chain))
4788 ;
4789
4790 if (!chain)
3b24f55b
RS
4791 {
4792 if (TYPE_NAME (type) == 0)
4793 warning ("case value `%d' not in enumerated type",
4794 TREE_INT_CST_LOW (n->low));
4795 else
4796 warning ("case value `%d' not in enumerated type `%s'",
4797 TREE_INT_CST_LOW (n->low),
4798 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4799 == IDENTIFIER_NODE)
4800 ? TYPE_NAME (type)
4801 : DECL_NAME (TYPE_NAME (type))));
4802 }
1ddde1cd
RS
4803 if (!tree_int_cst_equal (n->low, n->high))
4804 {
4805 for (chain = TYPE_VALUES (type);
4806 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4807 chain = TREE_CHAIN (chain))
4808 ;
4809
4810 if (!chain)
3b24f55b
RS
4811 {
4812 if (TYPE_NAME (type) == 0)
4813 warning ("case value `%d' not in enumerated type",
4814 TREE_INT_CST_LOW (n->high));
4815 else
4816 warning ("case value `%d' not in enumerated type `%s'",
4817 TREE_INT_CST_LOW (n->high),
4818 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4819 == IDENTIFIER_NODE)
4820 ? TYPE_NAME (type)
4821 : DECL_NAME (TYPE_NAME (type))));
4822 }
1ddde1cd 4823 }
28d81abb
RK
4824 }
4825
ae8cb346
RS
4826#if 0
4827 /* ??? This optimization is disabled because it causes valid programs to
4828 fail. ANSI C does not guarantee that an expression with enum type
9faa82d8 4829 will have a value that is the same as one of the enumeration literals. */
ae8cb346 4830
28d81abb
RK
4831 /* If all values were found as case labels, make one of them the default
4832 label. Thus, this switch will never fall through. We arbitrarily pick
4833 the last one to make the default since this is likely the most
4834 efficient choice. */
4835
4836 if (all_values)
4837 {
4838 for (l = &case_stack->data.case_stmt.case_list;
4839 (*l)->right != 0;
4840 l = &(*l)->right)
4841 ;
4842
4843 case_stack->data.case_stmt.default_label = (*l)->code_label;
4844 *l = 0;
4845 }
ae8cb346 4846#endif /* 0 */
28d81abb 4847}
ca695ac9
JB
4848
4849
4850/* Check that all enumeration literals are covered by the case
4851 expressions of a switch. Also warn if there are any cases
4852 that are not elements of the enumerated type. */
704f4dca
RK
4853
4854static void
ca695ac9
JB
4855bc_check_for_full_enumeration_handling (type)
4856 tree type;
4857{
4858 struct nesting *thiscase = case_stack;
4859 struct case_node *c;
4860 tree e;
4861
4862 /* Check for enums not handled. */
4863 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4864 {
4865 for (c = thiscase->data.case_stmt.case_list->left;
4866 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4867 c = c->left)
4868 ;
4869 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4870 warning ("enumerated value `%s' not handled in switch",
4871 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4872 }
4873
4874 /* Check for cases not in the enumeration. */
4875 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4876 {
4877 for (e = TYPE_VALUES (type);
4878 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4879 e = TREE_CHAIN (e))
4880 ;
4881 if (! e)
4882 warning ("case value `%d' not in enumerated type `%s'",
4883 TREE_INT_CST_LOW (c->low),
4884 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4885 ? TYPE_NAME (type)
4886 : DECL_NAME (TYPE_NAME (type))));
4887 }
4888}
28d81abb
RK
4889\f
4890/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 4891 in which ORIG_INDEX is the expression to be tested.
28d81abb
RK
4892 Generate the code to test it and jump to the right place. */
4893
4894void
4895expand_end_case (orig_index)
4896 tree orig_index;
4897{
3474db0e 4898 tree minval, maxval, range, orig_minval;
28d81abb
RK
4899 rtx default_label = 0;
4900 register struct case_node *n;
4901 int count;
4902 rtx index;
ca695ac9 4903 rtx table_label;
28d81abb
RK
4904 int ncases;
4905 rtx *labelvec;
4906 register int i;
4907 rtx before_case;
4908 register struct nesting *thiscase = case_stack;
1b0cb6fc 4909 tree index_expr, index_type;
ca695ac9
JB
4910 int unsignedp;
4911
4912 if (output_bytecode)
4913 {
4914 bc_expand_end_case (orig_index);
4915 return;
4916 }
4917
4918 table_label = gen_label_rtx ();
4919 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
4920 index_type = TREE_TYPE (index_expr);
4921 unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
4922
4923 do_pending_stack_adjust ();
4924
4925 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 4926 if (index_type != error_mark_node)
28d81abb
RK
4927 {
4928 /* If switch expression was an enumerated type, check that all
4929 enumeration literals are covered by the cases.
4930 No sense trying this if there's a default case, however. */
4931
4932 if (!thiscase->data.case_stmt.default_label
4933 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4934 && TREE_CODE (index_expr) != INTEGER_CST)
4935 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4936
4937 /* If this is the first label, warn if any insns have been emitted. */
4938 if (thiscase->data.case_stmt.seenlabel == 0)
4939 {
4940 rtx insn;
4941 for (insn = get_last_insn ();
4942 insn != case_stack->data.case_stmt.start;
4943 insn = PREV_INSN (insn))
4944 if (GET_CODE (insn) != NOTE
4945 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4946 {
4947 warning ("unreachable code at beginning of %s",
4948 case_stack->data.case_stmt.printname);
4949 break;
4950 }
4951 }
4952
4953 /* If we don't have a default-label, create one here,
4954 after the body of the switch. */
4955 if (thiscase->data.case_stmt.default_label == 0)
4956 {
4957 thiscase->data.case_stmt.default_label
4958 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4959 expand_label (thiscase->data.case_stmt.default_label);
4960 }
4961 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4962
4963 before_case = get_last_insn ();
4964
5720c7e7
RK
4965 if (thiscase->data.case_stmt.case_list
4966 && thiscase->data.case_stmt.case_list->left)
b059139c
RK
4967 thiscase->data.case_stmt.case_list
4968 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4969
28d81abb
RK
4970 /* Simplify the case-list before we count it. */
4971 group_case_nodes (thiscase->data.case_stmt.case_list);
4972
4973 /* Get upper and lower bounds of case values.
4974 Also convert all the case values to the index expr's data type. */
4975
      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
        {
          /* Check low and high label values are integers.  */
          if (TREE_CODE (n->low) != INTEGER_CST)
            abort ();
          if (TREE_CODE (n->high) != INTEGER_CST)
            abort ();

          n->low = convert (index_type, n->low);
          n->high = convert (index_type, n->high);

          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */
          if (count++ == 0)
            {
              minval = n->low;
              maxval = n->high;
            }
          else
            {
              if (INT_CST_LT (n->low, minval))
                minval = n->low;
              if (INT_CST_LT (maxval, n->high))
                maxval = n->high;
            }
          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))
            count++;
        }

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
        range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      if (count == 0)
        {
          expand_expr (index_expr, const0_rtx, VOIDmode, 0);
          emit_queue ();
          emit_jump (default_label);
        }

      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
         bounds, this means extra overhead for dispatch tables
         which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
               || count < CASE_VALUES_THRESHOLD
               || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
                   > 10 * count)
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
               || flag_pic
#endif
               || TREE_CODE (index_expr) == INTEGER_CST
               /* These will reduce to a constant.  */
               || (TREE_CODE (index_expr) == CALL_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
                   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
                   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
               || (TREE_CODE (index_expr) == COMPOUND_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
        {
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

          /* If the index is a short or char for which we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
                  == CODE_FOR_nothing))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (cmp_optab->handlers[(int) wider_mode].insn_code
                    != CODE_FOR_nothing)
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          emit_queue ();
          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)
            {
              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)
                {
                  index_expr
                    = build_int_2 (INTVAL (index),
                                   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (index_type, index_expr);
                }

              /* For constant index expressions we need only
                 issue an unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimization phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
                if (! tree_int_cst_lt (index_expr, n->low)
                    && ! tree_int_cst_lt (n->high, index_expr))
                  break;

              if (n)
                emit_jump (label_rtx (n->code_label));
              else
                emit_jump (default_label);
            }
          else
            {
              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */

              use_cost_table
                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list,
                                  NULL_PTR);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, index_type);
              emit_jump_if_reachable (default_label);
            }
        }
      else
        {
          int win = 0;
#ifdef HAVE_casesi
          if (HAVE_casesi)
            {
              enum machine_mode index_mode = SImode;
              int index_bits = GET_MODE_BITSIZE (index_mode);
              rtx op1, op2;
              enum machine_mode op_mode;

              /* Convert the index to SImode.  */
              if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
                  > GET_MODE_BITSIZE (index_mode))
                {
                  enum machine_mode omode = TYPE_MODE (index_type);
                  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

                  /* We must handle the endpoints in the original mode.  */
                  index_expr = build (MINUS_EXPR, index_type,
                                      index_expr, minval);
                  minval = integer_zero_node;
                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
                  emit_jump_insn (gen_bltu (default_label));
                  /* Now we can safely truncate.  */
                  index = convert_to_mode (index_mode, index, 0);
                }
              else
                {
                  if (TYPE_MODE (index_type) != index_mode)
                    {
                      index_expr = convert (type_for_size (index_bits, 0),
                                            index_expr);
                      index_type = TREE_TYPE (index_expr);
                    }

                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                }
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
                  (index, op_mode))
                index = copy_to_mode_reg (op_mode, index);

              op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
                  (op1, op_mode))
                op1 = copy_to_mode_reg (op_mode, op1);

              op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

              op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
              if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
                  (op2, op_mode))
                op2 = copy_to_mode_reg (op_mode, op2);

              emit_jump_insn (gen_casesi (index, op1, op2,
                                          table_label, default_label));
              win = 1;
            }
#endif
#ifdef HAVE_tablejump
          if (! win && HAVE_tablejump)
            {
              index_expr = convert (thiscase->data.case_stmt.nominal_type,
                                    fold (build (MINUS_EXPR, index_type,
                                                 index_expr, minval)));
              index_type = TREE_TYPE (index_expr);
              index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              do_tablejump (index, TYPE_MODE (index_type),
                            expand_expr (range, NULL_RTX, VOIDmode, 0),
                            table_label, default_label);
              win = 1;
            }
#endif
          if (! win)
            abort ();

          /* Get table of labels to jump to, in order of case index.  */

          ncases = TREE_INT_CST_LOW (range) + 1;
          labelvec = (rtx *) alloca (ncases * sizeof (rtx));
          bzero ((char *) labelvec, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
            {
              register HOST_WIDE_INT i
                = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

              while (1)
                {
                  labelvec[i]
                    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
                  if (i + TREE_INT_CST_LOW (orig_minval)
                      == TREE_INT_CST_LOW (n->high))
                    break;
                  i++;
                }
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

          /* Output the table.  */
          emit_label (table_label);

          /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
             were an expression, instead of an #ifdef/#ifndef.  */
          if (
#ifdef CASE_VECTOR_PC_RELATIVE
              1 ||
#endif
              flag_pic)
            emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
                                     gen_rtx (LABEL_REF, Pmode, table_label),
                                     gen_rtvec_v (ncases, labelvec)));
          else
            emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
                                     gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);
#else
          emit_barrier ();
#endif
        }

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
                     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}

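/* Illustrative sketch, not part of the original source: the
   table-versus-branches choice made above, reduced to a standalone
   predicate.  COUNT is the number of comparisons (ranges count
   double) and RANGE is maxval - minval; THRESHOLD stands in for
   CASE_VALUES_THRESHOLD.  Fenced off from compilation, following the
   file's own #if 0 convention.  */
#if 0
static int
worth_a_dispatch_table (count, range, threshold)
     unsigned HOST_WIDE_INT count, range;
     int threshold;
{
  /* A table pays off only when there are enough cases and the span
     of case values is at most ten times the number of cases.  */
  return count >= threshold && range <= 10 * count;
}
#endif
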
/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left) != 0)
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
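
/* Illustrative sketch, not part of the original source: the effect of
   case_tree2list on a three-node tree.  Given B with left child A and
   right child C, case_tree2list (&b, 0) yields the in-order list
   A -> B -> C linked through the `right' fields, with every `left'
   field zeroed -- exactly the shape group_case_nodes and
   balance_case_nodes expect.  Fenced off from compilation.  */
#if 0
static void
case_tree2list_example ()
{
  static struct case_node a, b, c;
  struct case_node *list;

  b.left = &a, b.right = &c;
  list = case_tree2list (&b, 0);
  /* Now list == &a, a.right == &b, b.right == &c, c.right == 0,
     and a.left == b.left == c.left == 0.  */
}
#endif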

/* Terminate a case statement.  EXPR is the original index
   expression.  */

static void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instruction's literal arguments, in order.
     In addition to the value on the stack, it uses:
     1. The address of the jump table.
     2. The size of the jump table.
     3. The default label.  */

  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
                          sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
  else
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
        opcode = TREE_INT_CST_LOW (c->low);
        bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

        opcode = TREE_INT_CST_LOW (c->high);
        bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

        bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
        bc_emit_bytecode_DI_const (c->low);
        bc_emit_bytecode_DI_const (c->high);

        bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else
    /* Bad mode.  */
    abort ();

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST
      && warn_switch)
    check_for_full_enumeration_handling (TREE_TYPE (expr));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}
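
/* Illustrative sketch, not part of the original source: the stream the
   routine above emits for a two-case SImode switch, read straight off
   the calls it makes:

	jump <exit>			; falls out of the case body
     <skip_label>:
	caseSI				; or caseSU / caseDI / caseDU
	<jump_label> <num_ranges> <default or exit>
     <jump_label>:			; aligned to a pointer boundary
	low1 high1 <label1>
	low2 high2 <label2>
     <exit>:

   How the bytecode interpreter decodes this layout is an assumption
   beyond what this file shows; the ordering itself follows directly
   from the emission calls above.  */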

/* Return unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
        emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as if it were scanning
   text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
        {
          if (isalnum (i))
            cost_table[i] = 16;
          else if (ispunct (i))
            cost_table[i] = 8;
          else if (iscntrl (i))
            cost_table[i] = -1;
        }

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
        if (cost_table[i] < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
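
/* Illustrative sketch, not part of the original source: how the table
   built above weights a handful of labels.  For `case '0': case 'a':
   case ' ': case '\t': case '\n':' the weights are 16, 16, 8, 4 and 2,
   so balance_case_nodes will place its pivot nearer the alphanumeric
   labels, which this heuristic guesses are tested most often.  Fenced
   off from compilation; assumes cost_table has been initialized.  */
#if 0
static int
sum_label_costs (node)
     case_node_ptr node;
{
  int total = 0;

  /* Mirror balance_case_nodes: count the low end of every entry,
     and the high end as well when the entry is a range.  */
  for (; node; node = node->right)
    total += cost_table[TREE_INT_CST_LOW (node->low)]
      + (tree_int_cst_equal (node->low, node->high)
         ? 0 : cost_table[TREE_INT_CST_LOW (node->high)]);
  return total;
}
#endif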

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g., three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && next_real_insn (label_rtx (np->code_label)) == lb
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
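
/* Illustrative sketch, not part of the original source: the same
   grouping idea on plain integer ranges, with the shared-label test
   reduced to a `target' field and the overflow test reduced to an
   INT_MAX check.  Entries 1..1, 2..2 and 3..5 with one target become
   the single entry 1..5, just as `case 1: case 2: case 3:' above
   become `1..3:'.  Fenced off from compilation.  */
#if 0
#include <limits.h>

struct irange { int lo, hi, target; struct irange *next; };

static void
group_iranges (head)
     struct irange *head;
{
  struct irange *node, *np;

  for (node = head; node; node = np)
    {
      /* Absorb successors that jump to the same place and whose
         range continues this one without overflowing.  */
      np = node;
      while ((np = np->next) != 0
             && np->target == node->target
             && node->hi < INT_MAX
             && np->lo == node->hi + 1)
        node->hi = np->hi;
      node->next = np;
    }
}
#endif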

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on this branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost.
                 Here `i' gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here `i' gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
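
/* Illustrative sketch, not part of the original source: the
   uniform-cost half of the balancing scheme above, on a list linked
   through `right' with `left' fields already zeroed (the shape
   case_tree2list produces) and ranges ignored.  The middle node
   becomes the root; the nodes before it hang off `left', the nodes
   after it stay on `right', and both halves recurse.  Fenced off
   from compilation.  */
#if 0
static void
balance_simple (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  case_node_ptr np = *head;

  if (np)
    {
      int i = 0;
      case_node_ptr *npp;
      case_node_ptr left, n;

      for (n = np; n; n = n->right)
        i++;

      if (i > 2)
        {
          /* Walk NPP to the middle node; it becomes the pivot.  */
          npp = head;
          left = *npp;
          for (i = (i + 1) / 2 - 1; i > 0; i--)
            npp = &(*npp)->right;

          /* LEFT keeps the first half; the pivot's `right' already
             holds the second half.  Cut the first half off at the
             pivot and hang it on the pivot's `left'.  */
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          balance_simple (&np->left, np);
          balance_simple (&np->right, np);
        }
      else
        {
          /* One or two nodes: leave the branch flat, filling in
             `parent' fields only.  */
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
#endif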

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
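
/* Illustrative worked example, not part of the original source,
   assuming an index type whose range is exactly 1..3.  With cases
   1, 2 and 3 balanced as a tree rooted at 2, emit_case_nodes (below)
   first emits `if (index == 2) goto L2'.  For node 3,
   node_has_low_bound finds 3 - 1 == 2 tested by the parent, and
   node_has_high_bound sees that 3 is TYPE_MAX_VALUE, so node 3 is
   fully bounded: the parent's `index > 2' test branches straight to
   L3 with no further comparison.  Node 1, bounded the same way
   (1 is TYPE_MIN_VALUE and 1 + 1 == 2 is tested by the parent),
   collapses to a bare `goto L1'.  */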

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only
             one right child; it costs too much space to save so little
             time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
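
/* Illustrative sketch, not part of the original source: the shape of
   the code the routine above emits for cases 1, 2 and 3..5 of a plain
   int index (so no bound test is redundant at either end), written as
   C rather than RTL.  The tree is rooted at the single value 2 with
   leaf 1 on the left and the range 3..5 on the right:

	if (index == 2) goto L2;
	if (index > 2) goto test_right;
	if (index == 1) goto L1;
	goto dflt;
     test_right:
	if (index > 5) goto dflt;
	goto L3_5;

   The `index >= 3' test is pruned because node_has_low_bound finds the
   parent's `index > 2' comparison; only the upper bound of the range
   still needs checking.  */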

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}