/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

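/* For example (an illustrative sketch, not code from this file), a C
   front end handling `if (cond) then_stmt;' would make calls in this
   order:

	expand_start_cond (cond, 0);   -- emit the test and conditional jump
	...expand RTL for then_stmt...
	expand_end_cond ();            -- define the label jumped to

   with expand_start_else called between the clauses when an `else'
   is present.  */
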
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

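/* For instance (an illustrative sketch), the C switch

	switch (x) { case 1: ...; case 10: ...; default: ...; }

   builds a chain of two case_nodes, each with LOW == HIGH (1 and 10),
   chained through RIGHT in ascending order; a Pascal-style range label
   would instead give a single node whose LOW and HIGH hold the bounds
   of the range.  */
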
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static void bc_expand_goto_internal ();
static int expand_fixup ();
static void bc_expand_fixup ();
void fixup_gotos ();
static void bc_fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

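/* As an illustrative sketch: expanding a C `while' loop pushes one
   `struct nesting' onto both nesting_stack and loop_stack; a `break'
   in the body jumps to that entry's exit_label, and `continue' jumps
   to its data.loop.continue_label.  The entry is popped again by
   expand_end_loop.  */
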
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
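
/* For example (illustrative only), the code that ends a loop construct
   does roughly:

	struct nesting *thisloop = loop_stack;
	...emit the final jump and the end label...
	POPSTACK (loop_stack);

   which frees THISLOOP and every construct nested inside it, popping
   all of the per-construct stacks in step.  */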
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

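/* An illustrative sketch of when a fixup is needed:

	{
	  char buf[n];	/- variable size gives the block a stack level -/
	  ...
	  goto out;
	}
     out: ;

   The stack level at `out:' is unknown when the goto is expanded, so
   expand_fixup queues an entry here and fixup_gotos patches in the
   stack-restore (and any cleanups) once the contour is exited.  */
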
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow.  */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch.  */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled.  */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack.  */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}
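
/* This implements the GNU C computed-goto extension; e.g. (an
   illustrative sketch):

	void *ptr = &&lab;
	goto *ptr;
     lab: ;

   where the front end hands the pointer expression to this function
   as EXP.  */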
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
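
/* A nonlocal goto arises with GNU C nested functions; e.g. (an
   illustrative sketch):

	void outer ()
	{
	  void inner () { goto failed; }
	  ...call something that eventually calls inner ()...
	 failed: ...;
	}

   The `goto failed;' in inner must restore outer's frame and stack
   pointers before jumping, which is what the code above emits.  */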

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)
	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump.  */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list.  */
    bc_expand_fixup (opcode, label, stack_level);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code.  */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue.  */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
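
/* For example (illustrative), a plain statement such as

	asm ("nop");

   reaches expand_asm with BODY being the STRING_CST "nop", and is
   emitted as a single ASM_INPUT rtx.  */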

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

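/* For example (an illustrative sketch), the GNU C extended asm

	asm volatile ("addl %1,%0" : "=r" (sum) : "g" (inc) : "cc");

   arrives here with STRING "addl %1,%0"; one element in OUTPUTS whose
   TREE_PURPOSE is the constraint "=r" and whose TREE_VALUE is `sum';
   one element in INPUTS for "g"/`inc'; CLOBBERS listing "cc"; and VOL
   set because of `volatile'.  */
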
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print error now so don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, QImode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */
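
/* For instance (illustrative), in the GNU C statement expression

	z = ({ int t = f (x); t + 1; });

   each expression-statement inside the braces comes through here, and
   the value of the last one, `t + 1', is remembered in last_expr_value
   for expand_end_stmt_expr to return.  */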
1579
1580void
1581expand_expr_stmt (exp)
1582 tree exp;
1583{
ca695ac9
JB
1584 if (output_bytecode)
1585 {
1586 int org_stack_depth = stack_depth;
1587
1588 bc_expand_expr (exp);
1589
1590 /* Restore stack depth */
1591 if (stack_depth < org_stack_depth)
1592 abort ();
1593
1594 bc_emit_instruction (drop);
1595
1596 last_expr_type = TREE_TYPE (exp);
1597 return;
1598 }
1599
28d81abb
RK
1600 /* If -W, warn about statements with no side effects,
1601 except for an explicit cast to void (e.g. for assert()), and
1602 except inside a ({...}) where they may be useful. */
1603 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1604 {
1605 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1606 && !(TREE_CODE (exp) == CONVERT_EXPR
1607 && TREE_TYPE (exp) == void_type_node))
1608 warning_with_file_and_line (emit_filename, emit_lineno,
1609 "statement with no effect");
1610 else if (warn_unused)
1611 warn_if_unused_value (exp);
1612 }
1613 last_expr_type = TREE_TYPE (exp);
1614 if (! flag_syntax_only)
37366632
RK
1615 last_expr_value = expand_expr (exp,
1616 (expr_stmts_for_value
1617 ? NULL_RTX : const0_rtx),
28d81abb
RK
1618 VOIDmode, 0);
1619
1620 /* If all we do is reference a volatile value in memory,
1621 copy it to a register to be sure it is actually touched. */
1622 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1623 && TREE_THIS_VOLATILE (exp))
1624 {
6a5bbbe6
RS
1625 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1626 ;
1627 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
28d81abb
RK
1628 copy_to_reg (last_expr_value);
1629 else
ddbe9812
RS
1630 {
1631 rtx lab = gen_label_rtx ();
1632
1633 /* Compare the value with itself to reference it. */
1634 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1635 expand_expr (TYPE_SIZE (last_expr_type),
37366632 1636 NULL_RTX, VOIDmode, 0),
1637 BLKmode, 0,
1638 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1639 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1640 emit_label (lab);
1641 }
1642 }
1643
1644 /* If this expression is part of a ({...}) and is in memory, we may have
1645 to preserve temporaries. */
1646 preserve_temp_slots (last_expr_value);
1647
1648 /* Free any temporaries used to evaluate this expression. Any temporary
1649 used as a result of this expression will already have been preserved
1650 above. */
1651 free_temp_slots ();
1652
1653 emit_queue ();
1654}
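/* For illustration, a hedged sketch (hypothetical names) of the
   ({...}) grouping handled above: when expr_stmts_for_value is
   nonzero, the value of the last expression statement, `t * 2' here,
   is kept as the value of the whole group.  */
#if 0
int
example_stmt_expr (x)
     int x;
{
  return ({ int t = x + 1; t * 2; });
}
#endif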
1655
1656/* Warn if EXP contains any computations whose results are not used.
1657 Return 1 if a warning is printed; 0 otherwise. */
1658
1659static int
1660warn_if_unused_value (exp)
1661 tree exp;
1662{
1663 if (TREE_USED (exp))
1664 return 0;
1665
1666 switch (TREE_CODE (exp))
1667 {
1668 case PREINCREMENT_EXPR:
1669 case POSTINCREMENT_EXPR:
1670 case PREDECREMENT_EXPR:
1671 case POSTDECREMENT_EXPR:
1672 case MODIFY_EXPR:
1673 case INIT_EXPR:
1674 case TARGET_EXPR:
1675 case CALL_EXPR:
1676 case METHOD_CALL_EXPR:
1677 case RTL_EXPR:
1678 case WITH_CLEANUP_EXPR:
1679 case EXIT_EXPR:
1680 /* We don't warn about COND_EXPR because it may be a useful
1681 construct if either arm contains a side effect. */
1682 case COND_EXPR:
1683 return 0;
1684
1685 case BIND_EXPR:
1686 /* For a binding, warn if no side effect within it. */
1687 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1688
1689 case TRUTH_ORIF_EXPR:
1690 case TRUTH_ANDIF_EXPR:
1691 /* In && or ||, warn if 2nd operand has no side effect. */
1692 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1693
1694 case COMPOUND_EXPR:
1695 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1696 return 1;
1697 /* Let people do `(foo (), 0)' without a warning. */
1698 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1699 return 0;
1700 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1701
1702 case NOP_EXPR:
1703 case CONVERT_EXPR:
b4ac57ab 1704 case NON_LVALUE_EXPR:
1705 /* Don't warn about values cast to void. */
1706 if (TREE_TYPE (exp) == void_type_node)
1707 return 0;
1708 /* Don't warn about conversions not explicit in the user's program. */
1709 if (TREE_NO_UNUSED_WARNING (exp))
1710 return 0;
1711 /* Assignment to a cast usually results in a cast of a modify.
1712 Don't complain about that. */
1713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
1714 return 0;
1715 /* Sometimes it results in a cast of a cast of a modify.
1716 Don't complain about that. */
1717 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
1718 || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
1719 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
1720 return 0;
1721
1722 default:
1723 /* Referencing a volatile value is a side effect, so don't warn. */
1724 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1725 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1726 && TREE_THIS_VOLATILE (exp))
1727 return 0;
1728 warning_with_file_and_line (emit_filename, emit_lineno,
1729 "value computed is not used");
1730 return 1;
1731 }
1732}
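/* For illustration, a hedged sketch (hypothetical names, not from this
   file) of statements the checks above accept or warn about.  */
#if 0
extern void do_side_effect ();

void
example_unused_warnings (a, b, p)
     int a, b;
     int *p;
{
  a + b;			/* warns: statement with no effect */
  (void) (a + b);		/* no warning: explicit cast to void */
  a++;				/* no warning: has a side effect */
  *p++;				/* warns: value computed is not used */
  (do_side_effect (), 0);	/* no warning: constant second operand */
}
#endif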
1733
1734/* Clear out the memory of the last expression evaluated. */
1735
1736void
1737clear_last_expr ()
1738{
1739 last_expr_type = 0;
1740}
1741
1742/* Begin a statement which will return a value.
1743 Return the RTL_EXPR for this statement expr.
1744 The caller must save that value and pass it to expand_end_stmt_expr. */
1745
1746tree
1747expand_start_stmt_expr ()
1748{
1749 int momentary;
1750 tree t;
1751
1752 /* When generating bytecode just note down the stack depth */
1753 if (output_bytecode)
1754 return (build_int_2 (stack_depth, 0));
1755
1756 /* Make the RTL_EXPR node temporary, not momentary,
1757 so that rtl_expr_chain doesn't become garbage. */
1758 momentary = suspend_momentary ();
1759 t = make_node (RTL_EXPR);
1760 resume_momentary (momentary);
1761 start_sequence ();
1762 NO_DEFER_POP;
1763 expr_stmts_for_value++;
1764 return t;
1765}
1766
1767/* Restore the previous state at the end of a statement that returns a value.
1768 Returns a tree node representing the statement's value and the
1769 insns to compute the value.
1770
1771 The nodes of that expression have been freed by now, so we cannot use them.
1772 But we don't want to do that anyway; the expression has already been
1773 evaluated and now we just want to use the value. So generate a RTL_EXPR
1774 with the proper type and RTL value.
1775
1776 If the last substatement was not an expression,
1777 return something with type `void'. */
1778
1779tree
1780expand_end_stmt_expr (t)
1781 tree t;
1782{
1783 if (output_bytecode)
1784 {
1785 int i;
1786 tree t;
1787
1788
1789 /* At this point, all expressions have been evaluated in order.
1790 However, all expression values have been popped when evaluated,
1791 which means we have to recover the last expression value. This is
1792 the last value removed by means of a `drop' instruction. Instead
1793 of adding code to inhibit dropping the last expression value, it
1794 is here recovered by undoing the `drop'. Since `drop' is
1795 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1796 [-1]'. */
1797
1798 bc_adjust_stack (-1);
1799
1800 if (!last_expr_type)
1801 last_expr_type = void_type_node;
1802
1803 t = make_node (RTL_EXPR);
1804 TREE_TYPE (t) = last_expr_type;
1805 RTL_EXPR_RTL (t) = NULL;
1806 RTL_EXPR_SEQUENCE (t) = NULL;
1807
1808 /* Don't consider deleting this expr or containing exprs at tree level. */
1809 TREE_THIS_VOLATILE (t) = 1;
1810
1811 last_expr_type = 0;
1812 return t;
1813 }
1814
1815 OK_DEFER_POP;
1816
1817 if (last_expr_type == 0)
1818 {
1819 last_expr_type = void_type_node;
1820 last_expr_value = const0_rtx;
1821 }
1822 else if (last_expr_value == 0)
1823 /* There are some cases where this can happen, such as when the
1824 statement is void type. */
1825 last_expr_value = const0_rtx;
1826 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1827 /* Remove any possible QUEUED. */
1828 last_expr_value = protect_from_queue (last_expr_value, 0);
1829
1830 emit_queue ();
1831
1832 TREE_TYPE (t) = last_expr_type;
1833 RTL_EXPR_RTL (t) = last_expr_value;
1834 RTL_EXPR_SEQUENCE (t) = get_insns ();
1835
1836 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1837
1838 end_sequence ();
1839
1840 /* Don't consider deleting this expr or containing exprs at tree level. */
1841 TREE_SIDE_EFFECTS (t) = 1;
1842 /* Propagate volatility of the actual RTL expr. */
1843 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1844
1845 last_expr_type = 0;
1846 expr_stmts_for_value--;
1847
1848 return t;
1849}
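/* For illustration, a hedged sketch of the caller protocol described
   above; the parsing between the two calls is elided.  */
#if 0
  tree stmt_expr = expand_start_stmt_expr ();
  /* ... parse the grouping, calling expand_expr_stmt for each
     expression statement; the last one supplies the value ... */
  stmt_expr = expand_end_stmt_expr (stmt_expr);
#endif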
1850\f
1851/* The exception handling nesting looks like this:
1852
1853 <-- Level N-1
1854 { <-- exception handler block
1855 <-- Level N
1856 <-- in an exception handler
1857 { <-- try block
1858 : <-- in a TRY block
1859 : <-- in an exception handler
1860 :
1861 }
1862
1863 { <-- except block
1864 : <-- in an except block
1865 : <-- in an exception handler
1866 :
1867 }
1868
1869 }
a124fd5e 1870*/
1871
1872/* Return nonzero iff in a try block at level LEVEL. */
1873
1874int
1875in_try_block (level)
1876 int level;
1877{
1878 struct nesting *n = except_stack;
1879 while (1)
1880 {
1881 while (n && n->data.except_stmt.after_label != 0)
1882 n = n->next;
1883 if (n == 0)
1884 return 0;
1885 if (level == 0)
1886 return n != 0;
1887 level--;
1888 n = n->next;
1889 }
1890}
1891
1892/* Return nonzero iff in an except block at level LEVEL. */
1893
1894int
1895in_except_block (level)
1896 int level;
1897{
1898 struct nesting *n = except_stack;
1899 while (1)
1900 {
1901 while (n && n->data.except_stmt.after_label == 0)
1902 n = n->next;
1903 if (n == 0)
1904 return 0;
1905 if (level == 0)
1906 return n != 0;
1907 level--;
1908 n = n->next;
1909 }
1910}
1911
1912/* Return nonzero iff in an exception handler at level LEVEL. */
1913
1914int
1915in_exception_handler (level)
1916 int level;
1917{
1918 struct nesting *n = except_stack;
1919 while (n && level--)
1920 n = n->next;
1921 return n != 0;
1922}
1923
1924/* Record the fact that the current exception nesting raises
1925 exception EX. If not in an exception handler, return 0. */
1926int
1927expand_raise (ex)
1928 tree ex;
1929{
1930 tree *raises_ptr;
1931
1932 if (except_stack == 0)
1933 return 0;
1934 raises_ptr = &except_stack->data.except_stmt.raised;
1935 if (! value_member (ex, *raises_ptr))
1936 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1937 return 1;
1938}
1939
1940/* Generate RTL for the start of a try block.
1941
1942 TRY_CLAUSE is the condition to test to enter the try block. */
1943
1944void
1945expand_start_try (try_clause, exitflag, escapeflag)
1946 tree try_clause;
1947 int exitflag;
1948 int escapeflag;
1949{
1950 struct nesting *thishandler = ALLOC_NESTING ();
1951
1952 /* Make an entry on cond_stack for the cond we are entering. */
1953
1954 thishandler->next = except_stack;
1955 thishandler->all = nesting_stack;
1956 thishandler->depth = ++nesting_depth;
1957 thishandler->data.except_stmt.raised = 0;
1958 thishandler->data.except_stmt.handled = 0;
1959 thishandler->data.except_stmt.first_insn = get_insns ();
1960 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1961 thishandler->data.except_stmt.unhandled_label = 0;
1962 thishandler->data.except_stmt.after_label = 0;
1963 thishandler->data.except_stmt.escape_label
1964 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
1965 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
1966 except_stack = thishandler;
1967 nesting_stack = thishandler;
1968
37366632 1969 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
1970}
1971
1972/* End of a TRY block. Nothing to do for now. */
1973
1974void
1975expand_end_try ()
1976{
1977 except_stack->data.except_stmt.after_label = gen_label_rtx ();
1978 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1979 NULL_RTX);
1980}
1981
1982/* Start an `except' nesting contour.
1983 EXITFLAG says whether this contour should be able to `exit' something.
1984 ESCAPEFLAG says whether this contour should be escapable. */
1985
1986void
1987expand_start_except (exitflag, escapeflag)
1988 int exitflag;
1989 int escapeflag;
1990{
1991 if (exitflag)
1992 {
1993 struct nesting *n;
1994 /* An `exit' from catch clauses goes out to next exit level,
1995 if there is one. Otherwise, it just goes to the end
1996 of the construct. */
1997 for (n = except_stack->next; n; n = n->next)
1998 if (n->exit_label != 0)
1999 {
2000 except_stack->exit_label = n->exit_label;
2001 break;
2002 }
2003 if (n == 0)
2004 except_stack->exit_label = except_stack->data.except_stmt.after_label;
2005 }
2006 if (escapeflag)
2007 {
2008 struct nesting *n;
2009 /* An `escape' from catch clauses goes out to next escape level,
2010 if there is one. Otherwise, it just goes to the end
2011 of the construct. */
2012 for (n = except_stack->next; n; n = n->next)
2013 if (n->data.except_stmt.escape_label != 0)
2014 {
2015 except_stack->data.except_stmt.escape_label
2016 = n->data.except_stmt.escape_label;
2017 break;
2018 }
2019 if (n == 0)
2020 except_stack->data.except_stmt.escape_label
2021 = except_stack->data.except_stmt.after_label;
2022 }
2023 do_pending_stack_adjust ();
2024 emit_label (except_stack->data.except_stmt.except_label);
2025}
2026
2027/* Generate code to `escape' from an exception contour. This
2028 is like `exiting', but does not conflict with constructs which
2029 use `exit_label'.
2030
2031 Return nonzero if this contour is escapable, otherwise
2032 return zero, and language-specific code will emit the
2033 appropriate error message. */
2034int
2035expand_escape_except ()
2036{
2037 struct nesting *n;
2038 last_expr_type = 0;
2039 for (n = except_stack; n; n = n->next)
2040 if (n->data.except_stmt.escape_label != 0)
2041 {
2042 expand_goto_internal (NULL_TREE,
2043 n->data.except_stmt.escape_label, NULL_RTX);
2044 return 1;
2045 }
2046
2047 return 0;
2048}
2049
2050/* Finish processing an `except' contour.
2051 Culls out all exceptions which might be raised but not
2052 handled, and returns the list to the caller.
2053 Language-specific code is responsible for dealing with these
2054 exceptions. */
2055
2056tree
2057expand_end_except ()
2058{
2059 struct nesting *n;
2060 tree raised = NULL_TREE;
2061
2062 do_pending_stack_adjust ();
2063 emit_label (except_stack->data.except_stmt.after_label);
2064
2065 n = except_stack->next;
2066 if (n)
2067 {
2068 /* Propagate exceptions raised but not handled to next
2069 highest level. */
2070 tree handled = except_stack->data.except_stmt.handled;
2071 if (handled != void_type_node)
2072 {
2073 tree prev = NULL_TREE;
2074 raised = except_stack->data.except_stmt.raised;
2075 while (handled)
2076 {
2077 tree this_raise;
2078 for (this_raise = raised, prev = 0; this_raise;
2079 this_raise = TREE_CHAIN (this_raise))
2080 {
2081 if (value_member (TREE_VALUE (this_raise), handled))
2082 {
2083 if (prev)
2084 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2085 else
2086 {
2087 raised = TREE_CHAIN (raised);
2088 if (raised == NULL_TREE)
2089 goto nada;
2090 }
2091 }
2092 else
2093 prev = this_raise;
2094 }
2095 handled = TREE_CHAIN (handled);
2096 }
2097 if (prev == NULL_TREE)
2098 prev = raised;
2099 if (prev)
2100 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2101 nada:
2102 n->data.except_stmt.raised = raised;
2103 }
2104 }
2105
2106 POPSTACK (except_stack);
2107 last_expr_type = 0;
2108 return raised;
2109}
2110
2111/* Record that exception EX is caught by this exception handler.
2112 Return nonzero if in exception handling construct, otherwise return 0. */
2113int
2114expand_catch (ex)
2115 tree ex;
2116{
2117 tree *raises_ptr;
2118
2119 if (except_stack == 0)
2120 return 0;
2121 raises_ptr = &except_stack->data.except_stmt.handled;
2122 if (*raises_ptr != void_type_node
2123 && ex != NULL_TREE
2124 && ! value_member (ex, *raises_ptr))
2125 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2126 return 1;
2127}
2128
2129/* Record that this exception handler catches all exceptions.
2130 Return nonzero if in exception handling construct, otherwise return 0. */
2131
2132int
2133expand_catch_default ()
2134{
2135 if (except_stack == 0)
2136 return 0;
2137 except_stack->data.except_stmt.handled = void_type_node;
2138 return 1;
2139}
2140
2141int
2142expand_end_catch ()
2143{
2144 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2145 return 0;
2146 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
2147 NULL_RTX);
2148 return 1;
2149}
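/* For illustration, a hedged sketch of how a front end might drive the
   try/except entry points above; TRY_CLAUSE and EX stand for trees the
   caller supplies, and error handling is elided.  */
#if 0
  tree unhandled;
  expand_start_try (try_clause, 0, 1);
  /* ... expand the body; expand_raise records what it can raise ... */
  expand_end_try ();
  expand_start_except (0, 1);
  if (expand_catch (ex))
    {
      /* ... expand the handler for EX ... */
      expand_end_catch ();
    }
  unhandled = expand_end_except ();	/* raised but not handled here */
#endif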
2150\f
2151/* Generate RTL for the start of an if-then. COND is the expression
2152 whose truth should be tested.
2153
2154 If EXITFLAG is nonzero, this conditional is visible to
2155 `exit_something'. */
2156
2157void
2158expand_start_cond (cond, exitflag)
2159 tree cond;
2160 int exitflag;
2161{
2162 struct nesting *thiscond = ALLOC_NESTING ();
2163
2164 /* Make an entry on cond_stack for the cond we are entering. */
2165
2166 thiscond->next = cond_stack;
2167 thiscond->all = nesting_stack;
2168 thiscond->depth = ++nesting_depth;
2169 thiscond->data.cond.next_label = gen_label_rtx ();
2170 /* Before we encounter an `else', we don't need a separate exit label
2171 unless there are supposed to be exit statements
2172 to exit this conditional. */
2173 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2174 thiscond->data.cond.endif_label = thiscond->exit_label;
2175 cond_stack = thiscond;
2176 nesting_stack = thiscond;
2177
2178 if (output_bytecode)
2179 bc_expand_start_cond (cond, exitflag);
2180 else
2181 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2182}
2183
2184/* Generate RTL between then-clause and the elseif-clause
2185 of an if-then-elseif-.... */
2186
2187void
2188expand_start_elseif (cond)
2189 tree cond;
2190{
2191 if (cond_stack->data.cond.endif_label == 0)
2192 cond_stack->data.cond.endif_label = gen_label_rtx ();
2193 emit_jump (cond_stack->data.cond.endif_label);
2194 emit_label (cond_stack->data.cond.next_label);
2195 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2196 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2197}
2198
2199/* Generate RTL between the then-clause and the else-clause
2200 of an if-then-else. */
2201
2202void
2203expand_start_else ()
2204{
2205 if (cond_stack->data.cond.endif_label == 0)
2206 cond_stack->data.cond.endif_label = gen_label_rtx ();
2207
2208 if (output_bytecode)
2209 {
2210 bc_expand_start_else ();
2211 return;
2212 }
2213
2214 emit_jump (cond_stack->data.cond.endif_label);
2215 emit_label (cond_stack->data.cond.next_label);
2216 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2217}
2218
2219/* Generate RTL for the end of an if-then.
2220 Pop the record for it off of cond_stack. */
2221
2222void
2223expand_end_cond ()
2224{
2225 struct nesting *thiscond = cond_stack;
2226
2227 if (output_bytecode)
2228 bc_expand_end_cond ();
2229 else
2230 {
2231 do_pending_stack_adjust ();
2232 if (thiscond->data.cond.next_label)
2233 emit_label (thiscond->data.cond.next_label);
2234 if (thiscond->data.cond.endif_label)
2235 emit_label (thiscond->data.cond.endif_label);
2236 }
2237
2238 POPSTACK (cond_stack);
2239 last_expr_type = 0;
2240}
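/* For illustration, a hedged sketch of the expected call sequence for
   `if (c1) s1; else if (c2) s2; else s3;' using the entry points
   above; the statement expansions themselves are elided.  */
#if 0
  expand_start_cond (c1, 0);
  /* ... expand s1 ... */
  expand_start_elseif (c2);
  /* ... expand s2 ... */
  expand_start_else ();
  /* ... expand s3 ... */
  expand_end_cond ();
#endif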
2241
2242
2243/* Generate code for the start of an if-then. COND is the expression
2244 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2245 is to be visible to exit_something. It is assumed that the caller
2246 has pushed the previous context on the cond stack. */
2247void
2248bc_expand_start_cond (cond, exitflag)
2249 tree cond;
2250 int exitflag;
2251{
2252 struct nesting *thiscond = cond_stack;
2253
2254 thiscond->data.case_stmt.nominal_type = cond;
2255 bc_expand_expr (cond);
c3a2235b 2256 bc_emit_bytecode (xjumpifnot);
c53e9440 2257 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2258
2259#ifdef DEBUG_PRINT_CODE
2260 fputc ('\n', stderr);
2261#endif
2262}
2263
2264/* Generate the label for the end of an if with
2265 no else clause. */
2266void
2267bc_expand_end_cond ()
2268{
2269 struct nesting *thiscond = cond_stack;
2270
c53e9440 2271 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2272}
2273
2274/* Generate code for the start of the else clause of
2275 an if-then-else. */
2276void
2277bc_expand_start_else ()
2278{
2279 struct nesting *thiscond = cond_stack;
2280
2281 thiscond->data.cond.endif_label = thiscond->exit_label;
2282 thiscond->exit_label = gen_label_rtx ();
2283 bc_emit_bytecode (jump);
c53e9440 2284 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2285
2286#ifdef DEBUG_PRINT_CODE
2287 fputc ('\n', stderr);
2288#endif
2289
c53e9440 2290 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
ca695ac9 2291}
2292\f
2293/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2294 loop should be exited by `exit_something'. This is a loop for which
2295 `expand_continue' will jump to the top of the loop.
2296
2297 Make an entry on loop_stack to record the labels associated with
2298 this loop. */
2299
2300struct nesting *
2301expand_start_loop (exit_flag)
2302 int exit_flag;
2303{
2304 register struct nesting *thisloop = ALLOC_NESTING ();
2305
2306 /* Make an entry on loop_stack for the loop we are entering. */
2307
2308 thisloop->next = loop_stack;
2309 thisloop->all = nesting_stack;
2310 thisloop->depth = ++nesting_depth;
2311 thisloop->data.loop.start_label = gen_label_rtx ();
2312 thisloop->data.loop.end_label = gen_label_rtx ();
2313 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2314 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2315 loop_stack = thisloop;
2316 nesting_stack = thisloop;
2317
2318 if (output_bytecode)
2319 {
c53e9440 2320 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2321 return thisloop;
2322 }
2323
2324 do_pending_stack_adjust ();
2325 emit_queue ();
37366632 2326 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2327 emit_label (thisloop->data.loop.start_label);
2328
2329 return thisloop;
2330}
2331
2332/* Like expand_start_loop but for a loop where the continuation point
2333 (for expand_continue_loop) will be specified explicitly. */
2334
2335struct nesting *
2336expand_start_loop_continue_elsewhere (exit_flag)
2337 int exit_flag;
2338{
2339 struct nesting *thisloop = expand_start_loop (exit_flag);
2340 loop_stack->data.loop.continue_label = gen_label_rtx ();
2341 return thisloop;
2342}
2343
2344/* Specify the continuation point for a loop started with
2345 expand_start_loop_continue_elsewhere.
2346 Use this at the point in the code to which a continue statement
2347 should jump. */
2348
2349void
2350expand_loop_continue_here ()
2351{
2352 if (output_bytecode)
2353 {
c53e9440 2354 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2355 return;
2356 }
28d81abb 2357 do_pending_stack_adjust ();
37366632 2358 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2359 emit_label (loop_stack->data.loop.continue_label);
2360}
2361
2362/* End a loop. */
2363static void
2364bc_expand_end_loop ()
2365{
2366 struct nesting *thisloop = loop_stack;
2367
2368 bc_emit_bytecode (jump);
c53e9440 2369 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2370
2371#ifdef DEBUG_PRINT_CODE
2372 fputc ('\n', stderr);
2373#endif
2374
c53e9440 2375 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2376 POPSTACK (loop_stack);
2377 last_expr_type = 0;
2378}
2379
2380
2381/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2382 Pop the block off of loop_stack. */
2383
2384void
2385expand_end_loop ()
2386{
2387 register rtx insn;
2388 register rtx start_label;
2389 rtx last_test_insn = 0;
2390 int num_insns = 0;
2391
2392 if (output_bytecode)
2393 {
2394 bc_expand_end_loop ();
2395 return;
2396 }
2397
2398 insn = get_last_insn ();
2399 start_label = loop_stack->data.loop.start_label;
2400
2401 /* Mark the continue-point at the top of the loop if none elsewhere. */
2402 if (start_label == loop_stack->data.loop.continue_label)
2403 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2404
2405 do_pending_stack_adjust ();
2406
2407 /* If optimizing, perhaps reorder the loop. If the loop
2408 starts with a conditional exit, roll that to the end
2409 where it will optimize together with the jump back.
2410
2411 We look for the last conditional branch to the exit that we encounter
2412 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2413 branch to the exit first, use it.
2414
2415 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2416 because moving them is not valid. */
2417
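  /* Hedged illustration of the intended effect:

	 start: if (!cond) goto end;		 goto start;
		body			==>	 top:	body
		goto start;			 start:	if (!cond) goto end;
	 end:					 goto top;
						 end:

     after which jump optimization can combine the moved test with the
     jump back, leaving one conditional branch per iteration.  */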
2418 if (optimize
2419 &&
2420 ! (GET_CODE (insn) == JUMP_INSN
2421 && GET_CODE (PATTERN (insn)) == SET
2422 && SET_DEST (PATTERN (insn)) == pc_rtx
2423 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2424 {
2425 /* Scan insns from the top of the loop looking for a qualified
2426 conditional exit. */
2427 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2428 insn = NEXT_INSN (insn))
2429 {
2430 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2431 break;
2432
2433 if (GET_CODE (insn) == NOTE
2434 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2435 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2436 break;
2437
2438 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2439 num_insns++;
2440
2441 if (last_test_insn && num_insns > 30)
2442 break;
2443
2444 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2445 && SET_DEST (PATTERN (insn)) == pc_rtx
2446 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2447 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2448 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2449 == loop_stack->data.loop.end_label))
2450 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2451 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2452 == loop_stack->data.loop.end_label))))
2453 last_test_insn = insn;
2454
2455 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2456 && GET_CODE (PATTERN (insn)) == SET
2457 && SET_DEST (PATTERN (insn)) == pc_rtx
2458 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2459 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2460 == loop_stack->data.loop.end_label))
2461 /* Include BARRIER. */
2462 last_test_insn = NEXT_INSN (insn);
2463 }
2464
2465 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2466 {
2467 /* We found one. Move everything from there up
2468 to the end of the loop, and add a jump into the loop
2469 to jump to there. */
2470 register rtx newstart_label = gen_label_rtx ();
2471 register rtx start_move = start_label;
2472
b4ac57ab 2473 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2474 then we want to move this note also. */
2475 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2476 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2477 == NOTE_INSN_LOOP_CONT))
2478 start_move = PREV_INSN (start_move);
2479
2480 emit_label_after (newstart_label, PREV_INSN (start_move));
2481 reorder_insns (start_move, last_test_insn, get_last_insn ());
2482 emit_jump_insn_after (gen_jump (start_label),
2483 PREV_INSN (newstart_label));
2484 emit_barrier_after (PREV_INSN (newstart_label));
2485 start_label = newstart_label;
2486 }
2487 }
2488
2489 emit_jump (start_label);
37366632 2490 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2491 emit_label (loop_stack->data.loop.end_label);
2492
2493 POPSTACK (loop_stack);
2494
2495 last_expr_type = 0;
2496}
2497
2498/* Generate a jump to the current loop's continue-point.
2499 This is usually the top of the loop, but may be specified
2500 explicitly elsewhere. If not currently inside a loop,
2501 return 0 and do nothing; caller will print an error message. */
2502
2503int
2504expand_continue_loop (whichloop)
2505 struct nesting *whichloop;
2506{
2507 last_expr_type = 0;
2508 if (whichloop == 0)
2509 whichloop = loop_stack;
2510 if (whichloop == 0)
2511 return 0;
2512 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2513 NULL_RTX);
2514 return 1;
2515}
2516
2517/* Generate a jump to exit the current loop. If not currently inside a loop,
2518 return 0 and do nothing; caller will print an error message. */
2519
2520int
2521expand_exit_loop (whichloop)
2522 struct nesting *whichloop;
2523{
2524 last_expr_type = 0;
2525 if (whichloop == 0)
2526 whichloop = loop_stack;
2527 if (whichloop == 0)
2528 return 0;
37366632 2529 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2530 return 1;
2531}
2532
2533/* Generate a conditional jump to exit the current loop if COND
2534 evaluates to zero. If not currently inside a loop,
2535 return 0 and do nothing; caller will print an error message. */
2536
2537int
2538expand_exit_loop_if_false (whichloop, cond)
2539 struct nesting *whichloop;
2540 tree cond;
2541{
2542 last_expr_type = 0;
2543 if (whichloop == 0)
2544 whichloop = loop_stack;
2545 if (whichloop == 0)
2546 return 0;
2547 if (output_bytecode)
2548 {
2549 bc_expand_expr (cond);
2550 bc_expand_goto_internal (xjumpifnot,
2551 BYTECODE_BC_LABEL (whichloop->exit_label),
c53e9440 2552 NULL_RTX);
2553 }
2554 else
2555 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2556
2557 return 1;
2558}
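/* For illustration, a hedged sketch of driving the loop entry points
   above for `while (cond) body'; for a `for' statement the front end
   would use expand_start_loop_continue_elsewhere and mark the
   increment with expand_loop_continue_here instead.  */
#if 0
  struct nesting *loop = expand_start_loop (1);
  expand_exit_loop_if_false (loop, cond);	/* leave when COND is false */
  /* ... expand body; expand_continue_loop jumps back to the top ... */
  expand_end_loop ();
#endif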
2559
2560/* Return non-zero if we should preserve sub-expressions as separate
2561 pseudos. We never do so if we aren't optimizing. We always do so
2562 if -fexpensive-optimizations.
2563
2564 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2565 the loop may still be a small one. */
2566
2567int
2568preserve_subexpressions_p ()
2569{
2570 rtx insn;
2571
2572 if (flag_expensive_optimizations)
2573 return 1;
2574
2575 if (optimize == 0 || loop_stack == 0)
2576 return 0;
2577
2578 insn = get_last_insn_anywhere ();
2579
2580 return (insn
2581 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2582 < n_non_fixed_regs * 3));
2583
2584}
2585
2586/* Generate a jump to exit the current loop, conditional, binding contour
2587 or case statement. Not all such constructs are visible to this function,
2588 only those started with EXIT_FLAG nonzero. Individual languages use
2589 the EXIT_FLAG parameter to control which kinds of constructs you can
2590 exit this way.
2591
2592 If not currently inside anything that can be exited,
2593 return 0 and do nothing; caller will print an error message. */
2594
2595int
2596expand_exit_something ()
2597{
2598 struct nesting *n;
2599 last_expr_type = 0;
2600 for (n = nesting_stack; n; n = n->all)
2601 if (n->exit_label != 0)
2602 {
37366632 2603 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2604 return 1;
2605 }
2606
2607 return 0;
2608}
2609\f
2610/* Generate RTL to return from the current function, with no value.
2611 (That is, we do not do anything about returning any value.) */
2612
2613void
2614expand_null_return ()
2615{
2616 struct nesting *block = block_stack;
2617 rtx last_insn = 0;
2618
2619 if (output_bytecode)
2620 {
2621 bc_emit_instruction (ret);
2622 return;
2623 }
2624
2625 /* Does any pending block have cleanups? */
2626
2627 while (block && block->data.block.cleanups == 0)
2628 block = block->next;
2629
2630 /* If yes, use a goto to return, since that runs cleanups. */
2631
2632 expand_null_return_1 (last_insn, block != 0);
2633}
2634
2635/* Generate RTL to return from the current function, with value VAL. */
2636
2637void
2638expand_value_return (val)
2639 rtx val;
2640{
2641 struct nesting *block = block_stack;
2642 rtx last_insn = get_last_insn ();
2643 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2644
2645 /* Copy the value to the return location
2646 unless it's already there. */
2647
2648 if (return_reg != val)
2649 {
2650#ifdef PROMOTE_FUNCTION_RETURN
2651 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2652 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2653 int unsignedp = TREE_UNSIGNED (type);
2654
2655 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2656 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2657 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2658 || TREE_CODE (type) == OFFSET_TYPE)
2659 {
2660 PROMOTE_MODE (mode, unsignedp, type);
2661 }
2662
2663 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
3af6dfd8 2664 convert_move (return_reg, val, unsignedp);
2665 else
2666#endif
2667 emit_move_insn (return_reg, val);
2668 }
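  /* Hedged illustration of the promotion above: on a target whose
     PROMOTE_MODE widens sub-word values, a function returning `short'
     yields VAL in HImode while RETURN_REG is a full word, so
     convert_move extends VAL instead of a plain move.  */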
2669 if (GET_CODE (return_reg) == REG
2670 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2671 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2672
2673 /* Does any pending block have cleanups? */
2674
2675 while (block && block->data.block.cleanups == 0)
2676 block = block->next;
2677
2678 /* If yes, use a goto to return, since that runs cleanups.
2679 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2680
2681 expand_null_return_1 (last_insn, block != 0);
2682}
2683
2684/* Output a return with no value. If LAST_INSN is nonzero,
2685 pretend that the return takes place after LAST_INSN.
2686 If USE_GOTO is nonzero then don't use a return instruction;
2687 go to the return label instead. This causes any cleanups
2688 of pending blocks to be executed normally. */
2689
2690static void
2691expand_null_return_1 (last_insn, use_goto)
2692 rtx last_insn;
2693 int use_goto;
2694{
2695 rtx end_label = cleanup_label ? cleanup_label : return_label;
2696
2697 clear_pending_stack_adjust ();
2698 do_pending_stack_adjust ();
2699 last_expr_type = 0;
2700
2701 /* PCC-struct return always uses an epilogue. */
2702 if (current_function_returns_pcc_struct || use_goto)
2703 {
2704 if (end_label == 0)
2705 end_label = return_label = gen_label_rtx ();
37366632 2706 expand_goto_internal (NULL_TREE, end_label, last_insn);
2707 return;
2708 }
2709
2710 /* Otherwise output a simple return-insn if one is available,
2711 unless it won't do the job. */
2712#ifdef HAVE_return
2713 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2714 {
2715 emit_jump_insn (gen_return ());
2716 emit_barrier ();
2717 return;
2718 }
2719#endif
2720
2721 /* Otherwise jump to the epilogue. */
37366632 2722 expand_goto_internal (NULL_TREE, end_label, last_insn);
2723}
2724\f
2725/* Generate RTL to evaluate the expression RETVAL and return it
2726 from the current function. */
2727
2728void
2729expand_return (retval)
2730 tree retval;
2731{
2732 /* If there are any cleanups to be performed, then they will
2733 be inserted following LAST_INSN. It is desirable
2734 that the last_insn, for such purposes, should be the
2735 last insn before computing the return value. Otherwise, cleanups
2736 which call functions can clobber the return value. */
2737 /* ??? rms: I think that is erroneous, because in C++ it would
2738 run destructors on variables that might be used in the subsequent
2739 computation of the return value. */
2740 rtx last_insn = 0;
2741 register rtx val = 0;
2742 register rtx op0;
2743 tree retval_rhs;
2744 int cleanups;
2745 struct nesting *block;
2746
2747 /* Bytecode returns are quite simple, just leave the result on the
2748 arithmetic stack. */
2749 if (output_bytecode)
2750 {
2751 bc_expand_expr (retval);
2752 bc_emit_instruction (ret);
2753 return;
2754 }
2755
2756 /* If function wants no value, give it none. */
2757 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2758 {
37366632 2759 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2760 emit_queue ();
2761 expand_null_return ();
2762 return;
2763 }
2764
2765 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2766 cleanups = any_pending_cleanups (1);
2767
2768 if (TREE_CODE (retval) == RESULT_DECL)
2769 retval_rhs = retval;
2770 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2771 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2772 retval_rhs = TREE_OPERAND (retval, 1);
2773 else if (TREE_TYPE (retval) == void_type_node)
2774 /* Recognize tail-recursive call to void function. */
2775 retval_rhs = retval;
2776 else
2777 retval_rhs = NULL_TREE;
2778
2779 /* Only use `last_insn' if there are cleanups which must be run. */
2780 if (cleanups || cleanup_label != 0)
2781 last_insn = get_last_insn ();
2782
2783 /* Distribute return down conditional expr if either of the sides
2784 may involve tail recursion (see test below). This enhances the number
2785 of tail recursions we see. Don't do this always since it can produce
2786 sub-optimal code in some cases and we distribute assignments into
2787 conditional expressions when it would help. */
2788
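  /* Hedged illustration: the distribution turns
	 return cond ? f (x) : g (y);
     into
	 if (cond) return f (x); else return g (y);
     so a self-call in either arm can take the tail-recursion path
     below.  */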
2789 if (optimize && retval_rhs != 0
2790 && frame_offset == 0
2791 && TREE_CODE (retval_rhs) == COND_EXPR
2792 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2793 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2794 {
2795 rtx label = gen_label_rtx ();
2796 tree expr;
2797
37366632 2798 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2799 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2800 DECL_RESULT (current_function_decl),
2801 TREE_OPERAND (retval_rhs, 1));
2802 TREE_SIDE_EFFECTS (expr) = 1;
2803 expand_return (expr);
28d81abb 2804 emit_label (label);
2805
2806 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2807 DECL_RESULT (current_function_decl),
2808 TREE_OPERAND (retval_rhs, 2));
2809 TREE_SIDE_EFFECTS (expr) = 1;
2810 expand_return (expr);
2811 return;
2812 }
2813
2814 /* For tail-recursive call to current function,
2815 just jump back to the beginning.
2816 It's unsafe if any auto variable in this function
2817 has its address taken; for simplicity,
2818 require stack frame to be empty. */
2819 if (optimize && retval_rhs != 0
2820 && frame_offset == 0
2821 && TREE_CODE (retval_rhs) == CALL_EXPR
2822 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2823 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2824 /* Finish checking validity, and if valid emit code
2825 to set the argument variables for the new call. */
2826 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2827 DECL_ARGUMENTS (current_function_decl)))
2828 {
2829 if (tail_recursion_label == 0)
2830 {
2831 tail_recursion_label = gen_label_rtx ();
2832 emit_label_after (tail_recursion_label,
2833 tail_recursion_reentry);
2834 }
a3229491 2835 emit_queue ();
37366632 2836 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2837 emit_barrier ();
2838 return;
2839 }
2840#ifdef HAVE_return
2841 /* This optimization is safe if there are local cleanups
2842 because expand_null_return takes care of them.
2843 ??? I think it should also be safe when there is a cleanup label,
2844 because expand_null_return takes care of them, too.
2845 Any reason why not? */
2846 if (HAVE_return && cleanup_label == 0
2847 && ! current_function_returns_pcc_struct
2848 && BRANCH_COST <= 1)
2849 {
2850 /* If this is return x == y; then generate
2851 if (x == y) return 1; else return 0;
2852 if we can do it with explicit return insns and
2853 branches are cheap. */
2854 if (retval_rhs)
2855 switch (TREE_CODE (retval_rhs))
2856 {
2857 case EQ_EXPR:
2858 case NE_EXPR:
2859 case GT_EXPR:
2860 case GE_EXPR:
2861 case LT_EXPR:
2862 case LE_EXPR:
2863 case TRUTH_ANDIF_EXPR:
2864 case TRUTH_ORIF_EXPR:
2865 case TRUTH_AND_EXPR:
2866 case TRUTH_OR_EXPR:
2867 case TRUTH_NOT_EXPR:
94ed3915 2868 case TRUTH_XOR_EXPR:
2869 op0 = gen_label_rtx ();
2870 jumpifnot (retval_rhs, op0);
2871 expand_value_return (const1_rtx);
2872 emit_label (op0);
2873 expand_value_return (const0_rtx);
2874 return;
2875 }
2876 }
2877#endif /* HAVE_return */
2878
2879 if (cleanups
2880 && retval_rhs != 0
2881 && TREE_TYPE (retval_rhs) != void_type_node
2882 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2883 {
2884 /* Calculate the return value into a pseudo reg. */
37366632 2885 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2886 emit_queue ();
2887 /* All temporaries have now been used. */
2888 free_temp_slots ();
2889 /* Return the calculated value, doing cleanups first. */
2890 expand_value_return (val);
2891 }
2892 else
2893 {
2894 /* No cleanups or no hard reg used;
2895 calculate value into hard return reg. */
cba389cd 2896 expand_expr (retval, const0_rtx, VOIDmode, 0);
2897 emit_queue ();
2898 free_temp_slots ();
2899 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2900 }
2901}
2902
2903/* Return 1 if the end of the generated RTX is not a barrier.
2904 This means code already compiled can drop through. */
2905
2906int
2907drop_through_at_end_p ()
2908{
2909 rtx insn = get_last_insn ();
2910 while (insn && GET_CODE (insn) == NOTE)
2911 insn = PREV_INSN (insn);
2912 return insn && GET_CODE (insn) != BARRIER;
2913}
2914\f
2915/* Emit code to alter this function's formal parms for a tail-recursive call.
2916 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2917 FORMALS is the chain of decls of formals.
2918 Return 1 if this can be done;
2919 otherwise return 0 and do not emit any code. */
2920
2921static int
2922tail_recursion_args (actuals, formals)
2923 tree actuals, formals;
2924{
2925 register tree a = actuals, f = formals;
2926 register int i;
2927 register rtx *argvec;
2928
2929 /* Check that number and types of actuals are compatible
2930 with the formals. This is not always true in valid C code.
2931 Also check that no formal needs to be addressable
2932 and that all formals are scalars. */
2933
2934 /* Also count the args. */
2935
2936 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2937 {
2938 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2939 return 0;
2940 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2941 return 0;
2942 }
2943 if (a != 0 || f != 0)
2944 return 0;
2945
2946 /* Compute all the actuals. */
2947
2948 argvec = (rtx *) alloca (i * sizeof (rtx));
2949
2950 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 2951 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2952
2953 /* Find which actual values refer to current values of previous formals.
2954 Copy each of them now, before any formal is changed. */
2955
2956 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2957 {
2958 int copy = 0;
2959 register int j;
2960 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2961 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2962 { copy = 1; break; }
2963 if (copy)
2964 argvec[i] = copy_to_reg (argvec[i]);
2965 }
2966
2967 /* Store the values of the actuals into the formals. */
2968
2969 for (f = formals, a = actuals, i = 0; f;
2970 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2971 {
98f3b471 2972 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2973 emit_move_insn (DECL_RTL (f), argvec[i]);
2974 else
2975 convert_move (DECL_RTL (f), argvec[i],
2976 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2977 }
2978
2979 free_temp_slots ();
2980 return 1;
2981}
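/* For illustration, a hedged, hypothetical example of a call the
   tail-recursion path accepts: the self-call is in return position
   and both formals are scalars kept in registers, so the stores
   emitted above replace the call with a jump to tail_recursion_label.  */
#if 0
static int
example_gcd (a, b)
     int a, b;
{
  if (b == 0)
    return a;
  return example_gcd (b, a % b);	/* becomes a jump, not a call */
}
#endif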
2982\f
2983/* Generate the RTL code for entering a binding contour.
2984 The variables are declared one by one, by calls to `expand_decl'.
2985
2986 EXIT_FLAG is nonzero if this construct should be visible to
2987 `exit_something'. */
2988
2989void
2990expand_start_bindings (exit_flag)
2991 int exit_flag;
2992{
2993 struct nesting *thisblock = ALLOC_NESTING ();
ca695ac9 2994 rtx note;
28d81abb 2995
2996 if (!output_bytecode)
2997 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2998
2999 /* Make an entry on block_stack for the block we are entering. */
3000
3001 thisblock->next = block_stack;
3002 thisblock->all = nesting_stack;
3003 thisblock->depth = ++nesting_depth;
3004 thisblock->data.block.stack_level = 0;
3005 thisblock->data.block.cleanups = 0;
3006 thisblock->data.block.function_call_count = 0;
3007#if 0
3008 if (block_stack)
3009 {
3010 if (block_stack->data.block.cleanups == NULL_TREE
3011 && (block_stack->data.block.outer_cleanups == NULL_TREE
3012 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3013 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3014 else
3015 thisblock->data.block.outer_cleanups
3016 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3017 block_stack->data.block.outer_cleanups);
3018 }
3019 else
3020 thisblock->data.block.outer_cleanups = 0;
3021#endif
3022#if 1
3023 if (block_stack
3024 && !(block_stack->data.block.cleanups == NULL_TREE
3025 && block_stack->data.block.outer_cleanups == NULL_TREE))
3026 thisblock->data.block.outer_cleanups
3027 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3028 block_stack->data.block.outer_cleanups);
3029 else
3030 thisblock->data.block.outer_cleanups = 0;
3031#endif
3032 thisblock->data.block.label_chain = 0;
3033 thisblock->data.block.innermost_stack_block = stack_block_stack;
3034 thisblock->data.block.first_insn = note;
3035 thisblock->data.block.block_start_count = ++block_start_count;
3036 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3037 block_stack = thisblock;
3038 nesting_stack = thisblock;
3039
3040 if (!output_bytecode)
3041 {
3042 /* Make a new level for allocating stack slots. */
3043 push_temp_slots ();
3044 }
3045}
3046
3047/* Given a pointer to a BLOCK node, save a pointer to the most recently
3048 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3049 BLOCK node. */
3050
3051void
3052remember_end_note (block)
3053 register tree block;
3054{
3055 BLOCK_END_NOTE (block) = last_block_end_note;
3056 last_block_end_note = NULL_RTX;
3057}
3058
3059/* Generate RTL code to terminate a binding contour.
3060 VARS is the chain of VAR_DECL nodes
3061 for the variables bound in this contour.
3062 MARK_ENDS is nonzero if we should put a note at the beginning
3063 and end of this binding contour.
3064
3065 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3066 (That is true automatically if the contour has a saved stack level.) */
3067
3068void
3069expand_end_bindings (vars, mark_ends, dont_jump_in)
3070 tree vars;
3071 int mark_ends;
3072 int dont_jump_in;
3073{
3074 register struct nesting *thisblock = block_stack;
3075 register tree decl;
3076
3077 if (output_bytecode)
3078 {
3079 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3080 return;
3081 }
3082
3083 if (warn_unused)
3084 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3085 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3086 && ! DECL_IN_SYSTEM_HEADER (decl))
3087 warning_with_decl (decl, "unused variable `%s'");
3088
3089 if (thisblock->exit_label)
3090 {
3091 do_pending_stack_adjust ();
3092 emit_label (thisblock->exit_label);
3093 }
3094
3095 /* If necessary, make a handler for nonlocal gotos taking
3096 place in the function calls in this block. */
3097 if (function_call_count != thisblock->data.block.function_call_count
3098 && nonlocal_labels
3099 /* Make handler for outermost block
3100 if there were any nonlocal gotos to this function. */
3101 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3102 /* Make handler for inner block if it has something
3103 special to do when you jump out of it. */
3104 : (thisblock->data.block.cleanups != 0
3105 || thisblock->data.block.stack_level != 0)))
3106 {
3107 tree link;
3108 rtx afterward = gen_label_rtx ();
3109 rtx handler_label = gen_label_rtx ();
3110 rtx save_receiver = gen_reg_rtx (Pmode);
ba83886f 3111 rtx insns;
3112
3113 /* Don't let jump_optimize delete the handler. */
3114 LABEL_PRESERVE_P (handler_label) = 1;
3115
3116 /* Record the handler address in the stack slot for that purpose,
3117 during this block, saving and restoring the outer value. */
3118 if (thisblock->next != 0)
3119 {
3120 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3121
3122 start_sequence ();
3123 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3124 insns = get_insns ();
3125 end_sequence ();
3126 emit_insns_before (insns, thisblock->data.block.first_insn);
28d81abb 3127 }
3128
3129 start_sequence ();
3130 emit_move_insn (nonlocal_goto_handler_slot,
3131 gen_rtx (LABEL_REF, Pmode, handler_label));
3132 insns = get_insns ();
3133 end_sequence ();
3134 emit_insns_before (insns, thisblock->data.block.first_insn);
3135
3136 /* Jump around the handler; it runs only when specially invoked. */
3137 emit_jump (afterward);
3138 emit_label (handler_label);
3139
3140#ifdef HAVE_nonlocal_goto
3141 if (! HAVE_nonlocal_goto)
3142#endif
3143 /* First adjust our frame pointer to its actual value. It was
3144 previously set to the start of the virtual area corresponding to
3145 the stacked variables when we branched here and now needs to be
3146 adjusted to the actual hardware fp value.
3147
3148 Assignments to virtual registers are converted by
3149 instantiate_virtual_regs into the corresponding assignment
3150 to the underlying register (fp in this case) that makes
3151 the original assignment true.
3152 So the following insn will actually be
3153 decrementing fp by STARTING_FRAME_OFFSET. */
3154 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3155
a35ad168 3156#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3157 if (fixed_regs[ARG_POINTER_REGNUM])
3158 {
3159#ifdef ELIMINABLE_REGS
3160 /* If the argument pointer can be eliminated in favor of the
3161 frame pointer, we don't need to restore it. We assume here
3162 that if such an elimination is present, it can always be used.
3163 This is the case on all known machines; if we don't make this
3164 assumption, we do unnecessary saving on many machines. */
3165 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3166 int i;
3167
3168 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3169 if (elim_regs[i].from == ARG_POINTER_REGNUM
a35ad168 3170 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3171 break;
3172
3173 if (i == sizeof elim_regs / sizeof elim_regs [0])
3174#endif
3175 {
3176 /* Now restore our arg pointer from the address at which it
3177 was saved in our stack frame.
3178 If there hasn't been space allocated for it yet, make
3179 some now. */
3180 if (arg_pointer_save_area == 0)
3181 arg_pointer_save_area
3182 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3183 emit_move_insn (virtual_incoming_args_rtx,
3184 /* We need a pseudo here, or else
3185 instantiate_virtual_regs_1 complains. */
3186 copy_to_reg (arg_pointer_save_area));
3187 }
3188 }
3189#endif
3190
3191 /* The handler expects the desired label address in the static chain
3192 register. It tests the address and does an appropriate jump
3193 to whatever label is desired. */
3194 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3195 /* Skip any labels we shouldn't be able to jump to from here. */
3196 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3197 {
3198 rtx not_this = gen_label_rtx ();
3199 rtx this = gen_label_rtx ();
3200 do_jump_if_equal (static_chain_rtx,
3201 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3202 this, 0);
3203 emit_jump (not_this);
3204 emit_label (this);
3205 expand_goto (TREE_VALUE (link));
3206 emit_label (not_this);
3207 }
3208 /* If label is not recognized, abort. */
3209 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3210 VOIDmode, 0);
3211 emit_label (afterward);
3212 }
3213
3214 /* Don't allow jumping into a block that has cleanups or a stack level. */
3215 if (dont_jump_in
3216 || thisblock->data.block.stack_level != 0
3217 || thisblock->data.block.cleanups != 0)
3218 {
3219 struct label_chain *chain;
3220
3221 /* Any labels in this block are no longer valid to go to.
3222 Mark them to cause an error message. */
3223 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3224 {
3225 DECL_TOO_LATE (chain->label) = 1;
3226 /* If any goto without a fixup came to this label,
3227 that must be an error, because gotos without fixups
3228 come from outside all saved stack-levels and all cleanups. */
3229 if (TREE_ADDRESSABLE (chain->label))
3230 error_with_decl (chain->label,
3231 "label `%s' used before containing binding contour");
3232 }
3233 }
3234
3235 /* Restore stack level in effect before the block
3236 (only if variable-size objects allocated). */
3237 /* Perform any cleanups associated with the block. */
3238
3239 if (thisblock->data.block.stack_level != 0
3240 || thisblock->data.block.cleanups != 0)
3241 {
3242 /* Don't let cleanups affect ({...}) constructs. */
3243 int old_expr_stmts_for_value = expr_stmts_for_value;
3244 rtx old_last_expr_value = last_expr_value;
3245 tree old_last_expr_type = last_expr_type;
3246 expr_stmts_for_value = 0;
3247
3248 /* Do the cleanups. */
37366632 3249 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3250 do_pending_stack_adjust ();
3251
3252 expr_stmts_for_value = old_expr_stmts_for_value;
3253 last_expr_value = old_last_expr_value;
3254 last_expr_type = old_last_expr_type;
3255
3256 /* Restore the stack level. */
3257
3258 if (thisblock->data.block.stack_level != 0)
3259 {
59257ff7 3260 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
37366632 3261 thisblock->data.block.stack_level, NULL_RTX);
59257ff7 3262 if (nonlocal_goto_handler_slot != 0)
3263 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3264 NULL_RTX);
3265 }
3266
3267 /* Any gotos out of this block must also do these things.
3268 Also report any gotos with fixups that came to labels in this
3269 level. */
3270 fixup_gotos (thisblock,
3271 thisblock->data.block.stack_level,
3272 thisblock->data.block.cleanups,
3273 thisblock->data.block.first_insn,
3274 dont_jump_in);
3275 }
3276
3277 /* Mark the beginning and end of the scope if requested.
3278 We do this now, after running cleanups on the variables
3279 just going out of scope, so they are in scope for their cleanups. */
3280
3281 if (mark_ends)
7629c936 3282 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3283 else
3284 /* Get rid of the beginning-mark if we don't make an end-mark. */
3285 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3286
3287 /* If doing stupid register allocation, make sure lives of all
3288 register variables declared here extend thru end of scope. */
3289
3290 if (obey_regdecls)
3291 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3292 {
3293 rtx rtl = DECL_RTL (decl);
3294 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3295 use_variable (rtl);
3296 }
3297
3298 /* Restore block_stack level for containing block. */
3299
3300 stack_block_stack = thisblock->data.block.innermost_stack_block;
3301 POPSTACK (block_stack);
3302
3303 /* Pop the stack slot nesting and free any slots at this level. */
3304 pop_temp_slots ();
3305}
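/* For illustration, a hedged example (GNU C, hypothetical names) of
   the nonlocal goto supported by the handler emitted above: a nested
   function jumps to a label of its containing function, which must be
   declared with `__label__'.  */
#if 0
int
example_outer (n)
     int n;
{
  __label__ failed;
  void check (int v) { if (v < 0) goto failed; }	/* nonlocal goto */
  check (n);
  return 0;
 failed:
  return 1;
}
#endif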
3306
3307
3308/* End a binding contour.
3309 VARS is the chain of VAR_DECL nodes for the variables bound
3310 in this contour. MARK_ENDS is nonzero if we should put a note
3311 at the beginning and end of this binding contour.
3312 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3313 contour. */
3314
3315void
3316bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3317 tree vars;
3318 int mark_ends;
3319 int dont_jump_in;
3320{
3321 struct nesting *thisbind = nesting_stack;
3322 tree decl;
3323
3324 if (warn_unused)
3325 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3326 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3327 warning_with_decl (decl, "unused variable `%s'");
3328
c53e9440 3329 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3330
3331 /* Pop block/bindings off stack */
3332 POPSTACK (nesting_stack);
3333 POPSTACK (block_stack);
3334}
3335\f
3336/* Generate RTL for the automatic variable declaration DECL.
3337 (Other kinds of declarations are simply ignored if seen here.)
3338 CLEANUP is an expression to be executed at exit from this binding contour;
3339 for example, in C++, it might call the destructor for this variable.
3340
3341 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3342 either before or after calling `expand_decl' but before compiling
3343 any subsequent expressions. This is because CLEANUP may be expanded
3344 more than once, on different branches of execution.
3345 For the same reason, CLEANUP may not contain a CALL_EXPR
3346 except as its topmost node--else `preexpand_calls' would get confused.
3347
3348 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3349 that is not associated with any particular variable.
3350
3351 There is no special support here for C++ constructors.
3352 They should be handled by the proper code in DECL_INITIAL. */
3353
3354void
3355expand_decl (decl)
3356 register tree decl;
3357{
3358 struct nesting *thisblock = block_stack;
3359 tree type;
3360
3361 if (output_bytecode)
3362 {
3363 bc_expand_decl (decl, 0);
3364 return;
3365 }
3366
3367 type = TREE_TYPE (decl);
3368
3369 /* Only automatic variables need any expansion done.
3370 Static and external variables, and external functions,
3371 will be handled by `assemble_variable' (called from finish_decl).
3372 TYPE_DECL and CONST_DECL require nothing.
3373 PARM_DECLs are handled in `assign_parms'. */
3374
3375 if (TREE_CODE (decl) != VAR_DECL)
3376 return;
44fe2e80 3377 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3378 return;
3379
3380 /* Create the RTL representation for the variable. */
3381
3382 if (type == error_mark_node)
3383 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3384 else if (DECL_SIZE (decl) == 0)
3385 /* Variable with incomplete type. */
3386 {
3387 if (DECL_INITIAL (decl) == 0)
3388 /* Error message was already done; now avoid a crash. */
3389 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3390 else
3391 /* An initializer is going to decide the size of this array.
3392 Until we know the size, represent its address with a reg. */
3393 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3394 }
3395 else if (DECL_MODE (decl) != BLKmode
3396 /* If -ffloat-store, don't put explicit float vars
3397 into regs. */
3398 && !(flag_float_store
3399 && TREE_CODE (type) == REAL_TYPE)
3400 && ! TREE_THIS_VOLATILE (decl)
3401 && ! TREE_ADDRESSABLE (decl)
3402 && (DECL_REGISTER (decl) || ! obey_regdecls))
3403 {
3404 /* Automatic variable that can go in a register. */
3405 enum machine_mode reg_mode = DECL_MODE (decl);
3406 int unsignedp = TREE_UNSIGNED (type);
3407
3408 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
3409 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
3410 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
3411 || TREE_CODE (type) == OFFSET_TYPE)
3412 {
3413 PROMOTE_MODE (reg_mode, unsignedp, type);
3414 }
3415
3416 if (TREE_CODE (type) == COMPLEX_TYPE)
3417 {
3418 rtx realpart, imagpart;
3419 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3420
3421 /* For a complex type variable, make a CONCAT of two pseudos
3422 so that the real and imaginary parts
3423 can be allocated separately. */
3424 realpart = gen_reg_rtx (partmode);
3425 REG_USERVAR_P (realpart) = 1;
3426 imagpart = gen_reg_rtx (partmode);
3427 REG_USERVAR_P (imagpart) = 1;
3428 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3429 }
3430 else
3431 {
3432 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3433 if (TREE_CODE (type) == POINTER_TYPE)
3434 mark_reg_pointer (DECL_RTL (decl));
3435 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3436 }
3437 }
3438 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3439 {
3440 /* Variable of fixed size that goes on the stack. */
3441 rtx oldaddr = 0;
3442 rtx addr;
3443
3444 /* If we previously made RTL for this decl, it must be an array
3445 whose size was determined by the initializer.
3446 The old address was a register; set that register now
3447 to the proper address. */
3448 if (DECL_RTL (decl) != 0)
3449 {
3450 if (GET_CODE (DECL_RTL (decl)) != MEM
3451 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3452 abort ();
3453 oldaddr = XEXP (DECL_RTL (decl), 0);
3454 }
3455
3456 DECL_RTL (decl)
3457 = assign_stack_temp (DECL_MODE (decl),
3458 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3459 + BITS_PER_UNIT - 1)
3460 / BITS_PER_UNIT),
3461 1);
3462
3463 /* Set alignment we actually gave this decl. */
3464 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3465 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3466
3467 if (oldaddr)
3468 {
3469 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3470 if (addr != oldaddr)
3471 emit_move_insn (oldaddr, addr);
3472 }
3473
3474 /* If this is a memory ref that contains aggregate components,
3475 mark it as such for cse and loop optimize. */
3476 MEM_IN_STRUCT_P (DECL_RTL (decl))
3477 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3478 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3479 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3480 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3481#if 0
3482 /* If this is in memory because of -ffloat-store,
3483 set the volatile bit, to prevent optimizations from
3484 undoing the effects. */
3485 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3486 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3487#endif
3488 }
3489 else
3490 /* Dynamic-size object: must push space on the stack. */
3491 {
3492 rtx address, size;
3493
3494 /* Record the stack pointer on entry to block, if have
3495 not already done so. */
3496 if (thisblock->data.block.stack_level == 0)
3497 {
3498 do_pending_stack_adjust ();
3499 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3500 &thisblock->data.block.stack_level,
3501 thisblock->data.block.first_insn);
3502 stack_block_stack = thisblock;
3503 }
3504
3505 /* Compute the variable's size, in bytes. */
3506 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3507 DECL_SIZE (decl),
3508 size_int (BITS_PER_UNIT)),
3509 NULL_RTX, VOIDmode, 0);
3510 free_temp_slots ();
3511
3512 /* This is equivalent to calling alloca. */
3513 current_function_calls_alloca = 1;
3514
3515 /* Allocate space on the stack for the variable. */
3516 address = allocate_dynamic_stack_space (size, NULL_RTX,
3517 DECL_ALIGN (decl));
3518
3519 if (nonlocal_goto_handler_slot != 0)
3520 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3521
3522 /* Reference the variable indirect through that rtx. */
3523 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3524
3525 /* If this is a memory ref that contains aggregate components,
3526 mark it as such for cse and loop optimize. */
3527 MEM_IN_STRUCT_P (DECL_RTL (decl))
3528 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3529 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3530 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3531 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3532
3533 /* Indicate the alignment we actually gave this variable. */
3534#ifdef STACK_BOUNDARY
3535 DECL_ALIGN (decl) = STACK_BOUNDARY;
3536#else
3537 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3538#endif
3539 }
3540
3541 if (TREE_THIS_VOLATILE (decl))
3542 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3543#if 0 /* A variable is not necessarily unchanging
3544 just because it is const. RTX_UNCHANGING_P
3545 means no change in the function,
3546 not merely no change in the variable's scope.
3547 It is correct to set RTX_UNCHANGING_P if the variable's scope
3548 is the whole function. There's no convenient way to test that. */
3549 if (TREE_READONLY (decl))
3550 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3551#endif
3552
3553 /* If doing stupid register allocation, make sure life of any
3554 register variable starts here, at the start of its scope. */
3555
3556 if (obey_regdecls)
3557 use_variable (DECL_RTL (decl));
3558}
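/* A hedged user-level illustration (example only) of the three storage
   strategies chosen above:  */
#if 0
void f (int n)
{
  int i;          /* scalar, not volatile or addressable: pseudo register */
  double d[4];    /* fixed size: stack slot from assign_stack_temp */
  char buf[n];    /* dynamic size: allocate_dynamic_stack_space */
}
#endif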
3559
3560
3561/* Generate code for the automatic variable declaration DECL. For
3562 most variables this just means we give it a stack offset. The
3563 compiler sometimes emits cleanups without variables and we will
3564 have to deal with those too. */
3565
3566void
3567bc_expand_decl (decl, cleanup)
3568 tree decl;
3569 tree cleanup;
3570{
3571 tree type;
3572
3573 if (!decl)
3574 {
3575 /* A cleanup with no variable. */
3576 if (!cleanup)
3577 abort ();
3578
3579 return;
3580 }
3581
3582 /* Only auto variables need any work. */
3583 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3584 return;
3585
3586 type = TREE_TYPE (decl);
3587
3588 if (type == error_mark_node)
3589 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3590
3591 else if (DECL_SIZE (decl) == 0)
3592
3593 /* Variable with incomplete type. The stack offset herein will be
3594 fixed later in expand_decl_init (). */
3595 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3596
3597 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3598 {
3599 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3600 DECL_ALIGN (decl));
3601 }
3602 else
3603 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3604}
3605\f
3606/* Emit code to perform the initialization of a declaration DECL. */
3607
3608void
3609expand_decl_init (decl)
3610 tree decl;
3611{
3612 int was_used = TREE_USED (decl);
3613
3614 /* If this is a CONST_DECL, we don't have to generate any code, but
3615 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3616 to be set while in the obstack containing the constant. If we don't
3617 do this, we can lose if we have functions nested three deep and the middle
3618 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3619 the innermost function is the first to expand that STRING_CST. */
3620 if (TREE_CODE (decl) == CONST_DECL)
3621 {
3622 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3623 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3624 EXPAND_INITIALIZER);
3625 return;
3626 }
3627
3628 if (TREE_STATIC (decl))
3629 return;
3630
3631 /* Compute and store the initial value now. */
3632
3633 if (DECL_INITIAL (decl) == error_mark_node)
3634 {
3635 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3636 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3637 || code == POINTER_TYPE)
3638 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3639 0, 0);
3640 emit_queue ();
3641 }
3642 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3643 {
3644 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3645 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3646 emit_queue ();
3647 }
3648
3649 /* Don't let the initialization count as "using" the variable. */
3650 TREE_USED (decl) = was_used;
3651
3652 /* Free any temporaries we made while initializing the decl. */
3653 free_temp_slots ();
3654}
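/* For example (an assumption about front-end usage, for illustration
   only): for `int x = f ();' at block scope the front end first calls
   expand_decl to give X its RTL, then this function, which emits a
   line note and expands the assignment `x = f ()'.  */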
3655
3656/* Expand initialization for variable-sized types. Allocate array
3657 using newlocalSI and set local variable, which is a pointer to the
3658 storage. */
3659
3660bc_expand_variable_local_init (decl)
3661 tree decl;
3662{
3663 /* Evaluate size expression and coerce to SI */
3664 bc_expand_expr (DECL_SIZE (decl));
3665
3666 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3667 no coercion is necessary (?) */
3668
3669/* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3670 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3671
3672 /* Emit code to allocate array */
3673 bc_emit_instruction (newlocalSI);
3674
3675 /* Store array pointer in local variable. This is the only instance
3676 where we actually want the address of the pointer to the
3677 variable-size block, rather than the pointer itself. We avoid
3678 using expand_address() since that would cause the pointer to be
3679 pushed rather than its address. Hence the hard-coded reference;
3680 notice also that the variable is always local (no global
3681 variable-size type variables). */
3682
3683 bc_load_localaddr (DECL_RTL (decl));
3684 bc_emit_instruction (storeP);
3685}
3686
3687
3688/* Emit code to initialize a declaration. */
3689void
3690bc_expand_decl_init (decl)
3691 tree decl;
3692{
3693 int org_stack_depth;
3694
3695 /* Static initializers are handled elsewhere. */
3696
3697 if (TREE_STATIC (decl))
3698 return;
3699
3700 /* Remember the original stack depth. */
3701 org_stack_depth = stack_depth;
3702
3703 /* If the type is variable-size, we first create its space (we ASSUME
3704 it CAN'T be static). We do this regardless of whether there's an
3705 initializer assignment or not. */
3706
3707 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3708 bc_expand_variable_local_init (decl);
3709
3710 /* Expand initializer assignment */
3711 if (DECL_INITIAL (decl) == error_mark_node)
3712 {
3713 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3714
3715 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3716 || code == POINTER_TYPE)
3717
3718 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3719 }
3720 else if (DECL_INITIAL (decl))
3721 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3722
3723 /* Restore stack depth */
3724 if (org_stack_depth > stack_depth)
3725 abort ();
3726
3727 bc_adjust_stack (stack_depth - org_stack_depth);
3728}
3729
3730
3731/* CLEANUP is an expression to be executed at exit from this binding contour;
3732 for example, in C++, it might call the destructor for this variable.
3733
3734 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3735 either before or after calling `expand_decl' but before compiling
3736 any subsequent expressions. This is because CLEANUP may be expanded
3737 more than once, on different branches of execution.
3738 For the same reason, CLEANUP may not contain a CALL_EXPR
3739 except as its topmost node--else `preexpand_calls' would get confused.
3740
3741 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3742 that is not associated with any particular variable. */
3743
3744int
3745expand_decl_cleanup (decl, cleanup)
3746 tree decl, cleanup;
3747{
3748 struct nesting *thisblock = block_stack;
3749
3750 /* Error if we are not in any block. */
3751 if (thisblock == 0)
3752 return 0;
3753
3754 /* Record the cleanup if there is one. */
3755
3756 if (cleanup != 0)
3757 {
3758 thisblock->data.block.cleanups
3759 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3760 /* If this block has a cleanup, it belongs in stack_block_stack. */
3761 stack_block_stack = thisblock;
3762 }
3763 return 1;
3764}
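/* Sketch of a hypothetical C++ front-end use (illustration only;
   CLEANUP_EXPR stands for a destructor call the front end builds):  */
#if 0
  expand_decl (obj_decl);
  expand_decl_init (obj_decl);
  if (! expand_decl_cleanup (obj_decl, cleanup_expr))
    error ("destructor required outside any binding contour");
#endif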
3765\f
3766/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3767 DECL_ELTS is the list of elements that belong to DECL's type.
3768 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3769
3770void
3771expand_anon_union_decl (decl, cleanup, decl_elts)
3772 tree decl, cleanup, decl_elts;
3773{
3774 struct nesting *thisblock = block_stack;
3775 rtx x;
3776
3777 expand_decl (decl, cleanup);
3778 x = DECL_RTL (decl);
3779
3780 while (decl_elts)
3781 {
3782 tree decl_elt = TREE_VALUE (decl_elts);
3783 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3784 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3785
3786 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3787 instead create a new MEM rtx with the proper mode. */
3788 if (GET_CODE (x) == MEM)
3789 {
3790 if (mode == GET_MODE (x))
3791 DECL_RTL (decl_elt) = x;
3792 else
3793 {
3794 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3795 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3796 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3797 }
3798 }
3799 else if (GET_CODE (x) == REG)
3800 {
3801 if (mode == GET_MODE (x))
3802 DECL_RTL (decl_elt) = x;
3803 else
3804 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3805 }
3806 else
3807 abort ();
3808
3809 /* Record the cleanup if there is one. */
3810
3811 if (cleanup != 0)
3812 thisblock->data.block.cleanups
3813 = temp_tree_cons (decl_elt, cleanup_elt,
3814 thisblock->data.block.cleanups);
3815
3816 decl_elts = TREE_CHAIN (decl_elts);
3817 }
3818}
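/* User-level construct this supports (C++ illustration only):

       union { int i; float f; };

   DECL is the anonymous union object; DECL_ELTS lists `i' and `f',
   each of which is given RTL aliasing the union's storage above.  */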
3819\f
3820/* Expand a list of cleanups LIST.
3821 Elements may be expressions or may be nested lists.
3822
3823 If DONT_DO is nonnull, then any list-element
3824 whose TREE_PURPOSE matches DONT_DO is omitted.
3825 This is sometimes used to avoid a cleanup associated with
3826 a value that is being returned out of the scope. */
3827
3828static void
3829expand_cleanups (list, dont_do)
3830 tree list;
3831 tree dont_do;
3832{
3833 tree tail;
3834 for (tail = list; tail; tail = TREE_CHAIN (tail))
3835 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3836 {
3837 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3838 expand_cleanups (TREE_VALUE (tail), dont_do);
3839 else
3840 {
3841 /* Cleanups may be run multiple times. For example,
3842 when exiting a binding contour, we expand the
3843 cleanups associated with that contour. When a goto
3844 within that binding contour has a target outside that
3845 contour, it will expand all cleanups from its scope to
3846 the target. Though the cleanups are expanded multiple
3847 times, the control paths are non-overlapping so the
3848 cleanups will not be executed twice. */
3849 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3850 free_temp_slots ();
3851 }
3852 }
3853}
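/* Illustration (example only) of the disjoint-paths argument in the
   comment above:

       {
         T x;            // cleanup: destroy x
         if (p)
           goto out;     // cleanups expanded on the goto path
       }                 // and expanded again on the fall-through path
   out: ;

   Both expansions exist in the RTL, but execution takes only one.  */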
3854
3855/* Move all cleanups from the current block_stack
3856 to the containing block_stack, where they are assumed to
3857 have been created. If anything can cause a temporary to
3858 be created, but not expanded for more than one level of
3859 block_stacks, then this code will have to change. */
3860
3861void
3862move_cleanups_up ()
3863{
3864 struct nesting *block = block_stack;
3865 struct nesting *outer = block->next;
3866
3867 outer->data.block.cleanups
3868 = chainon (block->data.block.cleanups,
3869 outer->data.block.cleanups);
3870 block->data.block.cleanups = 0;
3871}
3872
3873tree
3874last_cleanup_this_contour ()
3875{
3876 if (block_stack == 0)
3877 return 0;
3878
3879 return block_stack->data.block.cleanups;
3880}
3881
3882/* Return 1 if there are any pending cleanups at this point.
3883 If THIS_CONTOUR is nonzero, check the current contour as well.
3884 Otherwise, look only at the contours that enclose this one. */
3885
3886int
3887any_pending_cleanups (this_contour)
3888 int this_contour;
3889{
3890 struct nesting *block;
3891
3892 if (block_stack == 0)
3893 return 0;
3894
3895 if (this_contour && block_stack->data.block.cleanups != NULL)
3896 return 1;
3897 if (block_stack->data.block.cleanups == 0
3898 && (block_stack->data.block.outer_cleanups == 0
3899#if 0
3900 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3901#endif
3902 ))
3903 return 0;
3904
3905 for (block = block_stack->next; block; block = block->next)
3906 if (block->data.block.cleanups != 0)
3907 return 1;
3908
3909 return 0;
3910}
3911\f
3912/* Enter a case (Pascal) or switch (C) statement.
3913 Push a block onto case_stack and nesting_stack
3914 to accumulate the case-labels that are seen
3915 and to record the labels generated for the statement.
3916
3917 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3918 Otherwise, this construct is transparent for `exit_something'.
3919
3920 EXPR is the index-expression to be dispatched on.
3921 TYPE is its nominal type. We could simply convert EXPR to this type,
3922 but instead we take short cuts. */
3923
3924void
3925expand_start_case (exit_flag, expr, type, printname)
3926 int exit_flag;
3927 tree expr;
3928 tree type;
3929 char *printname;
3930{
3931 register struct nesting *thiscase = ALLOC_NESTING ();
3932
3933 /* Make an entry on case_stack for the case we are entering. */
3934
3935 thiscase->next = case_stack;
3936 thiscase->all = nesting_stack;
3937 thiscase->depth = ++nesting_depth;
3938 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3939 thiscase->data.case_stmt.case_list = 0;
3940 thiscase->data.case_stmt.index_expr = expr;
3941 thiscase->data.case_stmt.nominal_type = type;
3942 thiscase->data.case_stmt.default_label = 0;
3943 thiscase->data.case_stmt.num_ranges = 0;
3944 thiscase->data.case_stmt.printname = printname;
3945 thiscase->data.case_stmt.seenlabel = 0;
3946 case_stack = thiscase;
3947 nesting_stack = thiscase;
3948
3949 if (output_bytecode)
3950 {
3951 bc_expand_start_case (thiscase, expr, type, printname);
3952 return;
3953 }
3954
3955 do_pending_stack_adjust ();
3956
3957 /* Make sure case_stmt.start points to something that won't
3958 need any transformation before expand_end_case. */
3959 if (GET_CODE (get_last_insn ()) != NOTE)
3960 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3961
3962 thiscase->data.case_stmt.start = get_last_insn ();
3963}
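/* Hedged sketch of the overall switch protocol as a front end might
   drive it (caller code is illustrative, not quoted from a front end):  */
#if 0
  expand_start_case (1, index_expr, index_type, "switch statement");
  /* ... pushcase / pushcase_range once per label seen ... */
  expand_end_case (index_expr);
#endif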
3964
3965
3966/* Enter a case statement. It is assumed that the caller has pushed
3967 the current context onto the case stack. */
3968void
3969bc_expand_start_case (thiscase, expr, type, printname)
3970 struct nesting *thiscase;
3971 tree expr;
3972 tree type;
3973 char *printname;
3974{
3975 bc_expand_expr (expr);
3976 bc_expand_conversion (TREE_TYPE (expr), type);
3977
3978 /* For cases, the skip is a place we jump to that's emitted after
3979 the size of the jump table is known. */
3980
3981 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3982 bc_emit_bytecode (jump);
3983 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3984
3985#ifdef DEBUG_PRINT_CODE
3986 fputc ('\n', stderr);
3987#endif
3988}
3989
3990
3991/* Start a "dummy case statement" within which case labels are invalid
3992 and are not connected to any larger real case statement.
3993 This can be used if you don't want to let a case statement jump
3994 into the middle of certain kinds of constructs. */
3995
3996void
3997expand_start_case_dummy ()
3998{
3999 register struct nesting *thiscase = ALLOC_NESTING ();
4000
4001 /* Make an entry on case_stack for the dummy. */
4002
4003 thiscase->next = case_stack;
4004 thiscase->all = nesting_stack;
4005 thiscase->depth = ++nesting_depth;
4006 thiscase->exit_label = 0;
4007 thiscase->data.case_stmt.case_list = 0;
4008 thiscase->data.case_stmt.start = 0;
4009 thiscase->data.case_stmt.nominal_type = 0;
4010 thiscase->data.case_stmt.default_label = 0;
4011 thiscase->data.case_stmt.num_ranges = 0;
4012 case_stack = thiscase;
4013 nesting_stack = thiscase;
4014}
4015
4016/* End a dummy case statement. */
4017
4018void
4019expand_end_case_dummy ()
4020{
4021 POPSTACK (case_stack);
4022}
4023
4024/* Return the data type of the index-expression
4025 of the innermost case statement, or null if none. */
4026
4027tree
4028case_index_expr_type ()
4029{
4030 if (case_stack)
4031 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4032 return 0;
4033}
4034\f
4035/* Accumulate one case or default label inside a case or switch statement.
4036 VALUE is the value of the case (a null pointer, for a default label).
4037 The function CONVERTER, when applied to arguments T and V,
4038 converts the value V to the type T.
4039
4040 If not currently inside a case or switch statement, return 1 and do
4041 nothing. The caller will print a language-specific error message.
4042 If VALUE is a duplicate or overlaps, return 2 and do nothing
4043 except store the (first) duplicate node in *DUPLICATE.
4044 If VALUE is out of range, return 3 and do nothing.
4045 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4046 Return 0 on success.
4047
4048 Extended to handle range statements. */
4049
4050int
4051pushcase (value, converter, label, duplicate)
4052 register tree value;
4053 tree (*converter) PROTO((tree, tree));
4054 register tree label;
4055 tree *duplicate;
4056{
4057 register struct case_node **l;
4058 register struct case_node *n;
4059 tree index_type;
4060 tree nominal_type;
4061
4062 if (output_bytecode)
4063 return bc_pushcase (value, label);
4064
4065 /* Fail if not inside a real case statement. */
4066 if (! (case_stack && case_stack->data.case_stmt.start))
4067 return 1;
4068
4069 if (stack_block_stack
4070 && stack_block_stack->depth > case_stack->depth)
4071 return 5;
4072
4073 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4074 nominal_type = case_stack->data.case_stmt.nominal_type;
4075
4076 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4077 if (index_type == error_mark_node)
4078 return 0;
4079
4080 /* Convert VALUE to the type in which the comparisons are nominally done. */
4081 if (value != 0)
4082 value = (*converter) (nominal_type, value);
4083
4084 /* If this is the first label, warn if any insns have been emitted. */
4085 if (case_stack->data.case_stmt.seenlabel == 0)
4086 {
4087 rtx insn;
4088 for (insn = case_stack->data.case_stmt.start;
4089 insn;
4090 insn = NEXT_INSN (insn))
4091 {
4092 if (GET_CODE (insn) == CODE_LABEL)
4093 break;
4094 if (GET_CODE (insn) != NOTE
4095 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4096 {
4097 warning ("unreachable code at beginning of %s",
4098 case_stack->data.case_stmt.printname);
4099 break;
4100 }
4101 }
4102 }
4103 case_stack->data.case_stmt.seenlabel = 1;
4104
4105 /* Fail if this value is out of range for the actual type of the index
4106 (which may be narrower than NOMINAL_TYPE). */
4107 if (value != 0 && ! int_fits_type_p (value, index_type))
4108 return 3;
4109
4110 /* Fail if this is a duplicate or overlaps another entry. */
4111 if (value == 0)
4112 {
4113 if (case_stack->data.case_stmt.default_label != 0)
4114 {
4115 *duplicate = case_stack->data.case_stmt.default_label;
4116 return 2;
4117 }
4118 case_stack->data.case_stmt.default_label = label;
4119 }
4120 else
4121 {
4122 /* Find the elt in the chain before which to insert the new value,
4123 to keep the chain sorted in increasing order.
4124 But report an error if this element is a duplicate. */
4125 for (l = &case_stack->data.case_stmt.case_list;
4126 /* Keep going past elements distinctly less than VALUE. */
4127 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4128 l = &(*l)->right)
4129 ;
4130 if (*l)
4131 {
4132 /* Element we will insert before must be distinctly greater;
4133 overlap means error. */
4134 if (! tree_int_cst_lt (value, (*l)->low))
4135 {
4136 *duplicate = (*l)->code_label;
4137 return 2;
4138 }
4139 }
4140
4141 /* Add this label to the chain, and succeed.
4142 Copy VALUE so it is on temporary rather than momentary
4143 obstack and will thus survive till the end of the case statement. */
4144 n = (struct case_node *) oballoc (sizeof (struct case_node));
4145 n->left = 0;
4146 n->right = *l;
4147 n->high = n->low = copy_node (value);
4148 n->code_label = label;
4149 *l = n;
4150 }
4151
4152 expand_label (label);
4153 return 0;
4154}
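/* Hypothetical caller of the above (sketch only; CONVERTER stands for
   whatever language-specific conversion function the front end passes):  */
#if 0
  tree duplicate;
  int failure = pushcase (case_value, converter, label_decl, &duplicate);

  if (failure == 1)
    error ("case label not within a switch statement");
  else if (failure == 2)
    error_with_decl (duplicate, "duplicate case value");
  else if (failure == 3)
    error ("case value out of range");
#endif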
4155
4156/* Like pushcase but this case applies to all values
4157 between VALUE1 and VALUE2 (inclusive).
4158 The return value is the same as that of pushcase
4159 but there is one additional error code:
4160 4 means the specified range was empty. */
4161
4162int
4163pushcase_range (value1, value2, converter, label, duplicate)
4164 register tree value1, value2;
4165 tree (*converter) PROTO((tree, tree));
4166 register tree label;
4167 tree *duplicate;
4168{
4169 register struct case_node **l;
4170 register struct case_node *n;
4171 tree index_type;
4172 tree nominal_type;
4173
4174 /* Fail if not inside a real case statement. */
4175 if (! (case_stack && case_stack->data.case_stmt.start))
4176 return 1;
4177
4178 if (stack_block_stack
4179 && stack_block_stack->depth > case_stack->depth)
4180 return 5;
4181
4182 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4183 nominal_type = case_stack->data.case_stmt.nominal_type;
4184
4185 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4186 if (index_type == error_mark_node)
4187 return 0;
4188
4189 /* If this is the first label, warn if any insns have been emitted. */
4190 if (case_stack->data.case_stmt.seenlabel == 0)
4191 {
4192 rtx insn;
4193 for (insn = case_stack->data.case_stmt.start;
4194 insn;
4195 insn = NEXT_INSN (insn))
4196 {
4197 if (GET_CODE (insn) == CODE_LABEL)
4198 break;
4199 if (GET_CODE (insn) != NOTE
4200 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4201 {
4202 warning ("unreachable code at beginning of %s",
4203 case_stack->data.case_stmt.printname);
4204 break;
4205 }
4206 }
4207 }
4208 case_stack->data.case_stmt.seenlabel = 1;
4209
4210 /* Convert VALUEs to type in which the comparisons are nominally done. */
4211 if (value1 == 0) /* Negative infinity. */
4212 value1 = TYPE_MIN_VALUE(index_type);
4213 value1 = (*converter) (nominal_type, value1);
4214
4215 if (value2 == 0) /* Positive infinity. */
4216 value2 = TYPE_MAX_VALUE(index_type);
4217 value2 = (*converter) (nominal_type, value2);
4218
4219 /* Fail if these values are out of range. */
4220 if (! int_fits_type_p (value1, index_type))
4221 return 3;
4222
4223 if (! int_fits_type_p (value2, index_type))
4224 return 3;
4225
4226 /* Fail if the range is empty. */
4227 if (tree_int_cst_lt (value2, value1))
4228 return 4;
4229
4230 /* If the bounds are equal, turn this into the one-value case. */
4231 if (tree_int_cst_equal (value1, value2))
4232 return pushcase (value1, converter, label, duplicate);
4233
4234 /* Find the elt in the chain before which to insert the new value,
4235 to keep the chain sorted in increasing order.
4236 But report an error if this element is a duplicate. */
4237 for (l = &case_stack->data.case_stmt.case_list;
4238 /* Keep going past elements distinctly less than this range. */
4239 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4240 l = &(*l)->right)
4241 ;
4242 if (*l)
4243 {
4244 /* Element we will insert before must be distinctly greater;
4245 overlap means error. */
4246 if (! tree_int_cst_lt (value2, (*l)->low))
4247 {
4248 *duplicate = (*l)->code_label;
4249 return 2;
4250 }
4251 }
4252
4253 /* Add this label to the chain, and succeed.
4254 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4255 obstack and will thus survive till the end of the case statement. */
4256
4257 n = (struct case_node *) oballoc (sizeof (struct case_node));
4258 n->left = 0;
4259 n->right = *l;
4260 n->low = copy_node (value1);
4261 n->high = copy_node (value2);
4262 n->code_label = label;
4263 *l = n;
4264
4265 expand_label (label);
4266
4267 case_stack->data.case_stmt.num_ranges++;
4268
4269 return 0;
4270}
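/* Illustration (an assumption about front-end usage): the GNU C range
   extension `case 1 ... 5:' maps onto
   pushcase_range (one, five, converter, label, &duplicate), while
   equal bounds collapse to the single-value pushcase above.  */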
4271
4272
4273/* Accumulate one case or default label; VALUE is the value of the
4274 case, or nil for a default label. If not currently inside a case,
4275 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4276 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4277 Return 0 on success. This function is a leftover from the earlier
4278 bytecode compiler, which was based on gcc 1.37. It should be
4279 merged into pushcase. */
4280
4281int
4282bc_pushcase (value, label)
4283 tree value;
4284 tree label;
4285{
4286 struct nesting *thiscase = case_stack;
4287 struct case_node *case_label, *new_label;
4288
4289 if (! thiscase)
4290 return 1;
4291
4292 /* Fail if duplicate, overlap, or out of type range. */
4293 if (value)
4294 {
4295 value = convert (thiscase->data.case_stmt.nominal_type, value);
4296 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4297 return 3;
4298
4299 for (case_label = thiscase->data.case_stmt.case_list;
4300 case_label->left; case_label = case_label->left)
4301 if (! tree_int_cst_lt (case_label->left->high, value))
4302 break;
4303
4304 if (case_label != thiscase->data.case_stmt.case_list
4305 && ! tree_int_cst_lt (case_label->high, value)
4306 || case_label->left && ! tree_int_cst_lt (value, case_label->left->low))
4307 return 2;
4308
4309 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4310 new_label->low = new_label->high = copy_node (value);
4311 new_label->code_label = label;
4312 new_label->left = case_label->left;
4313
4314 case_label->left = new_label;
4315 thiscase->data.case_stmt.num_ranges++;
4316 }
4317 else
4318 {
4319 if (thiscase->data.case_stmt.default_label)
4320 return 2;
4321 thiscase->data.case_stmt.default_label = label;
4322 }
4323
4324 expand_label (label);
4325 return 0;
4326}
4327\f
4328/* Called when the index of a switch statement is an enumerated type
4329 and there is no default label.
4330
4331 Checks that all enumeration literals are covered by the case
4332 expressions of a switch. Also, warn if there are any extra
4333 switch cases that are *not* elements of the enumerated type.
4334
4335 If all enumeration literals were covered by the case expressions,
4336 turn one of the expressions into the default expression since it should
4337 not be possible to fall through such a switch. */
4338
4339void
4340check_for_full_enumeration_handling (type)
4341 tree type;
4342{
4343 register struct case_node *n;
4344 register struct case_node **l;
4345 register tree chain;
4346 int all_values = 1;
4347
4348 if (output_bytecode)
4349 {
4350 bc_check_for_full_enumeration_handling (type);
4351 return;
4352 }
4353
4354 /* The time complexity of this loop is currently O(N * M), with
4355 N being the number of members in the enumerated type, and
4356 M being the number of case expressions in the switch. */
4357
4358 for (chain = TYPE_VALUES (type);
4359 chain;
4360 chain = TREE_CHAIN (chain))
4361 {
4362 /* Find a match between enumeral and case expression, if possible.
4363 Quit looking when we've gone too far (since case expressions
4364 are kept sorted in ascending order). Warn about enumerators not
4365 handled in the switch statement case expression list. */
4366
4367 for (n = case_stack->data.case_stmt.case_list;
4368 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
4369 n = n->right)
4370 ;
4371
4372 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
4373 {
4374 if (warn_switch)
4375 warning ("enumeration value `%s' not handled in switch",
4376 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
4377 all_values = 0;
4378 }
4379 }
4380
4381 /* Now we go the other way around; we warn if there are case
4382 expressions that don't correspond to enumerators. This can
4383 occur since C and C++ don't enforce type-checking of
4384 assignments to enumeration variables. */
4385
4386 if (warn_switch)
4387 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4388 {
4389 for (chain = TYPE_VALUES (type);
4390 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4391 chain = TREE_CHAIN (chain))
4392 ;
4393
4394 if (!chain)
4395 {
4396 if (TYPE_NAME (type) == 0)
4397 warning ("case value `%d' not in enumerated type",
4398 TREE_INT_CST_LOW (n->low));
4399 else
4400 warning ("case value `%d' not in enumerated type `%s'",
4401 TREE_INT_CST_LOW (n->low),
4402 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4403 == IDENTIFIER_NODE)
4404 ? TYPE_NAME (type)
4405 : DECL_NAME (TYPE_NAME (type))));
4406 }
4407 if (!tree_int_cst_equal (n->low, n->high))
4408 {
4409 for (chain = TYPE_VALUES (type);
4410 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4411 chain = TREE_CHAIN (chain))
4412 ;
4413
4414 if (!chain)
4415 {
4416 if (TYPE_NAME (type) == 0)
4417 warning ("case value `%d' not in enumerated type",
4418 TREE_INT_CST_LOW (n->high));
4419 else
4420 warning ("case value `%d' not in enumerated type `%s'",
4421 TREE_INT_CST_LOW (n->high),
4422 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4423 == IDENTIFIER_NODE)
4424 ? TYPE_NAME (type)
4425 : DECL_NAME (TYPE_NAME (type))));
4426 }
4427 }
4428 }
4429
4430#if 0
4431 /* ??? This optimization is disabled because it causes valid programs to
4432 fail. ANSI C does not guarantee that an expression with enum type
4433 will have a value that is the same as one of the enumeration literals. */
4434
4435 /* If all values were found as case labels, make one of them the default
4436 label. Thus, this switch will never fall through. We arbitrarily pick
4437 the last one to make the default since this is likely the most
4438 efficient choice. */
4439
4440 if (all_values)
4441 {
4442 for (l = &case_stack->data.case_stmt.case_list;
4443 (*l)->right != 0;
4444 l = &(*l)->right)
4445 ;
4446
4447 case_stack->data.case_stmt.default_label = (*l)->code_label;
4448 *l = 0;
4449 }
4450#endif /* 0 */
4451}
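/* Example of the diagnostics this enables under -Wswitch (user code,
   illustration only):  */
#if 0
enum color { RED, GREEN, BLUE };

int f (enum color c)
{
  switch (c)            /* no default label */
    {
    case RED: return 0;
    case 4:   return 9;   /* "case value `4' not in enumerated type" */
    }                     /* "enumeration value `GREEN' not handled", etc. */
  return -1;
}
#endif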
4452
4453
4454/* Check that all enumeration literals are covered by the case
4455 expressions of a switch. Also warn if there are any cases
4456 that are not elements of the enumerated type. */
4457void
4458bc_check_for_full_enumeration_handling (type)
4459 tree type;
4460{
4461 struct nesting *thiscase = case_stack;
4462 struct case_node *c;
4463 tree e;
4464
4465 /* Check for enums not handled. */
4466 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4467 {
4468 for (c = thiscase->data.case_stmt.case_list->left;
4469 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4470 c = c->left)
4471 ;
4472 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4473 warning ("enumerated value `%s' not handled in switch",
4474 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4475 }
4476
4477 /* Check for cases not in the enumeration. */
4478 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4479 {
4480 for (e = TYPE_VALUES (type);
4481 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4482 e = TREE_CHAIN (e))
4483 ;
4484 if (! e)
4485 warning ("case value `%d' not in enumerated type `%s'",
4486 TREE_INT_CST_LOW (c->low),
4487 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4488 ? TYPE_NAME (type)
4489 : DECL_NAME (TYPE_NAME (type))));
4490 }
4491}
4492\f
4493/* Terminate a case (Pascal) or switch (C) statement
4494 in which ORIG_INDEX is the expression to be tested.
4495 Generate the code to test it and jump to the right place. */
4496
4497void
4498expand_end_case (orig_index)
4499 tree orig_index;
4500{
4501 tree minval, maxval, range, orig_minval;
4502 rtx default_label = 0;
4503 register struct case_node *n;
4504 int count;
4505 rtx index;
4506 rtx table_label;
4507 int ncases;
4508 rtx *labelvec;
4509 register int i;
4510 rtx before_case;
4511 register struct nesting *thiscase = case_stack;
4512 tree index_expr;
4513 int unsignedp;
4514
4515 if (output_bytecode)
4516 {
4517 bc_expand_end_case (orig_index);
4518 return;
4519 }
4520
4521 table_label = gen_label_rtx ();
4522 index_expr = thiscase->data.case_stmt.index_expr;
4523 unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
4524
4525 do_pending_stack_adjust ();
4526
4527 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4528 if (TREE_TYPE (index_expr) != error_mark_node)
4529 {
4530 /* If switch expression was an enumerated type, check that all
4531 enumeration literals are covered by the cases.
4532 No sense trying this if there's a default case, however. */
4533
4534 if (!thiscase->data.case_stmt.default_label
4535 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4536 && TREE_CODE (index_expr) != INTEGER_CST)
4537 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4538
4539 /* If this is the first label, warn if any insns have been emitted. */
4540 if (thiscase->data.case_stmt.seenlabel == 0)
4541 {
4542 rtx insn;
4543 for (insn = get_last_insn ();
4544 insn != case_stack->data.case_stmt.start;
4545 insn = PREV_INSN (insn))
4546 if (GET_CODE (insn) != NOTE
4547 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4548 {
4549 warning ("unreachable code at beginning of %s",
4550 case_stack->data.case_stmt.printname);
4551 break;
4552 }
4553 }
4554
4555 /* If we don't have a default-label, create one here,
4556 after the body of the switch. */
4557 if (thiscase->data.case_stmt.default_label == 0)
4558 {
4559 thiscase->data.case_stmt.default_label
4560 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4561 expand_label (thiscase->data.case_stmt.default_label);
4562 }
4563 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4564
4565 before_case = get_last_insn ();
4566
4567 /* Simplify the case-list before we count it. */
4568 group_case_nodes (thiscase->data.case_stmt.case_list);
4569
4570 /* Get upper and lower bounds of case values.
4571 Also convert all the case values to the index expr's data type. */
4572
4573 count = 0;
4574 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4575 {
4576 /* Check low and high label values are integers. */
4577 if (TREE_CODE (n->low) != INTEGER_CST)
4578 abort ();
4579 if (TREE_CODE (n->high) != INTEGER_CST)
4580 abort ();
4581
4582 n->low = convert (TREE_TYPE (index_expr), n->low);
4583 n->high = convert (TREE_TYPE (index_expr), n->high);
4584
4585 /* Count the elements and track the largest and smallest
4586 of them (treating them as signed even if they are not). */
4587 if (count++ == 0)
4588 {
4589 minval = n->low;
4590 maxval = n->high;
4591 }
4592 else
4593 {
4594 if (INT_CST_LT (n->low, minval))
4595 minval = n->low;
4596 if (INT_CST_LT (maxval, n->high))
4597 maxval = n->high;
4598 }
4599 /* A range counts double, since it requires two compares. */
4600 if (! tree_int_cst_equal (n->low, n->high))
4601 count++;
4602 }
4603
4604 orig_minval = minval;
4605
4606 /* Compute span of values. */
4607 if (count != 0)
4608 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
4609 maxval, minval));
4610
4611 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
4612 {
4613 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4614 emit_queue ();
4615 emit_jump (default_label);
4616 }
4617
4618 /* If range of values is much bigger than number of values,
4619 make a sequence of conditional branches instead of a dispatch.
4620 If the switch-index is a constant, do it this way
4621 because we can optimize it. */
4622
4623#ifndef CASE_VALUES_THRESHOLD
4624#ifdef HAVE_casesi
4625#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4626#else
4627 /* If machine does not have a case insn that compares the
4628 bounds, this means extra overhead for dispatch tables
4629 which raises the threshold for using them. */
4630#define CASE_VALUES_THRESHOLD 5
4631#endif /* HAVE_casesi */
4632#endif /* CASE_VALUES_THRESHOLD */
4633
4634 else if (TREE_INT_CST_HIGH (range) != 0
4635 || count < CASE_VALUES_THRESHOLD
4636 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4637 > 10 * count)
4638 || TREE_CODE (index_expr) == INTEGER_CST
4639 /* These will reduce to a constant. */
4640 || (TREE_CODE (index_expr) == CALL_EXPR
4641 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4642 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4643 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4644 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4645 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4646 {
4647 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4648
4649 /* If the index is a short or char that we do not have
4650 an insn to handle comparisons directly, convert it to
4651 a full integer now, rather than letting each comparison
4652 generate the conversion. */
4653
4654 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4655 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4656 == CODE_FOR_nothing))
4657 {
4658 enum machine_mode wider_mode;
4659 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4660 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4661 if (cmp_optab->handlers[(int) wider_mode].insn_code
4662 != CODE_FOR_nothing)
4663 {
4664 index = convert_to_mode (wider_mode, index, unsignedp);
4665 break;
4666 }
4667 }
4668
4669 emit_queue ();
4670 do_pending_stack_adjust ();
4671
4672 index = protect_from_queue (index, 0);
4673 if (GET_CODE (index) == MEM)
4674 index = copy_to_reg (index);
4675 if (GET_CODE (index) == CONST_INT
4676 || TREE_CODE (index_expr) == INTEGER_CST)
4677 {
4678 /* Make a tree node with the proper constant value
4679 if we don't already have one. */
4680 if (TREE_CODE (index_expr) != INTEGER_CST)
4681 {
4682 index_expr
4683 = build_int_2 (INTVAL (index),
4684 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
4685 index_expr = convert (TREE_TYPE (index_expr), index_expr);
4686 }
4687
4688 /* For constant index expressions we need only
4689 issue an unconditional branch to the appropriate
4690 target code. The job of removing any unreachable
4691 code is left to the optimization phase if the
4692 "-O" option is specified. */
4693 for (n = thiscase->data.case_stmt.case_list;
4694 n;
4695 n = n->right)
4696 {
4697 if (! tree_int_cst_lt (index_expr, n->low)
4698 && ! tree_int_cst_lt (n->high, index_expr))
4699 break;
4700 }
4701 if (n)
4702 emit_jump (label_rtx (n->code_label));
4703 else
4704 emit_jump (default_label);
4705 }
4706 else
4707 {
4708 /* If the index expression is not constant we generate
4709 a binary decision tree to select the appropriate
4710 target code. This is done as follows:
4711
4712 The list of cases is rearranged into a binary tree,
4713 nearly optimal assuming equal probability for each case.
4714
4715 The tree is transformed into RTL, eliminating
4716 redundant test conditions at the same time.
4717
4718 If program flow could reach the end of the
4719 decision tree an unconditional jump to the
4720 default code is emitted. */
4721
4722 use_cost_table
4723 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4724 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4725 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4726 NULL_PTR);
4727 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4728 default_label, TREE_TYPE (index_expr));
4729 emit_jump_if_reachable (default_label);
4730 }
4731 }
4732 else
4733 {
4734 int win = 0;
4735#ifdef HAVE_casesi
4736 if (HAVE_casesi)
4737 {
4738 enum machine_mode index_mode = SImode;
4739 int index_bits = GET_MODE_BITSIZE (index_mode);
4740
4741 /* Convert the index to SImode. */
4742 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
4743 > GET_MODE_BITSIZE (index_mode))
4744 {
4745 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
4746 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4747
4748 /* We must handle the endpoints in the original mode. */
4749 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
4750 index_expr, minval);
4751 minval = integer_zero_node;
4752 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4753 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4754 emit_jump_insn (gen_bltu (default_label));
4755 /* Now we can safely truncate. */
4756 index = convert_to_mode (index_mode, index, 0);
4757 }
4758 else
4759 {
4760 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
4761 index_expr = convert (type_for_size (index_bits, 0),
4762 index_expr);
4763 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4764 }
4765 emit_queue ();
4766 index = protect_from_queue (index, 0);
4767 do_pending_stack_adjust ();
4768
4769 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
4770 VOIDmode, 0),
4771 expand_expr (range, NULL_RTX,
4772 VOIDmode, 0),
4773 table_label, default_label));
4774 win = 1;
4775 }
4776#endif
4777#ifdef HAVE_tablejump
4778 if (! win && HAVE_tablejump)
4779 {
4780 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4781 fold (build (MINUS_EXPR,
4782 TREE_TYPE (index_expr),
4783 index_expr, minval)));
4784 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4785 emit_queue ();
4786 index = protect_from_queue (index, 0);
4787 do_pending_stack_adjust ();
4788
4789 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
4790 expand_expr (range, NULL_RTX, VOIDmode, 0),
4791 table_label, default_label);
4792 win = 1;
4793 }
4794#endif
4795 if (! win)
4796 abort ();
4797
4798 /* Get table of labels to jump to, in order of case index. */
4799
4800 ncases = TREE_INT_CST_LOW (range) + 1;
4801 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4802 bzero (labelvec, ncases * sizeof (rtx));
4803
4804 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4805 {
4806 register HOST_WIDE_INT i
4807 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4808
4809 while (1)
4810 {
4811 labelvec[i]
4812 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4813 if (i + TREE_INT_CST_LOW (orig_minval)
4814 == TREE_INT_CST_LOW (n->high))
4815 break;
4816 i++;
4817 }
4818 }
4819
4820 /* Fill in the gaps with the default. */
4821 for (i = 0; i < ncases; i++)
4822 if (labelvec[i] == 0)
4823 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4824
4825 /* Output the table */
4826 emit_label (table_label);
4827
4828 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4829 were an expression, instead of an #ifdef/#ifndef. */
4830 if (
4831#ifdef CASE_VECTOR_PC_RELATIVE
4832 1 ||
4833#endif
4834 flag_pic)
4835 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4836 gen_rtx (LABEL_REF, Pmode, table_label),
4837 gen_rtvec_v (ncases, labelvec)));
4838 else
4839 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4840 gen_rtvec_v (ncases, labelvec)));
4841
4842 /* If the case insn drops through the table,
4843 after the table we must jump to the default-label.
4844 Otherwise record no drop-through after the table. */
4845#ifdef CASE_DROPS_THROUGH
4846 emit_jump (default_label);
4847#else
4848 emit_barrier ();
4849#endif
4850 }
4851
4852 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4853 reorder_insns (before_case, get_last_insn (),
4854 thiscase->data.case_stmt.start);
4855 }
4856 if (thiscase->exit_label)
4857 emit_label (thiscase->exit_label);
4858
4859 POPSTACK (case_stack);
4860
4861 free_temp_slots ();
4862}
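/* Worked example of the dispatch heuristic above, assuming the default
   CASE_VALUES_THRESHOLD of 4 or 5: six case values spanning 0..19 give
   count >= threshold and range <= 10 * count, so a jump table is used;
   the same six values spread over 0..999 fail the range test and fall
   back to the balanced tree of comparisons.  */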
4863
4864
4865/* Terminate a case statement. EXPR is the original index
4866 expression. */
4867void
4868bc_expand_end_case (expr)
4869 tree expr;
4870{
4871 struct nesting *thiscase = case_stack;
4872 enum bytecode_opcode opcode;
4873 struct bc_label *jump_label;
4874 struct case_node *c;
4875
4876 bc_emit_bytecode (jump);
4877 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4878
4879#ifdef DEBUG_PRINT_CODE
4880 fputc ('\n', stderr);
4881#endif
4882
4883 /* Now that the size of the jump table is known, emit the actual
4884 indexed jump instruction. */
4885 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4886
4887 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4888 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4889 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4890
4891 bc_emit_bytecode (opcode);
4892
4893 /* Now emit the case instruction's literal arguments, in order.
4894 In addition to the value on the stack, it uses:
4895 1. The address of the jump table.
4896 2. The size of the jump table.
4897 3. The default label. */
4898
4899 jump_label = bc_get_bytecode_label ();
4900 bc_emit_bytecode_labelref (jump_label);
4901 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
4902 sizeof thiscase->data.case_stmt.num_ranges);
4903
4904 if (thiscase->data.case_stmt.default_label)
4905 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
4906 else
4907 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4908
4909 /* Output the jump table. */
4910
4911 bc_align_bytecode (3 /* PTR_ALIGN */);
4912 bc_emit_bytecode_labeldef (jump_label);
4913
4914 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
4915 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4916 {
4917 opcode = TREE_INT_CST_LOW (c->low);
4918 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4919
4920 opcode = TREE_INT_CST_LOW (c->high);
4921 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4922
4923 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4924 }
4925 else
4926 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
4927 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4928 {
4929 bc_emit_bytecode_DI_const (c->low);
4930 bc_emit_bytecode_DI_const (c->high);
4931
4932 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4933 }
4934 else
4935 /* Bad mode */
4936 abort ();
4937
4938
4939 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
4940
4941 /* Possibly issue enumeration warnings. */
4942
4943 if (!thiscase->data.case_stmt.default_label
4944 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
4945 && TREE_CODE (expr) != INTEGER_CST
4946 && warn_switch)
4947 check_for_full_enumeration_handling (TREE_TYPE (expr));
4948
4949
4950#ifdef DEBUG_PRINT_CODE
4951 fputc ('\n', stderr);
4952#endif
4953
4954 POPSTACK (case_stack);
4955}
4956
4957
4958/* Return unique bytecode ID. */
4959int
4960bc_new_uid ()
4961{
4962 static int bc_uid = 0;
4963
4964 return (++bc_uid);
4965}
4966
4967/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4968
4969static void
4970do_jump_if_equal (op1, op2, label, unsignedp)
4971 rtx op1, op2, label;
4972 int unsignedp;
4973{
4974 if (GET_CODE (op1) == CONST_INT
4975 && GET_CODE (op2) == CONST_INT)
4976 {
4977 if (INTVAL (op1) == INTVAL (op2))
4978 emit_jump (label);
4979 }
4980 else
4981 {
4982 enum machine_mode mode = GET_MODE (op1);
4983 if (mode == VOIDmode)
4984 mode = GET_MODE (op2);
4985 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
4986 emit_jump_insn (gen_beq (label));
4987 }
4988}
4989\f
4990/* Not all case values are encountered equally. This function
4991 uses a heuristic to weight case labels, in cases where that
4992 looks like a reasonable thing to do.
4993
4994 Right now, all we try to guess is text, and we establish the
4995 following weights:
4996
4997 chars above space: 16
4998 digits: 16
4999 default: 12
5000 space, punct: 8
5001 tab: 4
5002 newline: 2
5003 other "\" chars: 1
5004 remaining chars: 0
5005
5006 If we find any cases in the switch that are not either -1 or in the range
5007 of valid ASCII characters, or are control characters other than those
5008 commonly used with "\", don't treat this switch scanning text.
5009
5010 Return 1 if these nodes are suitable for cost estimation, otherwise
5011 return 0. */
5012
5013static int
5014estimate_case_costs (node)
5015 case_node_ptr node;
5016{
5017 tree min_ascii = build_int_2 (-1, -1);
5018 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5019 case_node_ptr n;
5020 int i;
5021
5022 /* If we haven't already made the cost table, make it now. Note that the
5023 lower bound of the table is -1, not zero. */
5024
5025 if (cost_table == NULL)
5026 {
5027 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5028 bzero (cost_table - 1, 129 * sizeof (short));
5029
5030 for (i = 0; i < 128; i++)
5031 {
5032 if (isalnum (i))
5033 cost_table[i] = 16;
5034 else if (ispunct (i))
5035 cost_table[i] = 8;
5036 else if (iscntrl (i))
5037 cost_table[i] = -1;
5038 }
5039
5040 cost_table[' '] = 8;
5041 cost_table['\t'] = 4;
5042 cost_table['\0'] = 4;
5043 cost_table['\n'] = 2;
5044 cost_table['\f'] = 1;
5045 cost_table['\v'] = 1;
5046 cost_table['\b'] = 1;
5047 }
5048
5049 /* See if all the case expressions look like text. It is text if the
5050 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5051 as signed arithmetic since we don't want to ever access cost_table with a
5052 value less than -1. Also check that none of the constants in a range
5053 are strange control characters. */
5054
5055 for (n = node; n; n = n->right)
5056 {
5057 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5058 return 0;
5059
5060 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5061 if (cost_table[i] < 0)
5062 return 0;
5063 }
5064
5065 /* All interesting values are within the range of interesting
5066 ASCII characters. */
5067 return 1;
5068}
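/* Worked example of the weights above (illustration): a switch over
   'a', ' ' and '\n' is costed 16 + 8 + 2 = 26, while any case value
   outside -1..127, or a control character not given a weight in the
   table, makes this function return 0 and disables cost weighting.  */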
5069
5070/* Scan an ordered list of case nodes
5071 combining those with consecutive values or ranges.
5072
5073 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5074
static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && next_real_insn (label_rtx (np->code_label)) == lb
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
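
/* Example (illustration only): given nodes for 1, 2 and 3 that all
   jump to the same label, the loop above widens the first node to the
   range 1..3 and unlinks the other two.  The overflow guard is what
   keeps, say, `case UINT_MAX:' followed by `case 0:' from being
   treated as consecutive when node->high + 1 wraps around to 0.  */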

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on this branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total cost.
                 Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
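
/* Example (illustration only): five equally weighted nodes 1 2 3 4 5
   split at 3, giving roughly

		 3
		/ \
	       1   4
		\   \
		 2   5

   so the emitted decision tree needs at most two comparisons below
   the root instead of walking a five-entry list.  */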
\f
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks the range of the current
   node minus one, then the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
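
/* Example (illustration only): if this node is for 51 and some parent
   tested for 50, control can only get here on the greater-than side
   of that parent, so the loop above finds pnode->high == 50 (our low
   minus one) and the lower-bound test is reported redundant.  */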

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks the range of the current
   node plus one, then the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
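
/* Example (illustration only): a node for 51 whose parents already
   tested for 50 and for 52 is bounded on both sides, so
   emit_case_nodes (below) can replace its compare-and-branch pair
   with a single unconditional jump to the case label.  */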

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
\f
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
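
/* Example (illustration only): for the balanced tree rooted at 3
   sketched after balance_case_nodes, with no cost table and a signed
   index, the code emitted here has roughly this shape:

	if (index == 3) goto L3;
	if (index > 3) goto test_right;
	if (index == 1) goto L1;
	if (index == 2) goto L2;
	goto default;
     test_right:
	if (index == 4) goto L4;
	if (index == 5) goto L5;

   where the equality tests come from do_jump_if_equal and the
   dispatch on `index > 3' from emit_cmp_insn plus the gen_bgt
   pattern; any final jump to the default is left to the caller.  */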
\f
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
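
/* Usage note (illustration only): as far as these definitions go, the
   unroller is expected to call find_loop_tree_blocks () once before
   duplicating any insns, so block_vector captures the original BLOCK
   for each block note, and unroll_block_trees () once afterwards so
   reorder_blocks can rebuild the BLOCK tree to match the copied
   notes.  */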