/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
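
/* As an illustrative sketch only (not part of this file): a front end's
   statement parser might drive these entry points for `if (cond) stmt;'
   roughly as below.  parse_expression and parse_statement stand in for
   hypothetical parser routines; they are not functions defined in GCC.  */
#if 0
  {
    tree cond = parse_expression ();
    expand_start_cond (cond, 0);  /* emit the test; 0 => no exit label */
    parse_statement ();           /* then-clause expands itself */
    expand_end_cond ();           /* emit the label for the join point */
  }
#endif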

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

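/* As an illustrative sketch only (not part of this file): for the C
   statement

	switch (x) { case 1: ...  case 4 ... 6: ...  default: ...  }

   the front end (via pushcase/pushcase_range) leaves a chain through
   the RIGHT fields, sorted by value, roughly

	{low=1, high=1, code_label=L1, right=<next>}  ->
	{low=4, high=6, code_label=L2, right=0}

   where `case 4 ... 6' is the GNU C range extension.  The `default'
   label is recorded separately, in the case_stmt nesting structure
   below, not as a case_node.  */
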
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static void bc_expand_goto_internal ();
static int expand_fixup ();
static void bc_expand_fixup ();
void fixup_gotos ();
static void bc_fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
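
/* As an illustrative sketch only (not part of this file): a front end
   might drive this machinery for `while (cond) body' roughly as below.
   expand_start_loop pushes a `struct nesting' on loop_stack (and on
   nesting_stack); expand_end_loop pops it off again.  */
#if 0
  expand_start_loop (1);		/* 1 => loop is visible to `break' */
  expand_exit_loop_if_false (0, cond);	/* 0 => exit the innermost loop */
  /* ... expand the loop body here ... */
  expand_end_loop ();
#endif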

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
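
/* As an illustrative sketch only (not part of this file): the various
   `expand_end_WHATEVER' routines use this to pop their construct, and
   everything nested within it, in one shot, e.g.

	POPSTACK (loop_stack);

   leaves loop_stack at the enclosing loop (if any) and also unwinds
   cond_stack, block_stack, etc. past everything nested in the loop.  */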
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
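
/* As an illustrative sketch only (not part of this file): GNU C code
   that forces a fixup.  At the `goto', LAB is not yet defined, and the
   block being exited resets the stack pointer because of the
   variable-size array, so the proper restore code cannot be emitted
   until the label definition is finally seen.  */
#if 0
  {
    {
      int n = f ();
      char buf[n];	/* variable size => contour resets the stack */
      goto lab;		/* label not yet defined: fixup is queued */
    }
  lab: ;
  }
#endif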

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow.  */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch.  */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled.  */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack.  */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
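
/* Illustrative note (not part of this file): these two routines are
   called from the function-context switching code in function.c
   (push_function_context/pop_function_context) so that compiling a
   nested function can suspend, and later resume, the statement state
   of its containing function.  */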
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}
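
/* As an illustrative sketch only (not part of this file): the GNU C
   extension this routine expands.  The front end passes the pointer
   expression from `goto *EXP;' as EXP.  */
#if 0
  {
    void *ptr = &&lab;	/* GNU C label-value extension */
    goto *ptr;		/* expanded by expand_computed_goto */
  lab: ;
  }
#endif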
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (DECL_RTL (label)->bc_label))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
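
/* As an illustrative sketch only (not part of this file): the GNU C
   nested-function case taken by the nonlocal path above.  The label
   must be declared with `__label__' so that the nested function can
   jump to it; expand_goto then restores the containing function's
   frame and stack pointers before jumping.  */
#if 0
  void
  outer ()
  {
    __label__ out;
    void inner () { goto out; }	/* nonlocal goto */
    inner ();
  out: ;
  }
#endif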

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, label->bc_label, body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {

      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (block->data.block.first_insn->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)

	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}


/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);


	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}


/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
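
/* As an illustrative sketch only (not part of this file): for the GNU C
   statement

	asm volatile ("addl %1,%0" : "=r" (sum) : "g" (delta) : "cc");

   STRING is the template "addl %1,%0", OUTPUTS pairs `sum' with the
   constraint "=r", INPUTS pairs `delta' with "g", CLOBBERS names "cc"
   (handled specially below, like "memory"), and VOL is nonzero.  */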

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print error now so don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, QImode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
1575\f
1576/* Generate RTL to evaluate the expression EXP
1577 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1578
1579void
1580expand_expr_stmt (exp)
1581 tree exp;
1582{
ca695ac9
JB
1583 if (output_bytecode)
1584 {
1585 int org_stack_depth = stack_depth;
1586
1587 bc_expand_expr (exp);
1588
1589 /* Restore stack depth */
1590 if (stack_depth < org_stack_depth)
1591 abort ();
1592
1593 bc_emit_instruction (drop);
1594
1595 last_expr_type = TREE_TYPE (exp);
1596 return;
1597 }
1598
28d81abb
RK
1599 /* If -W, warn about statements with no side effects,
1600 except for an explicit cast to void (e.g. for assert()), and
1601 except inside a ({...}) where they may be useful. */
1602 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1603 {
1604 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1605 && !(TREE_CODE (exp) == CONVERT_EXPR
1606 && TREE_TYPE (exp) == void_type_node))
1607 warning_with_file_and_line (emit_filename, emit_lineno,
1608 "statement with no effect");
1609 else if (warn_unused)
1610 warn_if_unused_value (exp);
1611 }
1612 last_expr_type = TREE_TYPE (exp);
1613 if (! flag_syntax_only)
37366632
RK
1614 last_expr_value = expand_expr (exp,
1615 (expr_stmts_for_value
1616 ? NULL_RTX : const0_rtx),
28d81abb
RK
1617 VOIDmode, 0);
1618
1619 /* If all we do is reference a volatile value in memory,
1620 copy it to a register to be sure it is actually touched. */
1621 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1622 && TREE_THIS_VOLATILE (exp))
1623 {
6a5bbbe6
RS
1624 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1625 ;
1626 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
28d81abb
RK
1627 copy_to_reg (last_expr_value);
1628 else
ddbe9812
RS
1629 {
1630 rtx lab = gen_label_rtx ();
1631
1632 /* Compare the value with itself to reference it. */
1633 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1634 expand_expr (TYPE_SIZE (last_expr_type),
37366632 1635 NULL_RTX, VOIDmode, 0),
ddbe9812
RS
1636 BLKmode, 0,
1637 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1638 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1639 emit_label (lab);
1640 }
1641 }
1642
1643 /* If this expression is part of a ({...}) and is in memory, we may have
1644 to preserve temporaries. */
1645 preserve_temp_slots (last_expr_value);
1646
1647 /* Free any temporaries used to evaluate this expression. Any temporary
1648 used as a result of this expression will already have been preserved
1649 above. */
1650 free_temp_slots ();
1651
1652 emit_queue ();
1653}
1654
1655/* Warn if EXP contains any computations whose results are not used.
1656 Return 1 if a warning is printed; 0 otherwise. */
1657
1658static int
1659warn_if_unused_value (exp)
1660 tree exp;
1661{
1662 if (TREE_USED (exp))
1663 return 0;
1664
1665 switch (TREE_CODE (exp))
1666 {
1667 case PREINCREMENT_EXPR:
1668 case POSTINCREMENT_EXPR:
1669 case PREDECREMENT_EXPR:
1670 case POSTDECREMENT_EXPR:
1671 case MODIFY_EXPR:
1672 case INIT_EXPR:
1673 case TARGET_EXPR:
1674 case CALL_EXPR:
1675 case METHOD_CALL_EXPR:
1676 case RTL_EXPR:
1677 case WITH_CLEANUP_EXPR:
1678 case EXIT_EXPR:
1679 /* We don't warn about COND_EXPR because it may be a useful
1680 construct if either arm contains a side effect. */
1681 case COND_EXPR:
1682 return 0;
1683
1684 case BIND_EXPR:
1685 /* For a binding, warn if no side effect within it. */
1686 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1687
1688 case TRUTH_ORIF_EXPR:
1689 case TRUTH_ANDIF_EXPR:
1690 /* In && or ||, warn if 2nd operand has no side effect. */
1691 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1692
1693 case COMPOUND_EXPR:
1694 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1695 return 1;
1696 /* Let people do `(foo (), 0)' without a warning. */
1697 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1698 return 0;
1699 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1700
1701 case NOP_EXPR:
1702 case CONVERT_EXPR:
 1703 case NON_LVALUE_EXPR:
1704 /* Don't warn about values cast to void. */
1705 if (TREE_TYPE (exp) == void_type_node)
1706 return 0;
1707 /* Don't warn about conversions not explicit in the user's program. */
1708 if (TREE_NO_UNUSED_WARNING (exp))
1709 return 0;
1710 /* Assignment to a cast usually results in a cast of a modify.
1711 Don't complain about that. */
1712 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
1713 return 0;
1714 /* Sometimes it results in a cast of a cast of a modify.
1715 Don't complain about that. */
1716 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
1717 || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
1718 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
1719 return 0;
1720
1721 default:
1722 /* Referencing a volatile value is a side effect, so don't warn. */
1723 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1724 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1725 && TREE_THIS_VOLATILE (exp))
1726 return 0;
1727 warning_with_file_and_line (emit_filename, emit_lineno,
1728 "value computed is not used");
1729 return 1;
1730 }
1731}
1732
1733/* Clear out the memory of the last expression evaluated. */
1734
1735void
1736clear_last_expr ()
1737{
1738 last_expr_type = 0;
1739}
1740
1741/* Begin a statement which will return a value.
1742 Return the RTL_EXPR for this statement expr.
1743 The caller must save that value and pass it to expand_end_stmt_expr. */
1744
1745tree
1746expand_start_stmt_expr ()
1747{
1748 int momentary;
1749 tree t;
1750
1751 /* When generating bytecode just note down the stack depth */
1752 if (output_bytecode)
1753 return (build_int_2 (stack_depth, 0));
1754
1755 /* Make the RTL_EXPR node temporary, not momentary,
1756 so that rtl_expr_chain doesn't become garbage. */
1757 momentary = suspend_momentary ();
1758 t = make_node (RTL_EXPR);
1759 resume_momentary (momentary);
1760 start_sequence ();
1761 NO_DEFER_POP;
1762 expr_stmts_for_value++;
1763 return t;
1764}
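
/* As an illustrative sketch (hypothetical; not code from any actual
   front end), the GNU C construct `({ a; b; })' would be expanded by
   a sequence like the one below, after which T is an RTL_EXPR
   carrying both the insns and the value of the whole group.  */
#if 0
  tree t = expand_start_stmt_expr ();
  expand_expr_stmt (a);		/* each statement, in order */
  expand_expr_stmt (b);		/* the last one supplies the value */
  t = expand_end_stmt_expr (t);
#endif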
1765
1766/* Restore the previous state at the end of a statement that returns a value.
1767 Returns a tree node representing the statement's value and the
1768 insns to compute the value.
1769
1770 The nodes of that expression have been freed by now, so we cannot use them.
1771 But we don't want to do that anyway; the expression has already been
1772 evaluated and now we just want to use the value. So generate a RTL_EXPR
1773 with the proper type and RTL value.
1774
1775 If the last substatement was not an expression,
1776 return something with type `void'. */
1777
1778tree
1779expand_end_stmt_expr (t)
1780 tree t;
1781{
1782 if (output_bytecode)
1783 {
1784 int i;
1785 tree t;
1786
1787
1788 /* At this point, all expressions have been evaluated in order.
1789 However, all expression values have been popped when evaluated,
1790 which means we have to recover the last expression value. This is
1791 the last value removed by means of a `drop' instruction. Instead
1792 of adding code to inhibit dropping the last expression value, it
1793 is here recovered by undoing the `drop'. Since `drop' is
 1794 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1795 [-1]'. */
1796
1797 bc_adjust_stack (-1);
1798
1799 if (!last_expr_type)
1800 last_expr_type = void_type_node;
1801
1802 t = make_node (RTL_EXPR);
1803 TREE_TYPE (t) = last_expr_type;
1804 RTL_EXPR_RTL (t) = NULL;
1805 RTL_EXPR_SEQUENCE (t) = NULL;
1806
1807 /* Don't consider deleting this expr or containing exprs at tree level. */
1808 TREE_THIS_VOLATILE (t) = 1;
1809
1810 last_expr_type = 0;
1811 return t;
1812 }
1813
1814 OK_DEFER_POP;
1815
1816 if (last_expr_type == 0)
1817 {
1818 last_expr_type = void_type_node;
1819 last_expr_value = const0_rtx;
1820 }
1821 else if (last_expr_value == 0)
1822 /* There are some cases where this can happen, such as when the
1823 statement is void type. */
1824 last_expr_value = const0_rtx;
1825 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1826 /* Remove any possible QUEUED. */
1827 last_expr_value = protect_from_queue (last_expr_value, 0);
1828
1829 emit_queue ();
1830
1831 TREE_TYPE (t) = last_expr_type;
1832 RTL_EXPR_RTL (t) = last_expr_value;
1833 RTL_EXPR_SEQUENCE (t) = get_insns ();
1834
1835 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1836
1837 end_sequence ();
1838
1839 /* Don't consider deleting this expr or containing exprs at tree level. */
1840 TREE_SIDE_EFFECTS (t) = 1;
1841 /* Propagate volatility of the actual RTL expr. */
1842 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1843
1844 last_expr_type = 0;
1845 expr_stmts_for_value--;
1846
1847 return t;
1848}
1849\f
1850/* The exception handling nesting looks like this:
1851
1852 <-- Level N-1
1853 { <-- exception handler block
1854 <-- Level N
1855 <-- in an exception handler
1856 { <-- try block
1857 : <-- in a TRY block
1858 : <-- in an exception handler
1859 :
1860 }
1861
1862 { <-- except block
1863 : <-- in an except block
1864 : <-- in an exception handler
1865 :
1866 }
1867
1868 }
 1869*/
1870
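/* As a schematic sketch of the calling protocol (hypothetical; the
   exact sequence is chosen by the language front end), a try/except
   construct would be expanded roughly as below, where TRY_COND and EX
   are trees supplied by the front end.  expand_end_except returns the
   exceptions raised but not handled; the front end must deal with
   those itself.  */
#if 0
  expand_start_try (try_cond, 0, 1);
  /* ... expand the body of the try block ... */
  expand_end_try ();
  expand_start_except (0, 1);
  expand_catch (ex);		/* once per handled exception */
  /* ... expand the handler code ... */
  expand_end_catch ();
  unhandled = expand_end_except ();
#endif
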
1871/* Return nonzero iff in a try block at level LEVEL. */
1872
1873int
1874in_try_block (level)
1875 int level;
1876{
1877 struct nesting *n = except_stack;
1878 while (1)
1879 {
1880 while (n && n->data.except_stmt.after_label != 0)
1881 n = n->next;
1882 if (n == 0)
1883 return 0;
1884 if (level == 0)
1885 return n != 0;
1886 level--;
1887 n = n->next;
1888 }
1889}
1890
1891/* Return nonzero iff in an except block at level LEVEL. */
1892
1893int
1894in_except_block (level)
1895 int level;
1896{
1897 struct nesting *n = except_stack;
1898 while (1)
1899 {
1900 while (n && n->data.except_stmt.after_label == 0)
1901 n = n->next;
1902 if (n == 0)
1903 return 0;
1904 if (level == 0)
1905 return n != 0;
1906 level--;
1907 n = n->next;
1908 }
1909}
1910
1911/* Return nonzero iff in an exception handler at level LEVEL. */
1912
1913int
1914in_exception_handler (level)
1915 int level;
1916{
1917 struct nesting *n = except_stack;
1918 while (n && level--)
1919 n = n->next;
1920 return n != 0;
1921}
1922
1923/* Record the fact that the current exception nesting raises
1924 exception EX. If not in an exception handler, return 0. */
1925int
1926expand_raise (ex)
1927 tree ex;
1928{
1929 tree *raises_ptr;
1930
1931 if (except_stack == 0)
1932 return 0;
1933 raises_ptr = &except_stack->data.except_stmt.raised;
1934 if (! value_member (ex, *raises_ptr))
1935 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1936 return 1;
1937}
1938
1939/* Generate RTL for the start of a try block.
1940
1941 TRY_CLAUSE is the condition to test to enter the try block. */
1942
1943void
1944expand_start_try (try_clause, exitflag, escapeflag)
1945 tree try_clause;
1946 int exitflag;
1947 int escapeflag;
1948{
1949 struct nesting *thishandler = ALLOC_NESTING ();
1950
1951 /* Make an entry on cond_stack for the cond we are entering. */
1952
1953 thishandler->next = except_stack;
1954 thishandler->all = nesting_stack;
1955 thishandler->depth = ++nesting_depth;
1956 thishandler->data.except_stmt.raised = 0;
1957 thishandler->data.except_stmt.handled = 0;
1958 thishandler->data.except_stmt.first_insn = get_insns ();
1959 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1960 thishandler->data.except_stmt.unhandled_label = 0;
1961 thishandler->data.except_stmt.after_label = 0;
1962 thishandler->data.except_stmt.escape_label
1963 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
1964 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
1965 except_stack = thishandler;
1966 nesting_stack = thishandler;
1967
 1968 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
1969}
1970
1971/* End of a TRY block. Nothing to do for now. */
1972
1973void
1974expand_end_try ()
1975{
1976 except_stack->data.except_stmt.after_label = gen_label_rtx ();
1977 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1978 NULL_RTX);
1979}
1980
1981/* Start an `except' nesting contour.
1982 EXITFLAG says whether this contour should be able to `exit' something.
1983 ESCAPEFLAG says whether this contour should be escapable. */
1984
1985void
1986expand_start_except (exitflag, escapeflag)
1987 int exitflag;
1988 int escapeflag;
1989{
1990 if (exitflag)
1991 {
1992 struct nesting *n;
1993 /* An `exit' from catch clauses goes out to next exit level,
1994 if there is one. Otherwise, it just goes to the end
1995 of the construct. */
1996 for (n = except_stack->next; n; n = n->next)
1997 if (n->exit_label != 0)
1998 {
1999 except_stack->exit_label = n->exit_label;
2000 break;
2001 }
2002 if (n == 0)
2003 except_stack->exit_label = except_stack->data.except_stmt.after_label;
2004 }
2005 if (escapeflag)
2006 {
2007 struct nesting *n;
2008 /* An `escape' from catch clauses goes out to next escape level,
2009 if there is one. Otherwise, it just goes to the end
2010 of the construct. */
2011 for (n = except_stack->next; n; n = n->next)
2012 if (n->data.except_stmt.escape_label != 0)
2013 {
2014 except_stack->data.except_stmt.escape_label
2015 = n->data.except_stmt.escape_label;
2016 break;
2017 }
2018 if (n == 0)
2019 except_stack->data.except_stmt.escape_label
2020 = except_stack->data.except_stmt.after_label;
2021 }
2022 do_pending_stack_adjust ();
2023 emit_label (except_stack->data.except_stmt.except_label);
2024}
2025
2026/* Generate code to `escape' from an exception contour. This
2027 is like `exiting', but does not conflict with constructs which
2028 use `exit_label'.
2029
2030 Return nonzero if this contour is escapable, otherwise
2031 return zero, and language-specific code will emit the
2032 appropriate error message. */
2033int
2034expand_escape_except ()
2035{
2036 struct nesting *n;
2037 last_expr_type = 0;
2038 for (n = except_stack; n; n = n->next)
2039 if (n->data.except_stmt.escape_label != 0)
2040 {
2041 expand_goto_internal (NULL_TREE,
2042 n->data.except_stmt.escape_label, NULL_RTX);
2043 return 1;
2044 }
2045
2046 return 0;
2047}
2048
 2049/* Finish processing an `except' contour.
 2050 Culls out all exceptions which might be raised but not
2051 handled, and returns the list to the caller.
2052 Language-specific code is responsible for dealing with these
2053 exceptions. */
2054
2055tree
2056expand_end_except ()
2057{
2058 struct nesting *n;
2059 tree raised = NULL_TREE;
2060
2061 do_pending_stack_adjust ();
2062 emit_label (except_stack->data.except_stmt.after_label);
2063
2064 n = except_stack->next;
2065 if (n)
2066 {
2067 /* Propagate exceptions raised but not handled to next
2068 highest level. */
2069 tree handled = except_stack->data.except_stmt.raised;
2070 if (handled != void_type_node)
2071 {
2072 tree prev = NULL_TREE;
2073 raised = except_stack->data.except_stmt.raised;
2074 while (handled)
2075 {
2076 tree this_raise;
2077 for (this_raise = raised, prev = 0; this_raise;
2078 this_raise = TREE_CHAIN (this_raise))
2079 {
2080 if (value_member (TREE_VALUE (this_raise), handled))
2081 {
2082 if (prev)
2083 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2084 else
2085 {
2086 raised = TREE_CHAIN (raised);
2087 if (raised == NULL_TREE)
2088 goto nada;
2089 }
2090 }
2091 else
2092 prev = this_raise;
2093 }
2094 handled = TREE_CHAIN (handled);
2095 }
2096 if (prev == NULL_TREE)
2097 prev = raised;
2098 if (prev)
2099 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2100 nada:
2101 n->data.except_stmt.raised = raised;
2102 }
2103 }
2104
2105 POPSTACK (except_stack);
2106 last_expr_type = 0;
2107 return raised;
2108}
2109
2110/* Record that exception EX is caught by this exception handler.
2111 Return nonzero if in exception handling construct, otherwise return 0. */
2112int
2113expand_catch (ex)
2114 tree ex;
2115{
2116 tree *raises_ptr;
2117
2118 if (except_stack == 0)
2119 return 0;
2120 raises_ptr = &except_stack->data.except_stmt.handled;
2121 if (*raises_ptr != void_type_node
2122 && ex != NULL_TREE
2123 && ! value_member (ex, *raises_ptr))
2124 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2125 return 1;
2126}
2127
2128/* Record that this exception handler catches all exceptions.
2129 Return nonzero if in exception handling construct, otherwise return 0. */
2130
2131int
2132expand_catch_default ()
2133{
2134 if (except_stack == 0)
2135 return 0;
2136 except_stack->data.except_stmt.handled = void_type_node;
2137 return 1;
2138}
2139
2140int
2141expand_end_catch ()
2142{
2143 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2144 return 0;
2145 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
2146 NULL_RTX);
2147 return 1;
2148}
2149\f
2150/* Generate RTL for the start of an if-then. COND is the expression
2151 whose truth should be tested.
2152
2153 If EXITFLAG is nonzero, this conditional is visible to
2154 `exit_something'. */
2155
2156void
2157expand_start_cond (cond, exitflag)
2158 tree cond;
2159 int exitflag;
2160{
2161 struct nesting *thiscond = ALLOC_NESTING ();
2162
2163 /* Make an entry on cond_stack for the cond we are entering. */
2164
2165 thiscond->next = cond_stack;
2166 thiscond->all = nesting_stack;
2167 thiscond->depth = ++nesting_depth;
2168 thiscond->data.cond.next_label = gen_label_rtx ();
2169 /* Before we encounter an `else', we don't need a separate exit label
2170 unless there are supposed to be exit statements
2171 to exit this conditional. */
2172 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2173 thiscond->data.cond.endif_label = thiscond->exit_label;
2174 cond_stack = thiscond;
2175 nesting_stack = thiscond;
2176
2177 if (output_bytecode)
2178 bc_expand_start_cond (cond, exitflag);
2179 else
2180 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2181}
2182
2183/* Generate RTL between then-clause and the elseif-clause
2184 of an if-then-elseif-.... */
2185
2186void
2187expand_start_elseif (cond)
2188 tree cond;
2189{
2190 if (cond_stack->data.cond.endif_label == 0)
2191 cond_stack->data.cond.endif_label = gen_label_rtx ();
2192 emit_jump (cond_stack->data.cond.endif_label);
2193 emit_label (cond_stack->data.cond.next_label);
2194 cond_stack->data.cond.next_label = gen_label_rtx ();
 2195 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2196}
2197
2198/* Generate RTL between the then-clause and the else-clause
2199 of an if-then-else. */
2200
2201void
2202expand_start_else ()
2203{
2204 if (cond_stack->data.cond.endif_label == 0)
2205 cond_stack->data.cond.endif_label = gen_label_rtx ();
2206
2207 if (output_bytecode)
2208 {
2209 bc_expand_start_else ();
2210 return;
2211 }
2212
2213 emit_jump (cond_stack->data.cond.endif_label);
2214 emit_label (cond_stack->data.cond.next_label);
2215 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2216}
2217
2218/* Generate RTL for the end of an if-then.
2219 Pop the record for it off of cond_stack. */
2220
2221void
2222expand_end_cond ()
2223{
2224 struct nesting *thiscond = cond_stack;
2225
2226 if (output_bytecode)
2227 bc_expand_end_cond ();
2228 else
2229 {
2230 do_pending_stack_adjust ();
2231 if (thiscond->data.cond.next_label)
2232 emit_label (thiscond->data.cond.next_label);
2233 if (thiscond->data.cond.endif_label)
2234 emit_label (thiscond->data.cond.endif_label);
2235 }
2236
2237 POPSTACK (cond_stack);
2238 last_expr_type = 0;
2239}
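
/* To summarize with a hypothetical sketch (the condition and statement
   trees are illustrative): a front end expands

	if (a) s1; else if (b) s2; else s3;

   by calling

	expand_start_cond (a, 0);	then expanding s1,
	expand_start_elseif (b);	then expanding s2,
	expand_start_else ();		then expanding s3,
	expand_end_cond ();

   in that order.  */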
2240
2241
2242/* Generate code for the start of an if-then. COND is the expression
2243 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2244 is to be visible to exit_something. It is assumed that the caller
2245 has pushed the previous context on the cond stack. */
2246void
2247bc_expand_start_cond (cond, exitflag)
2248 tree cond;
2249 int exitflag;
2250{
2251 struct nesting *thiscond = cond_stack;
2252
2253 thiscond->data.case_stmt.nominal_type = cond;
2254 bc_expand_expr (cond);
2255 bc_emit_bytecode (jumpifnot);
2256 bc_emit_bytecode_labelref (thiscond->exit_label->bc_label);
2257
2258#ifdef DEBUG_PRINT_CODE
2259 fputc ('\n', stderr);
2260#endif
2261}
2262
 2263/* Generate the label for the end of an if with
 2264 no else clause. */
2265void
2266bc_expand_end_cond ()
2267{
2268 struct nesting *thiscond = cond_stack;
2269
2270 bc_emit_bytecode_labeldef (thiscond->exit_label->bc_label);
2271}
2272
 2273/* Generate code for the start of the else clause of
 2274 an if-then-else. */
2275void
2276bc_expand_start_else ()
2277{
2278 struct nesting *thiscond = cond_stack;
2279
2280 thiscond->data.cond.endif_label = thiscond->exit_label;
2281 thiscond->exit_label = gen_label_rtx ();
2282 bc_emit_bytecode (jump);
2283 bc_emit_bytecode_labelref (thiscond->exit_label->bc_label);
2284
2285#ifdef DEBUG_PRINT_CODE
2286 fputc ('\n', stderr);
2287#endif
2288
2289 bc_emit_bytecode_labeldef (thiscond->data.cond.endif_label->bc_label);
2290}
2291\f
2292/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2293 loop should be exited by `exit_something'. This is a loop for which
2294 `expand_continue' will jump to the top of the loop.
2295
2296 Make an entry on loop_stack to record the labels associated with
2297 this loop. */
2298
2299struct nesting *
2300expand_start_loop (exit_flag)
2301 int exit_flag;
2302{
2303 register struct nesting *thisloop = ALLOC_NESTING ();
2304
2305 /* Make an entry on loop_stack for the loop we are entering. */
2306
2307 thisloop->next = loop_stack;
2308 thisloop->all = nesting_stack;
2309 thisloop->depth = ++nesting_depth;
2310 thisloop->data.loop.start_label = gen_label_rtx ();
2311 thisloop->data.loop.end_label = gen_label_rtx ();
2312 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2313 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2314 loop_stack = thisloop;
2315 nesting_stack = thisloop;
2316
2317 if (output_bytecode)
2318 {
2319 bc_emit_bytecode_labeldef (thisloop->data.loop.start_label->bc_label);
2320 return thisloop;
2321 }
2322
2323 do_pending_stack_adjust ();
2324 emit_queue ();
 2325 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2326 emit_label (thisloop->data.loop.start_label);
2327
2328 return thisloop;
2329}
2330
2331/* Like expand_start_loop but for a loop where the continuation point
2332 (for expand_continue_loop) will be specified explicitly. */
2333
2334struct nesting *
2335expand_start_loop_continue_elsewhere (exit_flag)
2336 int exit_flag;
2337{
2338 struct nesting *thisloop = expand_start_loop (exit_flag);
2339 loop_stack->data.loop.continue_label = gen_label_rtx ();
2340 return thisloop;
2341}
2342
2343/* Specify the continuation point for a loop started with
2344 expand_start_loop_continue_elsewhere.
2345 Use this at the point in the code to which a continue statement
2346 should jump. */
2347
2348void
2349expand_loop_continue_here ()
2350{
2351 if (output_bytecode)
2352 {
2353 bc_emit_bytecode_labeldef (loop_stack->data.loop.continue_label->bc_label);
2354 return;
2355 }
 2356 do_pending_stack_adjust ();
 2357 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2358 emit_label (loop_stack->data.loop.continue_label);
2359}
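
/* As an illustrative sketch (hypothetical trees; not from any actual
   front end), `for (; cond; incr) body' can be expanded as below.
   A plain `while' loop would use expand_start_loop instead, since
   `continue' may then jump straight back to the top.  */
#if 0
  struct nesting *loop = expand_start_loop_continue_elsewhere (1);
  expand_exit_loop_if_false (loop, cond);
  /* ... expand body; `continue' jumps to the point below ... */
  expand_loop_continue_here ();
  /* ... expand incr ... */
  expand_end_loop ();
#endif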
2360
2361/* End a loop. */
2362static void
2363bc_expand_end_loop ()
2364{
2365 struct nesting *thisloop = loop_stack;
2366
2367 bc_emit_bytecode (jump);
2368 bc_emit_bytecode_labelref (thisloop->data.loop.start_label->bc_label);
2369
2370#ifdef DEBUG_PRINT_CODE
2371 fputc ('\n', stderr);
2372#endif
2373
2374 bc_emit_bytecode_labeldef (thisloop->exit_label->bc_label);
2375 POPSTACK (loop_stack);
2376 last_expr_type = 0;
2377}
2378
2379
2380/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2381 Pop the block off of loop_stack. */
2382
2383void
2384expand_end_loop ()
2385{
2386 register rtx insn;
2387 register rtx start_label;
28d81abb
RK
2388 rtx last_test_insn = 0;
2389 int num_insns = 0;
2390
2391 if (output_bytecode)
2392 {
2393 bc_expand_end_loop ();
2394 return;
2395 }
2396
2397 insn = get_last_insn ();
2398 start_label = loop_stack->data.loop.start_label;
2399
2400 /* Mark the continue-point at the top of the loop if none elsewhere. */
2401 if (start_label == loop_stack->data.loop.continue_label)
2402 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2403
2404 do_pending_stack_adjust ();
2405
2406 /* If optimizing, perhaps reorder the loop. If the loop
2407 starts with a conditional exit, roll that to the end
2408 where it will optimize together with the jump back.
2409
2410 We look for the last conditional branch to the exit that we encounter
2411 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2412 branch to the exit first, use it.
2413
2414 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2415 because moving them is not valid. */
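
      /* Schematically (an illustration, not literal RTL), a loop laid
	 out as

		start:	if (! cond) goto end;
			body;
			goto start;
		end:

	 is rearranged into

			goto start;
		newstart: body;
		start:	if (! cond) goto end;
			goto newstart;
		end:

	 which jump optimization can later combine into a single
	 conditional jump back to NEWSTART on each iteration.  */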
2416
2417 if (optimize
2418 &&
2419 ! (GET_CODE (insn) == JUMP_INSN
2420 && GET_CODE (PATTERN (insn)) == SET
2421 && SET_DEST (PATTERN (insn)) == pc_rtx
2422 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2423 {
2424 /* Scan insns from the top of the loop looking for a qualified
2425 conditional exit. */
2426 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2427 insn = NEXT_INSN (insn))
2428 {
2429 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2430 break;
2431
2432 if (GET_CODE (insn) == NOTE
2433 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2434 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2435 break;
2436
2437 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2438 num_insns++;
2439
2440 if (last_test_insn && num_insns > 30)
2441 break;
2442
2443 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2444 && SET_DEST (PATTERN (insn)) == pc_rtx
2445 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2446 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2447 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2448 == loop_stack->data.loop.end_label))
2449 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2450 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2451 == loop_stack->data.loop.end_label))))
2452 last_test_insn = insn;
2453
2454 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2455 && GET_CODE (PATTERN (insn)) == SET
2456 && SET_DEST (PATTERN (insn)) == pc_rtx
2457 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2458 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2459 == loop_stack->data.loop.end_label))
2460 /* Include BARRIER. */
2461 last_test_insn = NEXT_INSN (insn);
2462 }
2463
2464 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2465 {
2466 /* We found one. Move everything from there up
2467 to the end of the loop, and add a jump into the loop
2468 to jump to there. */
2469 register rtx newstart_label = gen_label_rtx ();
2470 register rtx start_move = start_label;
2471
 2472 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2473 then we want to move this note also. */
2474 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2475 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2476 == NOTE_INSN_LOOP_CONT))
2477 start_move = PREV_INSN (start_move);
2478
2479 emit_label_after (newstart_label, PREV_INSN (start_move));
2480 reorder_insns (start_move, last_test_insn, get_last_insn ());
2481 emit_jump_insn_after (gen_jump (start_label),
2482 PREV_INSN (newstart_label));
2483 emit_barrier_after (PREV_INSN (newstart_label));
2484 start_label = newstart_label;
2485 }
2486 }
2487
2488 emit_jump (start_label);
 2489 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2490 emit_label (loop_stack->data.loop.end_label);
2491
2492 POPSTACK (loop_stack);
2493
2494 last_expr_type = 0;
2495}
2496
2497/* Generate a jump to the current loop's continue-point.
2498 This is usually the top of the loop, but may be specified
2499 explicitly elsewhere. If not currently inside a loop,
2500 return 0 and do nothing; caller will print an error message. */
2501
2502int
2503expand_continue_loop (whichloop)
2504 struct nesting *whichloop;
2505{
2506 last_expr_type = 0;
2507 if (whichloop == 0)
2508 whichloop = loop_stack;
2509 if (whichloop == 0)
2510 return 0;
2511 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2512 NULL_RTX);
2513 return 1;
2514}
2515
2516/* Generate a jump to exit the current loop. If not currently inside a loop,
2517 return 0 and do nothing; caller will print an error message. */
2518
2519int
2520expand_exit_loop (whichloop)
2521 struct nesting *whichloop;
2522{
2523 last_expr_type = 0;
2524 if (whichloop == 0)
2525 whichloop = loop_stack;
2526 if (whichloop == 0)
2527 return 0;
 2528 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2529 return 1;
2530}
2531
2532/* Generate a conditional jump to exit the current loop if COND
2533 evaluates to zero. If not currently inside a loop,
2534 return 0 and do nothing; caller will print an error message. */
2535
2536int
2537expand_exit_loop_if_false (whichloop, cond)
2538 struct nesting *whichloop;
2539 tree cond;
2540{
2541 last_expr_type = 0;
2542 if (whichloop == 0)
2543 whichloop = loop_stack;
2544 if (whichloop == 0)
2545 return 0;
2546 if (output_bytecode)
2547 {
2548 bc_expand_expr (cond);
2549 bc_expand_goto_internal (jumpifnot,
2550 whichloop->exit_label->bc_label, NULL_RTX);
2551 }
2552 else
2553 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2554
2555 return 1;
2556}
2557
2558/* Return non-zero if we should preserve sub-expressions as separate
2559 pseudos. We never do so if we aren't optimizing. We always do so
2560 if -fexpensive-optimizations.
2561
2562 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2563 the loop may still be a small one. */
2564
2565int
2566preserve_subexpressions_p ()
2567{
2568 rtx insn;
2569
2570 if (flag_expensive_optimizations)
2571 return 1;
2572
2573 if (optimize == 0 || loop_stack == 0)
2574 return 0;
2575
2576 insn = get_last_insn_anywhere ();
2577
2578 return (insn
2579 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2580 < n_non_fixed_regs * 3));
2581
2582}
2583
2584/* Generate a jump to exit the current loop, conditional, binding contour
2585 or case statement. Not all such constructs are visible to this function,
2586 only those started with EXIT_FLAG nonzero. Individual languages use
2587 the EXIT_FLAG parameter to control which kinds of constructs you can
2588 exit this way.
2589
2590 If not currently inside anything that can be exited,
2591 return 0 and do nothing; caller will print an error message. */
2592
2593int
2594expand_exit_something ()
2595{
2596 struct nesting *n;
2597 last_expr_type = 0;
2598 for (n = nesting_stack; n; n = n->all)
2599 if (n->exit_label != 0)
2600 {
 2601 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2602 return 1;
2603 }
2604
2605 return 0;
2606}
2607\f
2608/* Generate RTL to return from the current function, with no value.
2609 (That is, we do not do anything about returning any value.) */
2610
2611void
2612expand_null_return ()
2613{
2614 struct nesting *block = block_stack;
2615 rtx last_insn = 0;
2616
2617 if (output_bytecode)
2618 {
2619 bc_emit_instruction (ret);
2620 return;
2621 }
2622
2623 /* Does any pending block have cleanups? */
2624
2625 while (block && block->data.block.cleanups == 0)
2626 block = block->next;
2627
2628 /* If yes, use a goto to return, since that runs cleanups. */
2629
2630 expand_null_return_1 (last_insn, block != 0);
2631}
2632
2633/* Generate RTL to return from the current function, with value VAL. */
2634
2635void
2636expand_value_return (val)
2637 rtx val;
2638{
2639 struct nesting *block = block_stack;
2640 rtx last_insn = get_last_insn ();
2641 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2642
2643 /* Copy the value to the return location
2644 unless it's already there. */
2645
2646 if (return_reg != val)
2647 {
2648#ifdef PROMOTE_FUNCTION_RETURN
2649 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2650 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2651 int unsignedp = TREE_UNSIGNED (type);
2652
2653 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2654 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2655 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2656 || TREE_CODE (type) == OFFSET_TYPE)
2657 {
2658 PROMOTE_MODE (mode, unsignedp, type);
2659 }
2660
2661 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
 2662 convert_move (return_reg, val, unsignedp);
2663 else
2664#endif
2665 emit_move_insn (return_reg, val);
2666 }
2667 if (GET_CODE (return_reg) == REG
2668 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2669 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2670
2671 /* Does any pending block have cleanups? */
2672
2673 while (block && block->data.block.cleanups == 0)
2674 block = block->next;
2675
2676 /* If yes, use a goto to return, since that runs cleanups.
2677 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2678
2679 expand_null_return_1 (last_insn, block != 0);
2680}
2681
2682/* Output a return with no value. If LAST_INSN is nonzero,
2683 pretend that the return takes place after LAST_INSN.
2684 If USE_GOTO is nonzero then don't use a return instruction;
2685 go to the return label instead. This causes any cleanups
2686 of pending blocks to be executed normally. */
2687
2688static void
2689expand_null_return_1 (last_insn, use_goto)
2690 rtx last_insn;
2691 int use_goto;
2692{
2693 rtx end_label = cleanup_label ? cleanup_label : return_label;
2694
2695 clear_pending_stack_adjust ();
2696 do_pending_stack_adjust ();
2697 last_expr_type = 0;
2698
2699 /* PCC-struct return always uses an epilogue. */
2700 if (current_function_returns_pcc_struct || use_goto)
2701 {
2702 if (end_label == 0)
2703 end_label = return_label = gen_label_rtx ();
 2704 expand_goto_internal (NULL_TREE, end_label, last_insn);
2705 return;
2706 }
2707
2708 /* Otherwise output a simple return-insn if one is available,
2709 unless it won't do the job. */
2710#ifdef HAVE_return
2711 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2712 {
2713 emit_jump_insn (gen_return ());
2714 emit_barrier ();
2715 return;
2716 }
2717#endif
2718
2719 /* Otherwise jump to the epilogue. */
 2720 expand_goto_internal (NULL_TREE, end_label, last_insn);
2721}
2722\f
2723/* Generate RTL to evaluate the expression RETVAL and return it
2724 from the current function. */
2725
2726void
2727expand_return (retval)
2728 tree retval;
2729{
2730 /* If there are any cleanups to be performed, then they will
2731 be inserted following LAST_INSN. It is desirable
2732 that the last_insn, for such purposes, should be the
2733 last insn before computing the return value. Otherwise, cleanups
2734 which call functions can clobber the return value. */
2735 /* ??? rms: I think that is erroneous, because in C++ it would
2736 run destructors on variables that might be used in the subsequent
2737 computation of the return value. */
2738 rtx last_insn = 0;
2739 register rtx val = 0;
2740 register rtx op0;
2741 tree retval_rhs;
2742 int cleanups;
2743 struct nesting *block;
2744
2745 /* Bytecode returns are quite simple, just leave the result on the
2746 arithmetic stack. */
2747 if (output_bytecode)
2748 {
2749 bc_expand_expr (retval);
2750 bc_emit_instruction (ret);
2751 return;
2752 }
2753
2754 /* If function wants no value, give it none. */
2755 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2756 {
 2757 expand_expr (retval, NULL_RTX, VOIDmode, 0);
 2758 emit_queue ();
2759 expand_null_return ();
2760 return;
2761 }
2762
2763 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2764 cleanups = any_pending_cleanups (1);
2765
2766 if (TREE_CODE (retval) == RESULT_DECL)
2767 retval_rhs = retval;
2768 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2769 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2770 retval_rhs = TREE_OPERAND (retval, 1);
2771 else if (TREE_TYPE (retval) == void_type_node)
2772 /* Recognize tail-recursive call to void function. */
2773 retval_rhs = retval;
2774 else
2775 retval_rhs = NULL_TREE;
2776
2777 /* Only use `last_insn' if there are cleanups which must be run. */
2778 if (cleanups || cleanup_label != 0)
2779 last_insn = get_last_insn ();
2780
2781 /* Distribute return down conditional expr if either of the sides
2782 may involve tail recursion (see test below). This enhances the number
2783 of tail recursions we see. Don't do this always since it can produce
2784 sub-optimal code in some cases and we distribute assignments into
2785 conditional expressions when it would help. */
2786
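/* For instance (illustrative only), `return cond ? foo () : x;' is
   rewritten here as if it had been

	if (cond) return foo (); else return x;

   so that the branch containing the call can later be recognized as a
   tail recursion when foo is the current function.  */
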
2787 if (optimize && retval_rhs != 0
2788 && frame_offset == 0
2789 && TREE_CODE (retval_rhs) == COND_EXPR
2790 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2791 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2792 {
2793 rtx label = gen_label_rtx ();
2794 tree expr;
2795
 2796 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2797 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2798 DECL_RESULT (current_function_decl),
2799 TREE_OPERAND (retval_rhs, 1));
2800 TREE_SIDE_EFFECTS (expr) = 1;
2801 expand_return (expr);
 2802 emit_label (label);
2803
2804 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2805 DECL_RESULT (current_function_decl),
2806 TREE_OPERAND (retval_rhs, 2));
2807 TREE_SIDE_EFFECTS (expr) = 1;
2808 expand_return (expr);
2809 return;
2810 }
2811
2812 /* For tail-recursive call to current function,
2813 just jump back to the beginning.
2814 It's unsafe if any auto variable in this function
2815 has its address taken; for simplicity,
2816 require stack frame to be empty. */
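
/* Thus (a hypothetical example, not from any test case) in

	int f (n, acc)
	int n, acc;
	{ return n == 0 ? acc : f (n - 1, acc * n); }

   each distributed `return f (...)' can be compiled as assignments to
   the formals followed by a jump back to the top of the function,
   turning the recursion into a loop.  */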
2817 if (optimize && retval_rhs != 0
2818 && frame_offset == 0
2819 && TREE_CODE (retval_rhs) == CALL_EXPR
2820 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2821 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2822 /* Finish checking validity, and if valid emit code
2823 to set the argument variables for the new call. */
2824 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2825 DECL_ARGUMENTS (current_function_decl)))
2826 {
2827 if (tail_recursion_label == 0)
2828 {
2829 tail_recursion_label = gen_label_rtx ();
2830 emit_label_after (tail_recursion_label,
2831 tail_recursion_reentry);
2832 }
 2833 emit_queue ();
 2834 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2835 emit_barrier ();
2836 return;
2837 }
2838#ifdef HAVE_return
2839 /* This optimization is safe if there are local cleanups
2840 because expand_null_return takes care of them.
2841 ??? I think it should also be safe when there is a cleanup label,
2842 because expand_null_return takes care of them, too.
2843 Any reason why not? */
2844 if (HAVE_return && cleanup_label == 0
2845 && ! current_function_returns_pcc_struct
2846 && BRANCH_COST <= 1)
2847 {
2848 /* If this is return x == y; then generate
2849 if (x == y) return 1; else return 0;
2850 if we can do it with explicit return insns and
2851 branches are cheap. */
2852 if (retval_rhs)
2853 switch (TREE_CODE (retval_rhs))
2854 {
2855 case EQ_EXPR:
2856 case NE_EXPR:
2857 case GT_EXPR:
2858 case GE_EXPR:
2859 case LT_EXPR:
2860 case LE_EXPR:
2861 case TRUTH_ANDIF_EXPR:
2862 case TRUTH_ORIF_EXPR:
2863 case TRUTH_AND_EXPR:
2864 case TRUTH_OR_EXPR:
2865 case TRUTH_NOT_EXPR:
 2866 case TRUTH_XOR_EXPR:
2867 op0 = gen_label_rtx ();
2868 jumpifnot (retval_rhs, op0);
2869 expand_value_return (const1_rtx);
2870 emit_label (op0);
2871 expand_value_return (const0_rtx);
2872 return;
2873 }
2874 }
2875#endif /* HAVE_return */
2876
2877 if (cleanups
2878 && retval_rhs != 0
2879 && TREE_TYPE (retval_rhs) != void_type_node
2880 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2881 {
2882 /* Calculate the return value into a pseudo reg. */
 2883 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2884 emit_queue ();
2885 /* All temporaries have now been used. */
2886 free_temp_slots ();
2887 /* Return the calculated value, doing cleanups first. */
2888 expand_value_return (val);
2889 }
2890 else
2891 {
2892 /* No cleanups or no hard reg used;
2893 calculate value into hard return reg. */
 2894 expand_expr (retval, const0_rtx, VOIDmode, 0);
2895 emit_queue ();
2896 free_temp_slots ();
2897 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2898 }
2899}
2900
2901/* Return 1 if the end of the generated RTX is not a barrier.
2902 This means code already compiled can drop through. */
2903
2904int
2905drop_through_at_end_p ()
2906{
2907 rtx insn = get_last_insn ();
2908 while (insn && GET_CODE (insn) == NOTE)
2909 insn = PREV_INSN (insn);
2910 return insn && GET_CODE (insn) != BARRIER;
2911}
2912\f
2913/* Emit code to alter this function's formal parms for a tail-recursive call.
2914 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2915 FORMALS is the chain of decls of formals.
2916 Return 1 if this can be done;
2917 otherwise return 0 and do not emit any code. */
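
/* The copying done below matters because the assignments to the
   formals are logically simultaneous.  In a hypothetical tail call
   `return f (y, x);' inside `f (x, y)', storing into X first would
   make the later store into Y read the clobbered value; so any actual
   that mentions an already-stored formal is first copied to a fresh
   pseudo register.  */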
2918
2919static int
2920tail_recursion_args (actuals, formals)
2921 tree actuals, formals;
2922{
2923 register tree a = actuals, f = formals;
2924 register int i;
2925 register rtx *argvec;
2926
2927 /* Check that number and types of actuals are compatible
2928 with the formals. This is not always true in valid C code.
2929 Also check that no formal needs to be addressable
2930 and that all formals are scalars. */
2931
2932 /* Also count the args. */
2933
2934 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2935 {
2936 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2937 return 0;
2938 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2939 return 0;
2940 }
2941 if (a != 0 || f != 0)
2942 return 0;
2943
2944 /* Compute all the actuals. */
2945
2946 argvec = (rtx *) alloca (i * sizeof (rtx));
2947
2948 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
 2949 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2950
2951 /* Find which actual values refer to current values of previous formals.
2952 Copy each of them now, before any formal is changed. */
2953
2954 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2955 {
2956 int copy = 0;
2957 register int j;
2958 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2959 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2960 { copy = 1; break; }
2961 if (copy)
2962 argvec[i] = copy_to_reg (argvec[i]);
2963 }
2964
2965 /* Store the values of the actuals into the formals. */
2966
2967 for (f = formals, a = actuals, i = 0; f;
2968 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2969 {
 2970 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2971 emit_move_insn (DECL_RTL (f), argvec[i]);
2972 else
2973 convert_move (DECL_RTL (f), argvec[i],
2974 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2975 }
2976
2977 free_temp_slots ();
2978 return 1;
2979}
2980\f
2981/* Generate the RTL code for entering a binding contour.
2982 The variables are declared one by one, by calls to `expand_decl'.
2983
2984 EXIT_FLAG is nonzero if this construct should be visible to
2985 `exit_something'. */
2986
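/* A hypothetical sketch of the protocol (the decls and statements are
   illustrative): a brace-enclosed block `{ int x; ... }' is expanded
   by a sequence like the one below, where DECLS is the chain of
   VAR_DECLs declared in the block.  */
#if 0
  expand_start_bindings (0);
  expand_decl (x_decl);		/* once per declared variable */
  /* ... expand the statements of the block ... */
  expand_end_bindings (decls, 1, 0);
#endif
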
2987void
2988expand_start_bindings (exit_flag)
2989 int exit_flag;
2990{
2991 struct nesting *thisblock = ALLOC_NESTING ();
 2992 rtx note;
 2993
2994 if (!output_bytecode)
2995 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2996
2997 /* Make an entry on block_stack for the block we are entering. */
2998
2999 thisblock->next = block_stack;
3000 thisblock->all = nesting_stack;
3001 thisblock->depth = ++nesting_depth;
3002 thisblock->data.block.stack_level = 0;
3003 thisblock->data.block.cleanups = 0;
3004 thisblock->data.block.function_call_count = 0;
3005#if 0
3006 if (block_stack)
3007 {
3008 if (block_stack->data.block.cleanups == NULL_TREE
3009 && (block_stack->data.block.outer_cleanups == NULL_TREE
3010 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3011 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3012 else
3013 thisblock->data.block.outer_cleanups
3014 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3015 block_stack->data.block.outer_cleanups);
3016 }
3017 else
3018 thisblock->data.block.outer_cleanups = 0;
3019#endif
3020#if 1
3021 if (block_stack
3022 && !(block_stack->data.block.cleanups == NULL_TREE
3023 && block_stack->data.block.outer_cleanups == NULL_TREE))
3024 thisblock->data.block.outer_cleanups
3025 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3026 block_stack->data.block.outer_cleanups);
3027 else
3028 thisblock->data.block.outer_cleanups = 0;
3029#endif
3030 thisblock->data.block.label_chain = 0;
3031 thisblock->data.block.innermost_stack_block = stack_block_stack;
3032 thisblock->data.block.first_insn = note;
3033 thisblock->data.block.block_start_count = ++block_start_count;
3034 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3035 block_stack = thisblock;
3036 nesting_stack = thisblock;
3037
3038 if (!output_bytecode)
3039 {
3040 /* Make a new level for allocating stack slots. */
3041 push_temp_slots ();
3042 }
3043}
3044
3045/* Given a pointer to a BLOCK node, save a pointer to the most recently
3046 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3047 BLOCK node. */
3048
3049void
3050remember_end_note (block)
3051 register tree block;
3052{
3053 BLOCK_END_NOTE (block) = last_block_end_note;
3054 last_block_end_note = NULL_RTX;
3055}
3056
3057/* Generate RTL code to terminate a binding contour.
3058 VARS is the chain of VAR_DECL nodes
3059 for the variables bound in this contour.
3060 MARK_ENDS is nonzero if we should put a note at the beginning
3061 and end of this binding contour.
3062
3063 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3064 (That is true automatically if the contour has a saved stack level.) */
3065
3066void
3067expand_end_bindings (vars, mark_ends, dont_jump_in)
3068 tree vars;
3069 int mark_ends;
3070 int dont_jump_in;
3071{
3072 register struct nesting *thisblock = block_stack;
3073 register tree decl;
3074
3075 if (output_bytecode)
3076 {
3077 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3078 return;
3079 }
3080
3081 if (warn_unused)
3082 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3083 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3084 && ! DECL_IN_SYSTEM_HEADER (decl))
3085 warning_with_decl (decl, "unused variable `%s'");
3086
3087 if (thisblock->exit_label)
3088 {
3089 do_pending_stack_adjust ();
3090 emit_label (thisblock->exit_label);
3091 }
3092
3093 /* If necessary, make a handler for nonlocal gotos taking
3094 place in the function calls in this block. */
3095 if (function_call_count != thisblock->data.block.function_call_count
3096 && nonlocal_labels
3097 /* Make handler for outermost block
3098 if there were any nonlocal gotos to this function. */
3099 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3100 /* Make handler for inner block if it has something
3101 special to do when you jump out of it. */
3102 : (thisblock->data.block.cleanups != 0
3103 || thisblock->data.block.stack_level != 0)))
3104 {
3105 tree link;
3106 rtx afterward = gen_label_rtx ();
3107 rtx handler_label = gen_label_rtx ();
3108 rtx save_receiver = gen_reg_rtx (Pmode);
3109
3110 /* Don't let jump_optimize delete the handler. */
3111 LABEL_PRESERVE_P (handler_label) = 1;
3112
3113 /* Record the handler address in the stack slot for that purpose,
3114 during this block, saving and restoring the outer value. */
3115 if (thisblock->next != 0)
3116 {
3117 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3118 emit_insn_before (gen_move_insn (save_receiver,
3119 nonlocal_goto_handler_slot),
3120 thisblock->data.block.first_insn);
3121 }
3122 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
3123 gen_rtx (LABEL_REF, Pmode,
3124 handler_label)),
3125 thisblock->data.block.first_insn);
3126
3127 /* Jump around the handler; it runs only when specially invoked. */
3128 emit_jump (afterward);
3129 emit_label (handler_label);
3130
3131#ifdef HAVE_nonlocal_goto
3132 if (! HAVE_nonlocal_goto)
3133#endif
3134 /* First adjust our frame pointer to its actual value. It was
3135 previously set to the start of the virtual area corresponding to
3136 the stacked variables when we branched here and now needs to be
3137 adjusted to the actual hardware fp value.
3138
 3139 Assignments to virtual registers are converted by
3140 instantiate_virtual_regs into the corresponding assignment
3141 to the underlying register (fp in this case) that makes
3142 the original assignment true.
3143 So the following insn will actually be
3144 decrementing fp by STARTING_FRAME_OFFSET. */
3145 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3146
3147#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
3148 if (fixed_regs[ARG_POINTER_REGNUM])
3149 {
3150#ifdef ELIMINABLE_REGS
3151 /* If the argument pointer can be eliminated in favor of the
3152 frame pointer, we don't need to restore it. We assume here
3153 that if such an elimination is present, it can always be used.
3154 This is the case on all known machines; if we don't make this
3155 assumption, we do unnecessary saving on many machines. */
3156 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3157 int i;
3158
3159 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3160 if (elim_regs[i].from == ARG_POINTER_REGNUM
3161 && elim_regs[i].to == FRAME_POINTER_REGNUM)
3162 break;
3163
3164 if (i == sizeof elim_regs / sizeof elim_regs [0])
3165#endif
3166 {
3167 /* Now restore our arg pointer from the address at which it
3168 was saved in our stack frame.
 3169 If there hasn't been space allocated for it yet, make
3170 some now. */
3171 if (arg_pointer_save_area == 0)
3172 arg_pointer_save_area
3173 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3174 emit_move_insn (virtual_incoming_args_rtx,
3175 /* We need a pseudo here, or else
3176 instantiate_virtual_regs_1 complains. */
3177 copy_to_reg (arg_pointer_save_area));
3178 }
3179 }
3180#endif
3181
3182 /* The handler expects the desired label address in the static chain
3183 register. It tests the address and does an appropriate jump
3184 to whatever label is desired. */
3185 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3186 /* Skip any labels we shouldn't be able to jump to from here. */
3187 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3188 {
3189 rtx not_this = gen_label_rtx ();
3190 rtx this = gen_label_rtx ();
3191 do_jump_if_equal (static_chain_rtx,
3192 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3193 this, 0);
3194 emit_jump (not_this);
3195 emit_label (this);
3196 expand_goto (TREE_VALUE (link));
3197 emit_label (not_this);
3198 }
3199 /* If label is not recognized, abort. */
3200 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3201 VOIDmode, 0);
3202 emit_label (afterward);
3203 }
3204
3205 /* Don't allow jumping into a block that has cleanups or a stack level. */
3206 if (dont_jump_in
3207 || thisblock->data.block.stack_level != 0
3208 || thisblock->data.block.cleanups != 0)
3209 {
3210 struct label_chain *chain;
3211
3212 /* Any labels in this block are no longer valid to go to.
3213 Mark them to cause an error message. */
3214 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3215 {
3216 DECL_TOO_LATE (chain->label) = 1;
3217 /* If any goto without a fixup came to this label,
3218 that must be an error, because gotos without fixups
3219 come from outside all saved stack-levels and all cleanups. */
3220 if (TREE_ADDRESSABLE (chain->label))
3221 error_with_decl (chain->label,
3222 "label `%s' used before containing binding contour");
3223 }
3224 }
3225
3226 /* Restore stack level in effect before the block
3227 (only if variable-size objects allocated). */
3228 /* Perform any cleanups associated with the block. */
3229
3230 if (thisblock->data.block.stack_level != 0
3231 || thisblock->data.block.cleanups != 0)
3232 {
3233 /* Don't let cleanups affect ({...}) constructs. */
3234 int old_expr_stmts_for_value = expr_stmts_for_value;
3235 rtx old_last_expr_value = last_expr_value;
3236 tree old_last_expr_type = last_expr_type;
3237 expr_stmts_for_value = 0;
3238
3239 /* Do the cleanups. */
 3240 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3241 do_pending_stack_adjust ();
3242
3243 expr_stmts_for_value = old_expr_stmts_for_value;
3244 last_expr_value = old_last_expr_value;
3245 last_expr_type = old_last_expr_type;
3246
3247 /* Restore the stack level. */
3248
3249 if (thisblock->data.block.stack_level != 0)
3250 {
 3251 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
 3252 thisblock->data.block.stack_level, NULL_RTX);
 3253 if (nonlocal_goto_handler_slot != 0)
3254 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3255 NULL_RTX);
3256 }
3257
3258 /* Any gotos out of this block must also do these things.
3259 Also report any gotos with fixups that came to labels in this
3260 level. */
3261 fixup_gotos (thisblock,
3262 thisblock->data.block.stack_level,
3263 thisblock->data.block.cleanups,
3264 thisblock->data.block.first_insn,
3265 dont_jump_in);
3266 }
3267
3268 /* Mark the beginning and end of the scope if requested.
3269 We do this now, after running cleanups on the variables
3270 just going out of scope, so they are in scope for their cleanups. */
3271
3272 if (mark_ends)
 3273 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3274 else
3275 /* Get rid of the beginning-mark if we don't make an end-mark. */
3276 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3277
3278 /* If doing stupid register allocation, make sure lives of all
3279 register variables declared here extend thru end of scope. */
3280
3281 if (obey_regdecls)
3282 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3283 {
3284 rtx rtl = DECL_RTL (decl);
3285 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3286 use_variable (rtl);
3287 }
3288
3289 /* Restore block_stack level for containing block. */
3290
3291 stack_block_stack = thisblock->data.block.innermost_stack_block;
3292 POPSTACK (block_stack);
3293
3294 /* Pop the stack slot nesting and free any slots at this level. */
3295 pop_temp_slots ();
3296}
3297
3298
3299/* End a binding contour.
3300 VARS is the chain of VAR_DECL nodes for the variables bound
 3301 in this contour. MARK_ENDS is nonzero if we should put a note
3302 at the beginning and end of this binding contour.
3303 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3304 contour. */
3305
3306void
3307bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3308 tree vars;
3309 int mark_ends;
3310 int dont_jump_in;
3311{
3312 struct nesting *thisbind = nesting_stack;
3313 tree decl;
3314
3315 if (warn_unused)
3316 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3317 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3318 warning_with_decl (decl, "unused variable `%s'");
3319
3320 bc_emit_bytecode_labeldef (thisbind->exit_label->bc_label);
3321
3322 /* Pop block/bindings off stack */
3323 POPSTACK (nesting_stack);
3324 POPSTACK (block_stack);
3325}
3326\f
3327/* Generate RTL for the automatic variable declaration DECL.
3328 (Other kinds of declarations are simply ignored if seen here.)
3329 CLEANUP is an expression to be executed at exit from this binding contour;
3330 for example, in C++, it might call the destructor for this variable.
3331
3332 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3333 either before or after calling `expand_decl' but before compiling
3334 any subsequent expressions. This is because CLEANUP may be expanded
3335 more than once, on different branches of execution.
3336 For the same reason, CLEANUP may not contain a CALL_EXPR
3337 except as its topmost node--else `preexpand_calls' would get confused.
3338
3339 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3340 that is not associated with any particular variable.
3341
3342 There is no special support here for C++ constructors.
3343 They should be handled by the proper code in DECL_INITIAL. */
3344
3345void
3346expand_decl (decl)
3347 register tree decl;
3348{
3349 struct nesting *thisblock = block_stack;
3350 tree type;
3351
3352 if (output_bytecode)
3353 {
3354 bc_expand_decl (decl, 0);
3355 return;
3356 }
3357
3358 type = TREE_TYPE (decl);
3359
3360 /* Only automatic variables need any expansion done.
3361 Static and external variables, and external functions,
3362 will be handled by `assemble_variable' (called from finish_decl).
3363 TYPE_DECL and CONST_DECL require nothing.
3364 PARM_DECLs are handled in `assign_parms'. */
3365
3366 if (TREE_CODE (decl) != VAR_DECL)
3367 return;
 3368 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3369 return;
3370
3371 /* Create the RTL representation for the variable. */
3372
3373 if (type == error_mark_node)
3374 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3375 else if (DECL_SIZE (decl) == 0)
3376 /* Variable with incomplete type. */
3377 {
3378 if (DECL_INITIAL (decl) == 0)
3379 /* Error message was already done; now avoid a crash. */
3380 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3381 else
3382 /* An initializer is going to decide the size of this array.
3383 Until we know the size, represent its address with a reg. */
3384 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3385 }
3386 else if (DECL_MODE (decl) != BLKmode
3387 /* If -ffloat-store, don't put explicit float vars
3388 into regs. */
3389 && !(flag_float_store
3390 && TREE_CODE (type) == REAL_TYPE)
3391 && ! TREE_THIS_VOLATILE (decl)
3392 && ! TREE_ADDRESSABLE (decl)
44fe2e80 3393 && (DECL_REGISTER (decl) || ! obey_regdecls))
28d81abb
RK
3394 {
3395 /* Automatic variable that can go in a register. */
98f3b471
RK
3396 enum machine_mode reg_mode = DECL_MODE (decl);
3397 int unsignedp = TREE_UNSIGNED (type);
3398
3399 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
3400 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
3401 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
3402 || TREE_CODE (type) == OFFSET_TYPE)
3403 {
3404 PROMOTE_MODE (reg_mode, unsignedp, type);
3405 }
3406
5998c7dc
RS
3407 if (TREE_CODE (type) == COMPLEX_TYPE)
3408 {
3409 rtx realpart, imagpart;
3410 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3411
3412 /* For a complex type variable, make a CONCAT of two pseudos
3413 so that the real and imaginary parts
3414 can be allocated separately. */
3415 realpart = gen_reg_rtx (partmode);
3416 REG_USERVAR_P (realpart) = 1;
3417 imagpart = gen_reg_rtx (partmode);
3418 REG_USERVAR_P (imagpart) = 1;
3419 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3420 }
3421 else
3422 {
3423 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3424 if (TREE_CODE (type) == POINTER_TYPE)
3425 mark_reg_pointer (DECL_RTL (decl));
3426 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3427 }
28d81abb
RK
3428 }
3429 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3430 {
3431 /* Variable of fixed size that goes on the stack. */
3432 rtx oldaddr = 0;
3433 rtx addr;
3434
3435 /* If we previously made RTL for this decl, it must be an array
3436 whose size was determined by the initializer.
3437 The old address was a register; set that register now
3438 to the proper address. */
3439 if (DECL_RTL (decl) != 0)
3440 {
3441 if (GET_CODE (DECL_RTL (decl)) != MEM
3442 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3443 abort ();
3444 oldaddr = XEXP (DECL_RTL (decl), 0);
3445 }
3446
3447 DECL_RTL (decl)
3448 = assign_stack_temp (DECL_MODE (decl),
3449 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3450 + BITS_PER_UNIT - 1)
3451 / BITS_PER_UNIT),
3452 1);
3453
3454 /* Set alignment we actually gave this decl. */
3455 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3456 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3457
3458 if (oldaddr)
3459 {
3460 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3461 if (addr != oldaddr)
3462 emit_move_insn (oldaddr, addr);
3463 }
3464
3465 /* If this is a memory ref that contains aggregate components,
3466 mark it as such for cse and loop optimize. */
3467 MEM_IN_STRUCT_P (DECL_RTL (decl))
3468 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3469 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
c1b98a95
RK
3470 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3471 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
28d81abb
RK
3472#if 0
3473 /* If this is in memory because of -ffloat-store,
3474 set the volatile bit, to prevent optimizations from
3475 undoing the effects. */
3476 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3477 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3478#endif
3479 }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to block, if we have
         not already done so.  */
      if (thisblock->data.block.stack_level == 0)
        {
          do_pending_stack_adjust ();
          emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                           &thisblock->data.block.stack_level,
                           thisblock->data.block.first_insn);
          stack_block_stack = thisblock;
        }

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
                                      DECL_SIZE (decl),
                                      size_int (BITS_PER_UNIT)),
                          NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* This is equivalent to calling alloca.  */
      current_function_calls_alloca = 1;

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
                                              DECL_ALIGN (decl));

      if (nonlocal_goto_handler_slot != 0)
        emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

      /* Reference the variable indirectly through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
         mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
        = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#if 0 /* A variable is not necessarily unchanging
         just because it is const.  RTX_UNCHANGING_P
         means no change in the function,
         not merely no change in the variable's scope.
         It is correct to set RTX_UNCHANGING_P if the variable's scope
         is the whole function.  There's no convenient way to test that.  */
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
#endif

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}
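
/* Illustrative sketch only, hence #if 0 -- not part of the compiler.
   It shows the sequence of calls a front end might make to expand a
   block such as `{ int x = 5; ... }'.  X_DECL and the use of
   getdecls () are assumptions about the front end, not requirements.  */
#if 0
  expand_start_bindings (0);            /* enter the binding contour */
  expand_decl (x_decl);                 /* give X_DECL a pseudo reg or stack slot */
  expand_decl_init (x_decl);            /* emit the store of DECL_INITIAL */
  /* ... expand the statements of the block ... */
  expand_end_bindings (getdecls (), 1, 0);
#endif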


/* Generate code for the automatic variable declaration DECL.  For
   most variables this just means we give it a stack offset.  The
   compiler sometimes emits cleanups without variables and we will
   have to deal with those too.  */

void
bc_expand_decl (decl, cleanup)
     tree decl;
     tree cleanup;
{
  tree type;

  if (!decl)
    {
      /* A cleanup with no variable.  */
      if (!cleanup)
        abort ();

      return;
    }

  /* Only auto variables need any work.  */
  if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  type = TREE_TYPE (decl);

  if (type == error_mark_node)
    DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);

  else if (DECL_SIZE (decl) == 0)

    /* Variable with incomplete type.  The stack offset herein will be
       fixed later in expand_decl_init ().  */
    DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);

  else if (TREE_CONSTANT (DECL_SIZE (decl)))
    {
      DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
                                           DECL_ALIGN (decl));
    }
  else
    DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
}
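
/* Illustrative summary of the three cases above (the byte and alignment
   values assume a 32-bit target; they are not normative):
     `int i;'        -- fixed size: bc_allocate_local (4, 32);
     `char buf[n];'  -- variable size: DECL_RTL is a pointer slot later
                        filled in by bc_expand_variable_local_init ();
     incomplete type -- placeholder from bc_gen_rtx (), patched in
                        expand_decl_init ().  */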
\f
/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
          || code == POINTER_TYPE)
        expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
                           0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  free_temp_slots ();
}

/* Expand initialization for variable-sized types.  Allocate array
   using newlocalSI and set local variable, which is a pointer to the
   storage.  */

void
bc_expand_variable_local_init (decl)
     tree decl;
{
  /* Evaluate size expression and coerce to SI.  */
  bc_expand_expr (DECL_SIZE (decl));

  /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
     no coercion is necessary (?).  */

/*  emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
                                                  TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */

  /* Emit code to allocate array.  */
  bc_emit_instruction (newlocalSI);

  /* Store array pointer in local variable.  This is the only instance
     where we actually want the address of the pointer to the
     variable-size block, rather than the pointer itself.  We avoid
     using expand_address() since that would cause the pointer to be
     pushed rather than its address.  Hence the hard-coded reference;
     notice also that the variable is always local (no global
     variable-size type variables).  */

  bc_load_localaddr (DECL_RTL (decl));
  bc_emit_instruction (storeP);
}
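
/* Illustrative sketch only (#if 0): the bytecode sequence emitted above
   for a declaration like `char buf[n];'.  SIZE_EXPR stands for
   DECL_SIZE (decl); the stack comments are the interesting part.  */
#if 0
  bc_expand_expr (size_expr);           /* push the size, as an SI value */
  bc_emit_instruction (newlocalSI);     /* pop size; push pointer to the
                                           freshly allocated local block */
  bc_load_localaddr (DECL_RTL (decl));  /* push address of the pointer slot */
  bc_emit_instruction (storeP);         /* pop both; store pointer in slot */
#endif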


/* Emit code to initialize a declaration.  */

void
bc_expand_decl_init (decl)
     tree decl;
{
  int org_stack_depth;

  /* Static initializers are handled elsewhere.  */

  if (TREE_STATIC (decl))
    return;

  /* Remember the original stack depth.  */
  org_stack_depth = stack_depth;

  /* If the type is variable-size, we first create its space (we ASSUME
     it CAN'T be static).  We do this regardless of whether there's an
     initializer assignment or not.  */

  if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    bc_expand_variable_local_init (decl);

  /* Expand initializer assignment.  */
  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
          || code == POINTER_TYPE)

        expand_assignment (TREE_TYPE (decl), decl, 0, 0);
    }
  else if (DECL_INITIAL (decl))
    expand_assignment (TREE_TYPE (decl), decl, 0, 0);

  /* Restore stack depth.  */
  if (org_stack_depth > stack_depth)
    abort ();

  bc_adjust_stack (stack_depth - org_stack_depth);
}


/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      thisblock->data.block.cleanups
        = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;
    }
  return 1;
}
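
/* Illustrative sketch only (#if 0): how a C++ front end might register
   a destructor as the cleanup for `{ T obj; ... }'.  OBJ_DECL and
   DTOR_CALL are hypothetical trees built by the front end.  */
#if 0
  expand_decl (obj_decl);
  expand_decl_init (obj_decl);
  if (! expand_decl_cleanup (obj_decl, dtor_call))
    error ("destructor needed outside of any binding contour");
#endif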
\f
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
        {
          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;
          else
            {
              DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
              MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
              RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
            }
        }
      else if (GET_CODE (x) == REG)
        {
          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;
          else
            DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
        }
      else
        abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
        thisblock->data.block.cleanups
          = temp_tree_cons (decl_elt, cleanup_elt,
                            thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
    }
}
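
/* Illustrative example: for a C++ anonymous union

     union { int i; float f; };

   a front end might build one VAR_DECL for the union plus one per
   member and call (names hypothetical, no cleanups):

     expand_anon_union_decl (union_decl, NULL_TREE,
                             tree_cons (NULL_TREE, i_decl,
                                        tree_cons (NULL_TREE, f_decl,
                                                   NULL_TREE)));

   so that I_DECL and F_DECL share the union's storage, each accessed
   in its own machine mode.  */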
\f
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.  */

static void
expand_cleanups (list, dont_do)
     tree list;
     tree dont_do;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
        if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
          expand_cleanups (TREE_VALUE (tail), dont_do);
        else
          {
            /* Cleanups may be run multiple times.  For example,
               when exiting a binding contour, we expand the
               cleanups associated with that contour.  When a goto
               within that binding contour has a target outside that
               contour, it will expand all cleanups from its scope to
               the target.  Though the cleanups are expanded multiple
               times, the control paths are non-overlapping so the
               cleanups will not be executed twice.  */
            expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
            free_temp_slots ();
          }
      }
}
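
/* Illustrative example of the "expanded more than once" remark above.
   Given C++ source of the form

     { T obj;  if (cond) goto out;  ...  }
     out: ;

   the cleanup destroying OBJ is expanded twice: once on the path taken
   by the goto and once at the normal end of the block.  The two control
   paths do not overlap, so the destructor still runs once per run.  */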

/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
               outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}

/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;
  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
#if 0
          || block_stack->data.block.outer_cleanups == empty_cleanup_list
#endif
          ))
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}
\f
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  if (output_bytecode)
    {
      bc_expand_start_case (thiscase, expr, type, printname);
      return;
    }

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}
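
/* Illustrative sketch only (#if 0): the calls a C front end might make
   for `switch (e) { case 1: ...; default: ...; }'.  E, LAB1,
   DEFAULT_LAB and DUPLICATE are hypothetical; `convert' is the usual
   front-end conversion function.  */
#if 0
  expand_start_case (1, e, TREE_TYPE (e), "switch statement");
  /* ... while parsing the body: ... */
  pushcase (build_int_2 (1, 0), convert, lab1, &duplicate);
  /* ... expand statements for case 1 ... */
  pushcase (NULL_TREE, convert, default_lab, &duplicate);
  /* ... expand statements for the default ... */
  expand_end_case (e);
#endif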


/* Enter a case statement.  It is assumed that the caller has pushed
   the current context onto the case stack.  */

void
bc_expand_start_case (thiscase, expr, type, printname)
     struct nesting *thiscase;
     tree expr;
     tree type;
     char *printname;
{
  bc_expand_expr (expr);
  bc_expand_conversion (TREE_TYPE (expr), type);

  /* For cases, the skip is a place we jump to that's emitted after
     the size of the jump table is known.  */

  thiscase->data.case_stmt.skip_label = gen_label_rtx ();
  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (thiscase->data.case_stmt.skip_label->bc_label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}


/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
}

/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  POPSTACK (case_stack);
}

/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}
\f
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  if (output_bytecode)
    return bc_pushcase (value, label);

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
        {
          *duplicate = case_stack->data.case_stmt.default_label;
          return 2;
        }
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
         to keep the chain sorted in increasing order.
         But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
           /* Keep going past elements distinctly less than VALUE.  */
           *l != 0 && tree_int_cst_lt ((*l)->high, value);
           l = &(*l)->right)
        ;
      if (*l)
        {
          /* Element we will insert before must be distinctly greater;
             overlap means error.  */
          if (! tree_int_cst_lt (value, (*l)->low))
            {
              *duplicate = (*l)->code_label;
              return 2;
            }
        }

      /* Add this label to the chain, and succeed.
         Copy VALUE so it is on temporary rather than momentary
         obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);
  return 0;
}
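
/* Illustrative sketch only (#if 0): how a front end might act on the
   return codes documented above.  The messages are hypothetical, not
   the exact diagnostics of any particular front end.  */
#if 0
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:
      error ("duplicate case value");
      break;
    case 3:
      warning ("case value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
#endif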

/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              warning ("unreachable code at beginning of %s",
                       case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)              /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = (*converter) (nominal_type, value1);

  if (value2 == 0)              /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, converter, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
         overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
        {
          *duplicate = (*l)->code_label;
          return 2;
        }
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}
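
/* Illustrative sketch only (#if 0): the GNU C range extension
   `case 1 ... 3:' maps onto this function; a null bound stands for the
   index type's minimum or maximum value.  The calls are hypothetical.  */
#if 0
  /* case 1 ... 3:  */
  pushcase_range (build_int_2 (1, 0), build_int_2 (3, 0),
                  convert, label, &duplicate);
  /* open-ended range: everything from 5 up  */
  pushcase_range (build_int_2 (5, 0), NULL_TREE, convert, label, &duplicate);
#endif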


/* Accumulate one case or default label; VALUE is the value of the
   case, or nil for a default label.  If not currently inside a case,
   return 1 and do nothing.  If VALUE is a duplicate or overlaps, return
   2 and do nothing.  If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.  This function is a leftover from the earlier
   bytecode compiler, which was based on gcc 1.37.  It should be
   merged into pushcase.  */

int
bc_pushcase (value, label)
     tree value;
     tree label;
{
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  if (! thiscase)
    return 1;

  /* Fail if duplicate, overlap, or out of type range.  */
  if (value)
    {
      value = convert (thiscase->data.case_stmt.nominal_type, value);
      if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
        return 3;

      for (case_label = thiscase->data.case_stmt.case_list;
           case_label->left; case_label = case_label->left)
        if (! tree_int_cst_lt (case_label->left->high, value))
          break;

      if (case_label != thiscase->data.case_stmt.case_list
          && ! tree_int_cst_lt (case_label->high, value)
          || case_label->left && ! tree_int_cst_lt (value, case_label->left->low))
        return 2;

      new_label = (struct case_node *) oballoc (sizeof (struct case_node));
      new_label->low = new_label->high = copy_node (value);
      new_label->code_label = label;
      new_label->left = case_label->left;

      case_label->left = new_label;
      thiscase->data.case_stmt.num_ranges++;
    }
  else
    {
      if (thiscase->data.case_stmt.default_label)
        return 2;
      thiscase->data.case_stmt.default_label = label;
    }

  expand_label (label);
  return 0;
}
\f
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  if (output_bytecode)
    {
      bc_check_for_full_enumeration_handling (type);
      return;
    }

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of members in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
         Quit looking when we've gone too far (since case expressions
         are kept sorted in ascending order).  Warn about enumerators not
         handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
           n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
           n = n->right)
        ;

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
        {
          if (warn_switch)
            warning ("enumeration value `%s' not handled in switch",
                     IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
          all_values = 0;
        }
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (warn_switch)
    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      {
        for (chain = TYPE_VALUES (type);
             chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
             chain = TREE_CHAIN (chain))
          ;

        if (!chain)
          {
            if (TYPE_NAME (type) == 0)
              warning ("case value `%d' not in enumerated type",
                       TREE_INT_CST_LOW (n->low));
            else
              warning ("case value `%d' not in enumerated type `%s'",
                       TREE_INT_CST_LOW (n->low),
                       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                            == IDENTIFIER_NODE)
                                           ? TYPE_NAME (type)
                                           : DECL_NAME (TYPE_NAME (type))));
          }
        if (!tree_int_cst_equal (n->low, n->high))
          {
            for (chain = TYPE_VALUES (type);
                 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
                 chain = TREE_CHAIN (chain))
              ;

            if (!chain)
              {
                if (TYPE_NAME (type) == 0)
                  warning ("case value `%d' not in enumerated type",
                           TREE_INT_CST_LOW (n->high));
                else
                  warning ("case value `%d' not in enumerated type `%s'",
                           TREE_INT_CST_LOW (n->high),
                           IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                                == IDENTIFIER_NODE)
                                               ? TYPE_NAME (type)
                                               : DECL_NAME (TYPE_NAME (type))));
              }
          }
      }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
           (*l)->right != 0;
           l = &(*l)->right)
        ;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
      *l = 0;
    }
#endif /* 0 */
}
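
/* Illustrative example of the diagnostics above (messages abridged):

     enum color { RED, GREEN, BLUE };
     switch (c)         -- c has type enum color; no default label
       {
       case RED: ...    -- GREEN and BLUE draw `not handled' warnings
       case 7: ...      -- draws a `not in enumerated type' warning
       }

   Both warnings require -Wswitch, and the check is only made when the
   switch has no default label.  */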


/* Check that all enumeration literals are covered by the case
   expressions of a switch.  Also warn if there are any cases
   that are not elements of the enumerated type.  */

void
bc_check_for_full_enumeration_handling (type)
     tree type;
{
  struct nesting *thiscase = case_stack;
  struct case_node *c;
  tree e;

  /* Check for enums not handled.  */
  for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
    {
      for (c = thiscase->data.case_stmt.case_list->left;
           c && tree_int_cst_lt (c->high, TREE_VALUE (e));
           c = c->left)
        ;
      if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
        warning ("enumerated value `%s' not handled in switch",
                 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
    }

  /* Check for cases not in the enumeration.  */
  for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
    {
      for (e = TYPE_VALUES (type);
           e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
           e = TREE_CHAIN (e))
        ;
      if (! e)
        warning ("case value `%d' not in enumerated type `%s'",
                 TREE_INT_CST_LOW (c->low),
                 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
                                     ? TYPE_NAME (type)
                                     : DECL_NAME (TYPE_NAME (type))));
    }
}
\f
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr;
  int unsignedp;

  if (output_bytecode)
    {
      bc_expand_end_case (orig_index);
      return;
    }

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
         enumeration literals are covered by the cases.
         No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
          && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
          && TREE_CODE (index_expr) != INTEGER_CST)
        check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
        {
          rtx insn;
          for (insn = get_last_insn ();
               insn != case_stack->data.case_stmt.start;
               insn = PREV_INSN (insn))
            if (GET_CODE (insn) != NOTE
                && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
              {
                warning ("unreachable code at beginning of %s",
                         case_stack->data.case_stmt.printname);
                break;
              }
        }

      /* If we don't have a default-label, create one here,
         after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
        {
          thiscase->data.case_stmt.default_label
            = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
          expand_label (thiscase->data.case_stmt.default_label);
        }
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
         Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
        {
          /* Check low and high label values are integers.  */
          if (TREE_CODE (n->low) != INTEGER_CST)
            abort ();
          if (TREE_CODE (n->high) != INTEGER_CST)
            abort ();

          n->low = convert (TREE_TYPE (index_expr), n->low);
          n->high = convert (TREE_TYPE (index_expr), n->high);

          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */
          if (count++ == 0)
            {
              minval = n->low;
              maxval = n->high;
            }
          else
            {
              if (INT_CST_LT (n->low, minval))
                minval = n->low;
              if (INT_CST_LT (maxval, n->high))
                maxval = n->high;
            }
          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))
            count++;
        }

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
        range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
                             maxval, minval));

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
        {
          expand_expr (index_expr, const0_rtx, VOIDmode, 0);
          emit_queue ();
          emit_jump (default_label);
        }

      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
         bounds, this means extra overhead for dispatch tables
         which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
               || count < CASE_VALUES_THRESHOLD
               || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
                   > 10 * count)
               || TREE_CODE (index_expr) == INTEGER_CST
               /* These will reduce to a constant.  */
               || (TREE_CODE (index_expr) == CALL_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
                   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
                   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
               || (TREE_CODE (index_expr) == COMPOUND_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
        {
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

          /* If the index is a short or char that we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
                  == CODE_FOR_nothing))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (cmp_optab->handlers[(int) wider_mode].insn_code
                    != CODE_FOR_nothing)
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          emit_queue ();
          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)
            {
              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)
                {
                  index_expr
                    = build_int_2 (INTVAL (index),
                                   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (TREE_TYPE (index_expr), index_expr);
                }

              /* For constant index expressions we need only
                 issue an unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimization phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list;
                   n;
                   n = n->right)
                {
                  if (! tree_int_cst_lt (index_expr, n->low)
                      && ! tree_int_cst_lt (n->high, index_expr))
                    break;
                }
              if (n)
                emit_jump (label_rtx (n->code_label));
              else
                emit_jump (default_label);
            }
          else
            {
              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */

              use_cost_table
                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list,
                                  NULL_PTR);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, TREE_TYPE (index_expr));
              emit_jump_if_reachable (default_label);
            }
        }
      else
        {
          int win = 0;
#ifdef HAVE_casesi
          if (HAVE_casesi)
            {
              enum machine_mode index_mode = SImode;
              int index_bits = GET_MODE_BITSIZE (index_mode);

              /* Convert the index to SImode.  */
              if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
                  > GET_MODE_BITSIZE (index_mode))
                {
                  enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
                  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

                  /* We must handle the endpoints in the original mode.  */
                  index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
                                      index_expr, minval);
                  minval = integer_zero_node;
                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
                  emit_jump_insn (gen_bltu (default_label));
                  /* Now we can safely truncate.  */
                  index = convert_to_mode (index_mode, index, 0);
                }
              else
                {
                  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
                    index_expr = convert (type_for_size (index_bits, 0),
                                          index_expr);
                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                }
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
                                                              VOIDmode, 0),
                                          expand_expr (range, NULL_RTX,
                                                       VOIDmode, 0),
                                          table_label, default_label));
              win = 1;
            }
#endif
#ifdef HAVE_tablejump
          if (! win && HAVE_tablejump)
            {
              index_expr = convert (thiscase->data.case_stmt.nominal_type,
                                    fold (build (MINUS_EXPR,
                                                 TREE_TYPE (index_expr),
                                                 index_expr, minval)));
              index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
                            expand_expr (range, NULL_RTX, VOIDmode, 0),
                            table_label, default_label);
              win = 1;
            }
#endif
          if (! win)
            abort ();

          /* Get table of labels to jump to, in order of case index.  */

          ncases = TREE_INT_CST_LOW (range) + 1;
          labelvec = (rtx *) alloca (ncases * sizeof (rtx));
          bzero (labelvec, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
            {
              register HOST_WIDE_INT i
                = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

              while (1)
                {
                  labelvec[i]
                    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
                  if (i + TREE_INT_CST_LOW (orig_minval)
                      == TREE_INT_CST_LOW (n->high))
                    break;
                  i++;
                }
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

          /* Output the table.  */
          emit_label (table_label);

          /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
             were an expression, instead of an #ifdef/#ifndef.  */
          if (
#ifdef CASE_VECTOR_PC_RELATIVE
              1 ||
#endif
              flag_pic)
            emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
                                     gen_rtx (LABEL_REF, Pmode, table_label),
                                     gen_rtvec_v (ncases, labelvec)));
          else
            emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
                                     gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);
#else
          emit_barrier ();
#endif
        }

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
                     thiscase->data.case_stmt.start);
    }
  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
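
/* Illustrative note on the dispatch heuristic above (the threshold is
   target-dependent; 4 and 5 are only the defaults defined here):

     switch (i) { case 1: case 100: case 1000: ... }
        -- few, sparse values (range > 10 * count): compiled as a
           binary tree of compares;

     switch (i) { case 0: case 1: ... case 19: ... }
        -- many, dense values: compiled as a casesi or tablejump
           dispatch table.  */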


/* Terminate a case statement.  EXPR is the original index
   expression.  */

void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (thiscase->exit_label->bc_label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (thiscase->data.case_stmt.skip_label->bc_label);

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instruction's literal arguments, in order.
     In addition to the value on the stack, it uses:
     1.  The address of the jump table.
     2.  The size of the jump table.
     3.  The default label.  */

  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
                          sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (DECL_RTL (thiscase->
                                         data.case_stmt.default_label)->bc_label);
  else
    bc_emit_bytecode_labelref (thiscase->exit_label->bc_label);

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
        opcode = TREE_INT_CST_LOW (c->low);
        bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

        opcode = TREE_INT_CST_LOW (c->high);
        bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

        bc_emit_bytecode_labelref (DECL_RTL (c->code_label)->bc_label);
      }
  else
    if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
      for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
        {
          bc_emit_bytecode_DI_const (c->low);
          bc_emit_bytecode_DI_const (c->high);

          bc_emit_bytecode_labelref (DECL_RTL (c->code_label)->bc_label);
        }
    else
      /* Bad mode.  */
      abort ();


  bc_emit_bytecode_labeldef (thiscase->exit_label->bc_label);

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST
      && warn_switch)
    check_for_full_enumeration_handling (TREE_TYPE (expr));


#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}


/* Return unique bytecode ID.  */
int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
        emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
\f
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

        chars above space:      16
        digits:                 16
        default:                12
        space, punct:           8
        tab:                    4
        newline:                2
        other "\" chars:        1
        remaining chars:        0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
        {
          if (isalnum (i))
            cost_table[i] = 16;
          else if (ispunct (i))
            cost_table[i] = 8;
          else if (iscntrl (i))
            cost_table[i] = -1;
        }

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
        if (cost_table[i] < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
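
/* Illustrative example of the weighting above: in a scanner-style
   switch such as

     switch (c) { case 'a': ... case '0': ... case ' ': ... case '\n': ... }

   the nodes get weights 16, 16, 8 and 2 respectively, so (roughly) the
   likelier letter and digit cases end up nearer the root of the
   decision tree built by balance_case_nodes.  */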

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && next_real_insn (label_rtx (np->code_label)) == lb
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost.
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
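
/* For exposition only: with seven equally likely single-valued nodes,
   the splitting above turns the chain

	1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

   into the tree

		  4
		/   \
	       2     6
	      / \   / \
	     1   3 5   7

   so the decision code emitted by emit_case_nodes descends at most
   three levels of tests instead of scanning seven cases linearly.  */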
\f
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
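
/* For exposition only: suppose a node covers [51, 60] and its parent
   covers [41, 50].  Control reaches the child only after the parent's
   tests, and the parent's HIGH of 50 equals the child's LOW minus one,
   so node_has_low_bound returns 1 and the "index >= 51" test can be
   omitted when emitting the child's code.  */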

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
\f
/* Emit step-by-step code to select a case for the value of INDEX.
   The decision tree thus generated follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  If a subordinate is bounded, the
   conditional and/or unconditional jumps produced by the boundary
   check for the current node are aimed directly at that
   subordinate's code.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would guard only
	     one right child; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
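
/* For exposition only: for a balanced tree over the single-valued cases
   1, 2 and 3 (pivot 2), with no bounds provable from parents, the code
   emitted above amounts roughly to

	if (index == 2) goto L2;
	if (index > 2)  goto Ltest;	; dispatch to right subtree
	if (index == 1) goto L1;	; left subtree
	goto Ldefault;
     Ltest:
	if (index == 3) goto L3;	; right subtree
	goto Ldefault;

   with comparisons dropped wherever node_has_low_bound or
   node_has_high_bound proves them redundant.  */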
\f
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}