/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
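
/* For illustration (a sketch, restating the protocol above):
   to expand `if (cond) A; else B;' a parser would call

	expand_start_cond (cond, 0);	... expand A ...
	expand_start_else ();		... expand B ...
	expand_end_cond ();

   omitting the `expand_start_else' call for a plain if-then.  */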

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
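
/* For illustration (a sketch): for `case 1: ... case 2: ... case 4: ...'
   the initial chain is 1 -> 2 -> 4 through the RIGHT fields; if no branch
   table is used, balancing might put 2 at the root with 1 as its left
   child and 4 as its right child, and compare-and-jump insns are then
   emitted from that tree in order.  */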

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
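
/* For illustration (a sketch): `expand_end_cond' below finishes an if
   construct with

	POPSTACK (cond_stack);

   which frees that entry and unwinds `nesting_stack' and `nesting_depth'
   back past it.  */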
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
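
/* For example (a sketch, GNU C): in

	{ int v[n]; ... goto out; ... }
     out: ;

   the goto must restore the stack level in effect at `out', which is not
   known until the label is defined, so expand_goto records a fixup that
   fixup_gotos finalizes when the binding contour is exited.  */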
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
  emit_barrier ();
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr), virtual_stack_vars_rtx,
				frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, 0);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx to jump to.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, 0);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
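
/* For example (a sketch, restating the parameter roles above): for

	asm volatile ("add %1,%0" : "=r" (sum) : "r" (inc));

   STRING is the STRING_CST "add %1,%0"; OUTPUTS is a one-element list
   pairing the constraint "=r" (TREE_PURPOSE) with the expression for
   `sum' (TREE_VALUE); INPUTS likewise pairs "r" with `inc'; CLOBBERS
   is empty; and VOL is 1 because of `volatile'.  */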

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      if (decode_reg_name (regname) >= 0)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
		  ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)
		continue;

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      0, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */
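
/* For example (a sketch): with -Wunused, `(f (), x + 1);' draws the
   "value computed is not used" warning for `x + 1', while `(f (), 0)',
   `x++;' and `(void) (x + 1);' do not, per the cases below.  */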

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
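
/* For illustration (a sketch): a front end expanding `({ ... VALUE; })'
   does roughly

	tree t = expand_start_stmt_expr ();
	... expand_expr_stmt on each substatement ...
	t = expand_end_stmt_expr (t);

   and then uses T as the value of the whole construct.  */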

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }
*/
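
/* For illustration (a sketch of how a front end might drive this):

	expand_start_try (cond, 0, 1);	... expand the try body ...
	expand_end_try ();
	expand_start_except (0, 1);	... expand_catch and the
					    handler bodies ...
	raised = expand_end_except ();

   where RAISED comes back as the list of exceptions that may be raised
   but are not handled, for language-specific code to deal with.  */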

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the exception contour we are
     entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */
int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}
\f
1719/* Generate RTL for the start of an if-then. COND is the expression
1720 whose truth should be tested.
1721
1722 If EXITFLAG is nonzero, this conditional is visible to
1723 `exit_something'. */
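/* As an illustrative sketch (not from any particular front end), a
   parser handling `if (a) S1; else if (b) S2; else S3;' might call:

	expand_start_cond (a, 0);
	... expand S1 ...
	expand_start_elseif (b);
	... expand S2 ...
	expand_start_else ();
	... expand S3 ...
	expand_end_cond ();  */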
1724
1725void
1726expand_start_cond (cond, exitflag)
1727 tree cond;
1728 int exitflag;
1729{
1730 struct nesting *thiscond = ALLOC_NESTING ();
1731
1732 /* Make an entry on cond_stack for the cond we are entering. */
1733
1734 thiscond->next = cond_stack;
1735 thiscond->all = nesting_stack;
1736 thiscond->depth = ++nesting_depth;
1737 thiscond->data.cond.next_label = gen_label_rtx ();
1738 /* Before we encounter an `else', we don't need a separate exit label
1739 unless there are supposed to be exit statements
1740 to exit this conditional. */
1741 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1742 thiscond->data.cond.endif_label = thiscond->exit_label;
1743 cond_stack = thiscond;
1744 nesting_stack = thiscond;
1745
1746 do_jump (cond, thiscond->data.cond.next_label, NULL);
1747}
1748
1749/* Generate RTL between the then-clause and the elseif-clause
1750 of an if-then-elseif-.... */
1751
1752void
1753expand_start_elseif (cond)
1754 tree cond;
1755{
1756 if (cond_stack->data.cond.endif_label == 0)
1757 cond_stack->data.cond.endif_label = gen_label_rtx ();
1758 emit_jump (cond_stack->data.cond.endif_label);
1759 emit_label (cond_stack->data.cond.next_label);
1760 cond_stack->data.cond.next_label = gen_label_rtx ();
1761 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1762}
1763
1764/* Generate RTL between the then-clause and the else-clause
1765 of an if-then-else. */
1766
1767void
1768expand_start_else ()
1769{
1770 if (cond_stack->data.cond.endif_label == 0)
1771 cond_stack->data.cond.endif_label = gen_label_rtx ();
1772 emit_jump (cond_stack->data.cond.endif_label);
1773 emit_label (cond_stack->data.cond.next_label);
1774 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1775}
1776
1777/* Generate RTL for the end of an if-then.
1778 Pop the record for it off of cond_stack. */
1779
1780void
1781expand_end_cond ()
1782{
1783 struct nesting *thiscond = cond_stack;
1784
1785 do_pending_stack_adjust ();
1786 if (thiscond->data.cond.next_label)
1787 emit_label (thiscond->data.cond.next_label);
1788 if (thiscond->data.cond.endif_label)
1789 emit_label (thiscond->data.cond.endif_label);
1790
1791 POPSTACK (cond_stack);
1792 last_expr_type = 0;
1793}
1794\f
1795/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1796 loop should be exited by `exit_something'. This is a loop for which
1797 `expand_continue' will jump to the top of the loop.
1798
1799 Make an entry on loop_stack to record the labels associated with
1800 this loop. */
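/* Illustrative sketch (details vary by front end): `while (c) S;'
   might be expanded as

	whichloop = expand_start_loop (1);
	expand_exit_loop_if_false (whichloop, c);
	... expand S ...
	expand_end_loop ();

   with `break' and `continue' mapping onto expand_exit_loop and
   expand_continue_loop respectively.  */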
1801
1802struct nesting *
1803expand_start_loop (exit_flag)
1804 int exit_flag;
1805{
1806 register struct nesting *thisloop = ALLOC_NESTING ();
1807
1808 /* Make an entry on loop_stack for the loop we are entering. */
1809
1810 thisloop->next = loop_stack;
1811 thisloop->all = nesting_stack;
1812 thisloop->depth = ++nesting_depth;
1813 thisloop->data.loop.start_label = gen_label_rtx ();
1814 thisloop->data.loop.end_label = gen_label_rtx ();
1815 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1816 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1817 loop_stack = thisloop;
1818 nesting_stack = thisloop;
1819
1820 do_pending_stack_adjust ();
1821 emit_queue ();
1822 emit_note (0, NOTE_INSN_LOOP_BEG);
1823 emit_label (thisloop->data.loop.start_label);
1824
1825 return thisloop;
1826}
1827
1828/* Like expand_start_loop but for a loop where the continuation point
1829 (for expand_continue_loop) will be specified explicitly. */
1830
1831struct nesting *
1832expand_start_loop_continue_elsewhere (exit_flag)
1833 int exit_flag;
1834{
1835 struct nesting *thisloop = expand_start_loop (exit_flag);
1836 loop_stack->data.loop.continue_label = gen_label_rtx ();
1837 return thisloop;
1838}
1839
1840/* Specify the continuation point for a loop started with
1841 expand_start_loop_continue_elsewhere.
1842 Use this at the point in the code to which a continue statement
1843 should jump. */
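/* For example (sketch only), `for (;; e) S' might use:

	expand_start_loop_continue_elsewhere (1);
	... expand S ...
	expand_loop_continue_here ();
	... expand e ...
	expand_end_loop ();

   so that `continue' inside S jumps to the increment expression E
   instead of to the top of the loop.  */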
1844
1845void
1846expand_loop_continue_here ()
1847{
1848 do_pending_stack_adjust ();
1849 emit_note (0, NOTE_INSN_LOOP_CONT);
1850 emit_label (loop_stack->data.loop.continue_label);
1851}
1852
1853/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1854 Pop the block off of loop_stack. */
1855
1856void
1857expand_end_loop ()
1858{
1859 register rtx insn = get_last_insn ();
1860 register rtx start_label = loop_stack->data.loop.start_label;
1861 rtx last_test_insn = 0;
1862 int num_insns = 0;
1863
1864 /* Mark the continue-point at the top of the loop if none elsewhere. */
1865 if (start_label == loop_stack->data.loop.continue_label)
1866 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1867
1868 do_pending_stack_adjust ();
1869
1870 /* If optimizing, perhaps reorder the loop. If the loop
1871 starts with a conditional exit, roll that to the end
1872 where it will optimize together with the jump back.
1873
1874 We look for the last conditional branch to the exit that we encounter
1875 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1876 branch to the exit first, use it.
1877
1878 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1879 because moving them is not valid. */
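  /* Schematically (a rough sketch): a loop laid out as

	start:	if (! c) goto end;
		body;
		goto start;
	end:

     becomes

		goto start;
	top:	body;
	start:	if (! c) goto end;
		goto top;
	end:

     which jump optimization can then fold into a single conditional
     branch at the bottom of the loop.  */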
1880
1881 if (optimize
1882 &&
1883 ! (GET_CODE (insn) == JUMP_INSN
1884 && GET_CODE (PATTERN (insn)) == SET
1885 && SET_DEST (PATTERN (insn)) == pc_rtx
1886 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1887 {
1888 /* Scan insns from the top of the loop looking for a qualified
1889 conditional exit. */
1890 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1891 insn = NEXT_INSN (insn))
1892 {
1893 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1894 break;
1895
1896 if (GET_CODE (insn) == NOTE
1897 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1898 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1899 break;
1900
1901 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1902 num_insns++;
1903
1904 if (last_test_insn && num_insns > 30)
1905 break;
1906
1907 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1908 && SET_DEST (PATTERN (insn)) == pc_rtx
1909 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1910 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1911 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1912 == loop_stack->data.loop.end_label))
1913 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1914 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1915 == loop_stack->data.loop.end_label))))
1916 last_test_insn = insn;
1917
1918 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1919 && GET_CODE (PATTERN (insn)) == SET
1920 && SET_DEST (PATTERN (insn)) == pc_rtx
1921 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1922 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1923 == loop_stack->data.loop.end_label))
1924 /* Include BARRIER. */
1925 last_test_insn = NEXT_INSN (insn);
1926 }
1927
1928 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1929 {
1930 /* We found one. Move everything from there up
1931 to the end of the loop, and add a jump into the loop
1932 to jump to there. */
1933 register rtx newstart_label = gen_label_rtx ();
1934 register rtx start_move = start_label;
1935
1936 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1937 then we want to move this note also. */
1938 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1939 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1940 == NOTE_INSN_LOOP_CONT))
1941 start_move = PREV_INSN (start_move);
1942
1943 emit_label_after (newstart_label, PREV_INSN (start_move));
1944 reorder_insns (start_move, last_test_insn, get_last_insn ());
1945 emit_jump_insn_after (gen_jump (start_label),
1946 PREV_INSN (newstart_label));
1947 emit_barrier_after (PREV_INSN (newstart_label));
1948 start_label = newstart_label;
1949 }
1950 }
1951
1952 emit_jump (start_label);
1953 emit_note (0, NOTE_INSN_LOOP_END);
1954 emit_label (loop_stack->data.loop.end_label);
1955
1956 POPSTACK (loop_stack);
1957
1958 last_expr_type = 0;
1959}
1960
1961/* Generate a jump to the current loop's continue-point.
1962 This is usually the top of the loop, but may be specified
1963 explicitly elsewhere. If not currently inside a loop,
1964 return 0 and do nothing; caller will print an error message. */
1965
1966int
1967expand_continue_loop (whichloop)
1968 struct nesting *whichloop;
1969{
1970 last_expr_type = 0;
1971 if (whichloop == 0)
1972 whichloop = loop_stack;
1973 if (whichloop == 0)
1974 return 0;
1975 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1976 return 1;
1977}
1978
1979/* Generate a jump to exit the current loop. If not currently inside a loop,
1980 return 0 and do nothing; caller will print an error message. */
1981
1982int
1983expand_exit_loop (whichloop)
1984 struct nesting *whichloop;
1985{
1986 last_expr_type = 0;
1987 if (whichloop == 0)
1988 whichloop = loop_stack;
1989 if (whichloop == 0)
1990 return 0;
1991 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1992 return 1;
1993}
1994
1995/* Generate a conditional jump to exit the current loop if COND
1996 evaluates to zero. If not currently inside a loop,
1997 return 0 and do nothing; caller will print an error message. */
1998
1999int
2000expand_exit_loop_if_false (whichloop, cond)
2001 struct nesting *whichloop;
2002 tree cond;
2003{
2004 last_expr_type = 0;
2005 if (whichloop == 0)
2006 whichloop = loop_stack;
2007 if (whichloop == 0)
2008 return 0;
2009 do_jump (cond, whichloop->data.loop.end_label, NULL);
2010 return 1;
2011}
2012
2013/* Return non-zero if we should preserve sub-expressions as separate
2014 pseudos. We never do so if we aren't optimizing. We always do so
2015 if -fexpensive-optimizations.
2016
2017 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2018 the loop may still be a small one. */
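/* E.g. (hypothetical numbers): with 30 non-fixed registers, we keep
   preserving subexpressions only while the current insn's UID is
   within 30 * 3 = 90 of the loop's start label--a cheap proxy for
   "this loop may still turn out to be small".  */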
2019
2020int
2021preserve_subexpressions_p ()
2022{
2023 rtx insn;
2024
2025 if (flag_expensive_optimizations)
2026 return 1;
2027
2028 if (optimize == 0 || loop_stack == 0)
2029 return 0;
2030
2031 insn = get_last_insn_anywhere ();
2032
2033 return (insn
2034 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2035 < n_non_fixed_regs * 3));
2036
2037}
2038
2039/* Generate a jump to exit the current loop, conditional, binding contour
2040 or case statement. Not all such constructs are visible to this function,
2041 only those started with EXIT_FLAG nonzero. Individual languages use
2042 the EXIT_FLAG parameter to control which kinds of constructs you can
2043 exit this way.
2044
2045 If not currently inside anything that can be exited,
2046 return 0 and do nothing; caller will print an error message. */
2047
2048int
2049expand_exit_something ()
2050{
2051 struct nesting *n;
2052 last_expr_type = 0;
2053 for (n = nesting_stack; n; n = n->all)
2054 if (n->exit_label != 0)
2055 {
2056 expand_goto_internal (0, n->exit_label, 0);
2057 return 1;
2058 }
2059
2060 return 0;
2061}
2062\f
2063/* Generate RTL to return from the current function, with no value.
2064 (That is, we do not do anything about returning any value.) */
2065
2066void
2067expand_null_return ()
2068{
2069 struct nesting *block = block_stack;
2070 rtx last_insn = 0;
2071
2072 /* Does any pending block have cleanups? */
2073
2074 while (block && block->data.block.cleanups == 0)
2075 block = block->next;
2076
2077 /* If yes, use a goto to return, since that runs cleanups. */
2078
2079 expand_null_return_1 (last_insn, block != 0);
2080}
2081
2082/* Generate RTL to return from the current function, with value VAL. */
2083
2084void
2085expand_value_return (val)
2086 rtx val;
2087{
2088 struct nesting *block = block_stack;
2089 rtx last_insn = get_last_insn ();
2090 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2091
2092 /* Copy the value to the return location
2093 unless it's already there. */
2094
2095 if (return_reg != val)
2096 emit_move_insn (return_reg, val);
2097 if (GET_CODE (return_reg) == REG
2098 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2099 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2100
2101 /* Does any pending block have cleanups? */
2102
2103 while (block && block->data.block.cleanups == 0)
2104 block = block->next;
2105
2106 /* If yes, use a goto to return, since that runs cleanups.
2107 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2108
2109 expand_null_return_1 (last_insn, block != 0);
2110}
2111
2112/* Output a return with no value. If LAST_INSN is nonzero,
2113 pretend that the return takes place after LAST_INSN.
2114 If USE_GOTO is nonzero then don't use a return instruction;
2115 go to the return label instead. This causes any cleanups
2116 of pending blocks to be executed normally. */
2117
2118static void
2119expand_null_return_1 (last_insn, use_goto)
2120 rtx last_insn;
2121 int use_goto;
2122{
2123 rtx end_label = cleanup_label ? cleanup_label : return_label;
2124
2125 clear_pending_stack_adjust ();
2126 do_pending_stack_adjust ();
2127 last_expr_type = 0;
2128
2129 /* PCC-struct return always uses an epilogue. */
2130 if (current_function_returns_pcc_struct || use_goto)
2131 {
2132 if (end_label == 0)
2133 end_label = return_label = gen_label_rtx ();
2134 expand_goto_internal (0, end_label, last_insn);
2135 return;
2136 }
2137
2138 /* Otherwise output a simple return-insn if one is available,
2139 unless it won't do the job. */
2140#ifdef HAVE_return
2141 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2142 {
2143 emit_jump_insn (gen_return ());
2144 emit_barrier ();
2145 return;
2146 }
2147#endif
2148
2149 /* Otherwise jump to the epilogue. */
2150 expand_goto_internal (0, end_label, last_insn);
2151}
2152\f
2153/* Generate RTL to evaluate the expression RETVAL and return it
2154 from the current function. */
2155
2156void
2157expand_return (retval)
2158 tree retval;
2159{
2160 /* If there are any cleanups to be performed, then they will
2161 be inserted following LAST_INSN. It is desirable
2162 that the last_insn, for such purposes, should be the
2163 last insn before computing the return value. Otherwise, cleanups
2164 which call functions can clobber the return value. */
2165 /* ??? rms: I think that is erroneous, because in C++ it would
2166 run destructors on variables that might be used in the subsequent
2167 computation of the return value. */
2168 rtx last_insn = 0;
2169 register rtx val = 0;
2170 register rtx op0;
2171 tree retval_rhs;
2172 int cleanups;
2173 struct nesting *block;
2174
2175 /* If function wants no value, give it none. */
2176 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2177 {
2178 expand_expr (retval, 0, VOIDmode, 0);
2179 expand_null_return ();
2180 return;
2181 }
2182
2183 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2184 cleanups = any_pending_cleanups (1);
2185
2186 if (TREE_CODE (retval) == RESULT_DECL)
2187 retval_rhs = retval;
2188 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2189 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2190 retval_rhs = TREE_OPERAND (retval, 1);
2191 else if (TREE_TYPE (retval) == void_type_node)
2192 /* Recognize tail-recursive call to void function. */
2193 retval_rhs = retval;
2194 else
2195 retval_rhs = NULL_TREE;
2196
2197 /* Only use `last_insn' if there are cleanups which must be run. */
2198 if (cleanups || cleanup_label != 0)
2199 last_insn = get_last_insn ();
2200
2201 /* Distribute return down conditional expr if either of the sides
2202 may involve tail recursion (see test below). This enhances the number
2203 of tail recursions we see. Don't do this always since it can produce
2204 sub-optimal code in some cases and we distribute assignments into
2205 conditional expressions when it would help. */
2206
2207 if (optimize && retval_rhs != 0
2208 && frame_offset == 0
2209 && TREE_CODE (retval_rhs) == COND_EXPR
2210 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2211 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2212 {
2213 rtx label = gen_label_rtx ();
2214 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2215 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2216 DECL_RESULT (current_function_decl),
2217 TREE_OPERAND (retval_rhs, 1)));
2218 emit_label (label);
2219 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2220 DECL_RESULT (current_function_decl),
2221 TREE_OPERAND (retval_rhs, 2)));
2222 return;
2223 }
2224
2225 /* For tail-recursive call to current function,
2226 just jump back to the beginning.
2227 It's unsafe if any auto variable in this function
2228 has its address taken; for simplicity,
2229 require stack frame to be empty. */
2230 if (optimize && retval_rhs != 0
2231 && frame_offset == 0
2232 && TREE_CODE (retval_rhs) == CALL_EXPR
2233 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2234 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2235 /* Finish checking validity, and if valid emit code
2236 to set the argument variables for the new call. */
2237 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2238 DECL_ARGUMENTS (current_function_decl)))
2239 {
2240 if (tail_recursion_label == 0)
2241 {
2242 tail_recursion_label = gen_label_rtx ();
2243 emit_label_after (tail_recursion_label,
2244 tail_recursion_reentry);
2245 }
2246 expand_goto_internal (0, tail_recursion_label, last_insn);
2247 emit_barrier ();
2248 return;
2249 }
2250#ifdef HAVE_return
2251 /* This optimization is safe if there are local cleanups
2252 because expand_null_return takes care of them.
2253 ??? I think it should also be safe when there is a cleanup label,
2254 because expand_null_return takes care of them, too.
2255 Any reason why not? */
2256 if (HAVE_return && cleanup_label == 0
2257 && ! current_function_returns_pcc_struct)
2258 {
2259 /* If this is return x == y; then generate
2260 if (x == y) return 1; else return 0;
2261 if we can do it with explicit return insns. */
2262 if (retval_rhs)
2263 switch (TREE_CODE (retval_rhs))
2264 {
2265 case EQ_EXPR:
2266 case NE_EXPR:
2267 case GT_EXPR:
2268 case GE_EXPR:
2269 case LT_EXPR:
2270 case LE_EXPR:
2271 case TRUTH_ANDIF_EXPR:
2272 case TRUTH_ORIF_EXPR:
2273 case TRUTH_AND_EXPR:
2274 case TRUTH_OR_EXPR:
2275 case TRUTH_NOT_EXPR:
2276 op0 = gen_label_rtx ();
2277 jumpifnot (retval_rhs, op0);
2278 expand_value_return (const1_rtx);
2279 emit_label (op0);
2280 expand_value_return (const0_rtx);
2281 return;
2282 }
2283 }
2284#endif /* HAVE_return */
2285
2286 if (cleanups
2287 && retval_rhs != 0
2288 && TREE_TYPE (retval_rhs) != void_type_node
2289 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2290 {
2291 /* Calculate the return value into a pseudo reg. */
2292 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2293 emit_queue ();
2294 /* All temporaries have now been used. */
2295 free_temp_slots ();
2296 /* Return the calculated value, doing cleanups first. */
2297 expand_value_return (val);
2298 }
2299 else
2300 {
2301 /* No cleanups or no hard reg used;
2302 calculate value into hard return reg. */
2303 expand_expr (retval, 0, VOIDmode, 0);
2304 emit_queue ();
2305 free_temp_slots ();
2306 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2307 }
2308}
2309
2310/* Return 1 if the end of the generated RTX is not a barrier.
2311 This means code already compiled can drop through. */
2312
2313int
2314drop_through_at_end_p ()
2315{
2316 rtx insn = get_last_insn ();
2317 while (insn && GET_CODE (insn) == NOTE)
2318 insn = PREV_INSN (insn);
2319 return insn && GET_CODE (insn) != BARRIER;
2320}
2321\f
2322/* Emit code to alter this function's formal parms for a tail-recursive call.
2323 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2324 FORMALS is the chain of decls of formals.
2325 Return 1 if this can be done;
2326 otherwise return 0 and do not emit any code. */
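/* Hedged example of the overall effect (sketch only): for

	int f (x) int x; { if (x == 0) return 0; return f (x - 1); }

   expand_return uses this function to assign the value of `x - 1' to
   the formal X and jump back to tail_recursion_label, instead of
   emitting a real recursive call.  */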
2327
2328static int
2329tail_recursion_args (actuals, formals)
2330 tree actuals, formals;
2331{
2332 register tree a = actuals, f = formals;
2333 register int i;
2334 register rtx *argvec;
2335
2336 /* Check that number and types of actuals are compatible
2337 with the formals. This is not always true in valid C code.
2338 Also check that no formal needs to be addressable
2339 and that all formals are scalars. */
2340
2341 /* Also count the args. */
2342
2343 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2344 {
2345 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2346 return 0;
2347 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2348 return 0;
2349 }
2350 if (a != 0 || f != 0)
2351 return 0;
2352
2353 /* Compute all the actuals. */
2354
2355 argvec = (rtx *) alloca (i * sizeof (rtx));
2356
2357 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2358 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2359
2360 /* Find which actual values refer to current values of previous formals.
2361 Copy each of them now, before any formal is changed. */
2362
2363 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2364 {
2365 int copy = 0;
2366 register int j;
2367 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2368 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2369 { copy = 1; break; }
2370 if (copy)
2371 argvec[i] = copy_to_reg (argvec[i]);
2372 }
2373
2374 /* Store the values of the actuals into the formals. */
2375
2376 for (f = formals, a = actuals, i = 0; f;
2377 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2378 {
2379 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2380 emit_move_insn (DECL_RTL (f), argvec[i]);
2381 else
2382 convert_move (DECL_RTL (f), argvec[i],
2383 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2384 }
2385
2386 free_temp_slots ();
2387 return 1;
2388}
2389\f
2390/* Generate the RTL code for entering a binding contour.
2391 The variables are declared one by one, by calls to `expand_decl'.
2392
2393 EXIT_FLAG is nonzero if this construct should be visible to
2394 `exit_something'. */
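/* Illustrative calling sequence for a brace-enclosed block (a sketch,
   not from any particular front end):

	expand_start_bindings (0);
	expand_decl (decl); expand_decl_init (decl);	(for each decl)
	... expand the statements ...
	expand_end_bindings (decls, 1, 0);  */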
2395
2396void
2397expand_start_bindings (exit_flag)
2398 int exit_flag;
2399{
2400 struct nesting *thisblock = ALLOC_NESTING ();
2401
2402 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2403
2404 /* Make an entry on block_stack for the block we are entering. */
2405
2406 thisblock->next = block_stack;
2407 thisblock->all = nesting_stack;
2408 thisblock->depth = ++nesting_depth;
2409 thisblock->data.block.stack_level = 0;
2410 thisblock->data.block.cleanups = 0;
2411 thisblock->data.block.function_call_count = 0;
2412#if 0
2413 if (block_stack)
2414 {
2415 if (block_stack->data.block.cleanups == NULL_TREE
2416 && (block_stack->data.block.outer_cleanups == NULL_TREE
2417 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2418 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2419 else
2420 thisblock->data.block.outer_cleanups
2421 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2422 block_stack->data.block.outer_cleanups);
2423 }
2424 else
2425 thisblock->data.block.outer_cleanups = 0;
2426#endif
2427#if 1
2428 if (block_stack
2429 && !(block_stack->data.block.cleanups == NULL_TREE
2430 && block_stack->data.block.outer_cleanups == NULL_TREE))
2431 thisblock->data.block.outer_cleanups
2432 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2433 block_stack->data.block.outer_cleanups);
2434 else
2435 thisblock->data.block.outer_cleanups = 0;
2436#endif
2437 thisblock->data.block.label_chain = 0;
2438 thisblock->data.block.innermost_stack_block = stack_block_stack;
2439 thisblock->data.block.first_insn = note;
2440 thisblock->data.block.block_start_count = ++block_start_count;
2441 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2442 block_stack = thisblock;
2443 nesting_stack = thisblock;
2444
2445 /* Make a new level for allocating stack slots. */
2446 push_temp_slots ();
2447}
2448
2449/* Generate RTL code to terminate a binding contour.
2450 VARS is the chain of VAR_DECL nodes
2451 for the variables bound in this contour.
2452 MARK_ENDS is nonzero if we should put a note at the beginning
2453 and end of this binding contour.
2454
2455 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2456 (That is true automatically if the contour has a saved stack level.) */
2457
2458void
2459expand_end_bindings (vars, mark_ends, dont_jump_in)
2460 tree vars;
2461 int mark_ends;
2462 int dont_jump_in;
2463{
2464 register struct nesting *thisblock = block_stack;
2465 register tree decl;
2466
2467 if (warn_unused)
2468 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2469 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2470 warning_with_decl (decl, "unused variable `%s'");
2471
2472 /* Mark the beginning and end of the scope if requested. */
2473
2474 if (mark_ends)
2475 emit_note (0, NOTE_INSN_BLOCK_END);
2476 else
2477 /* Get rid of the beginning-mark if we don't make an end-mark. */
2478 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2479
2480 if (thisblock->exit_label)
2481 {
2482 do_pending_stack_adjust ();
2483 emit_label (thisblock->exit_label);
2484 }
2485
2486 /* If necessary, make a handler for nonlocal gotos taking
2487 place in the function calls in this block. */
2488 if (function_call_count != thisblock->data.block.function_call_count
2489 && nonlocal_labels
2490 /* Make handler for outermost block
2491 if there were any nonlocal gotos to this function. */
2492 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2493 /* Make handler for inner block if it has something
2494 special to do when you jump out of it. */
2495 : (thisblock->data.block.cleanups != 0
2496 || thisblock->data.block.stack_level != 0)))
2497 {
2498 tree link;
2499 rtx afterward = gen_label_rtx ();
2500 rtx handler_label = gen_label_rtx ();
2501 rtx save_receiver = gen_reg_rtx (Pmode);
2502
2503 /* Don't let jump_optimize delete the handler. */
2504 LABEL_PRESERVE_P (handler_label) = 1;
2505
2506 /* Record the handler address in the stack slot for that purpose,
2507 during this block, saving and restoring the outer value. */
2508 if (thisblock->next != 0)
2509 {
2510 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2511 emit_insn_before (gen_move_insn (save_receiver,
2512 nonlocal_goto_handler_slot),
2513 thisblock->data.block.first_insn);
2514 }
2515 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2516 gen_rtx (LABEL_REF, Pmode,
2517 handler_label)),
2518 thisblock->data.block.first_insn);
2519
2520 /* Jump around the handler; it runs only when specially invoked. */
2521 emit_jump (afterward);
2522 emit_label (handler_label);
2523
2524#ifdef HAVE_nonlocal_goto
2525 if (! HAVE_nonlocal_goto)
2526#endif
2527 /* First adjust our frame pointer to its actual value. It was
2528 previously set to the start of the virtual area corresponding to
2529 the stacked variables when we branched here and now needs to be
2530 adjusted to the actual hardware fp value.
2531
2532 Assignments to virtual registers are converted by
2533 instantiate_virtual_regs into the corresponding assignment
2534 to the underlying register (fp in this case) that makes
2535 the original assignment true.
2536 So the following insn will actually be
2537 decrementing fp by STARTING_FRAME_OFFSET. */
2538 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2539
2540#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2541 if (fixed_regs[ARG_POINTER_REGNUM])
2542 {
2543 /* Now restore our arg pointer from the address at which it was saved
2544 in our stack frame.
2545 If there hasn't been space allocated for it yet, make some now. */
2546 if (arg_pointer_save_area == 0)
2547 arg_pointer_save_area
2548 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2549 emit_move_insn (virtual_incoming_args_rtx,
2550 /* We need a pseudo here,
2551 or else instantiate_virtual_regs_1 complains. */
2552 copy_to_reg (arg_pointer_save_area));
2553 }
2554#endif
2555
2556 /* The handler expects the desired label address in the static chain
2557 register. It tests the address and does an appropriate jump
2558 to whatever label is desired. */
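      /* I.e., roughly (sketch):
	   if (chain == &lab1) goto lab1;
	   else if (chain == &lab2) goto lab2;
	   else abort ();  */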
2559 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2560 /* Skip any labels we shouldn't be able to jump to from here. */
2561 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2562 {
2563 rtx not_this = gen_label_rtx ();
2564 rtx this = gen_label_rtx ();
2565 do_jump_if_equal (static_chain_rtx,
2566 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2567 this, 0);
2568 emit_jump (not_this);
2569 emit_label (this);
2570 expand_goto (TREE_VALUE (link));
2571 emit_label (not_this);
2572 }
2573 /* If label is not recognized, abort. */
2574 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2575 VOIDmode, 0);
2576 emit_label (afterward);
2577 }
2578
2579 /* Don't allow jumping into a block that has cleanups or a stack level. */
2580 if (dont_jump_in
2581 || thisblock->data.block.stack_level != 0
2582 || thisblock->data.block.cleanups != 0)
2583 {
2584 struct label_chain *chain;
2585
2586 /* Any labels in this block are no longer valid to go to.
2587 Mark them to cause an error message. */
2588 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2589 {
2590 DECL_TOO_LATE (chain->label) = 1;
2591 /* If any goto without a fixup came to this label,
2592 that must be an error, because gotos without fixups
2593 come from outside all saved stack-levels and all cleanups. */
2594 if (TREE_ADDRESSABLE (chain->label))
2595 error_with_decl (chain->label,
2596 "label `%s' used before containing binding contour");
2597 }
2598 }
2599
2600 /* Restore stack level in effect before the block
2601 (only if variable-size objects allocated). */
2602 /* Perform any cleanups associated with the block. */
2603
2604 if (thisblock->data.block.stack_level != 0
2605 || thisblock->data.block.cleanups != 0)
2606 {
2607 /* Don't let cleanups affect ({...}) constructs. */
2608 int old_expr_stmts_for_value = expr_stmts_for_value;
2609 rtx old_last_expr_value = last_expr_value;
2610 tree old_last_expr_type = last_expr_type;
2611 expr_stmts_for_value = 0;
2612
2613 /* Do the cleanups. */
2614 expand_cleanups (thisblock->data.block.cleanups, 0);
2615 do_pending_stack_adjust ();
2616
2617 expr_stmts_for_value = old_expr_stmts_for_value;
2618 last_expr_value = old_last_expr_value;
2619 last_expr_type = old_last_expr_type;
2620
2621 /* Restore the stack level. */
2622
2623 if (thisblock->data.block.stack_level != 0)
2624 {
2625 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2626 thisblock->data.block.stack_level, 0);
2627 if (nonlocal_goto_handler_slot != 0)
2628 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2629 }
2630
2631 /* Any gotos out of this block must also do these things.
2632 Also report any gotos with fixups that came to labels in this
2633 level. */
2634 fixup_gotos (thisblock,
2635 thisblock->data.block.stack_level,
2636 thisblock->data.block.cleanups,
2637 thisblock->data.block.first_insn,
2638 dont_jump_in);
2639 }
2640
2641 /* If doing stupid register allocation, make sure lives of all
2642 register variables declared here extend thru end of scope. */
2643
2644 if (obey_regdecls)
2645 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2646 {
2647 rtx rtl = DECL_RTL (decl);
2648 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2649 use_variable (rtl);
2650 }
2651
2652 /* Restore block_stack level for containing block. */
2653
2654 stack_block_stack = thisblock->data.block.innermost_stack_block;
2655 POPSTACK (block_stack);
2656
2657 /* Pop the stack slot nesting and free any slots at this level. */
2658 pop_temp_slots ();
2659}
2660\f
2661/* Generate RTL for the automatic variable declaration DECL.
2662 (Other kinds of declarations are simply ignored if seen here.)
2663 CLEANUP is an expression to be executed at exit from this binding contour;
2664 for example, in C++, it might call the destructor for this variable.
2665
2666 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2667 either before or after calling `expand_decl' but before compiling
2668 any subsequent expressions. This is because CLEANUP may be expanded
2669 more than once, on different branches of execution.
2670 For the same reason, CLEANUP may not contain a CALL_EXPR
2671 except as its topmost node--else `preexpand_calls' would get confused.
2672
2673 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2674 that is not associated with any particular variable.
2675
2676 There is no special support here for C++ constructors.
2677 They should be handled by the proper code in DECL_INITIAL. */
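/* For example (illustrative only):

	int i;		scalar, not addressable => pseudo REG
	char buf[10];	fixed size, BLKmode => stack slot (MEM)
	char v[n];	variable size => MEM at a dynamically pushed address

   corresponding to the main cases handled below.  */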
2678
2679void
2680expand_decl (decl)
2681 register tree decl;
2682{
2683 struct nesting *thisblock = block_stack;
2684 tree type = TREE_TYPE (decl);
2685
2686 /* Only automatic variables need any expansion done.
2687 Static and external variables, and external functions,
2688 will be handled by `assemble_variable' (called from finish_decl).
2689 TYPE_DECL and CONST_DECL require nothing.
2690 PARM_DECLs are handled in `assign_parms'. */
2691
2692 if (TREE_CODE (decl) != VAR_DECL)
2693 return;
2694 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2695 return;
2696
2697 /* Create the RTL representation for the variable. */
2698
2699 if (type == error_mark_node)
2700 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2701 else if (DECL_SIZE (decl) == 0)
2702 /* Variable with incomplete type. */
2703 {
2704 if (DECL_INITIAL (decl) == 0)
2705 /* Error message was already done; now avoid a crash. */
2706 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2707 else
2708 /* An initializer is going to decide the size of this array.
2709 Until we know the size, represent its address with a reg. */
2710 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2711 }
2712 else if (DECL_MODE (decl) != BLKmode
2713 /* If -ffloat-store, don't put explicit float vars
2714 into regs. */
2715 && !(flag_float_store
2716 && TREE_CODE (type) == REAL_TYPE)
2717 && ! TREE_THIS_VOLATILE (decl)
2718 && ! TREE_ADDRESSABLE (decl)
2719 && (TREE_REGDECL (decl) || ! obey_regdecls))
2720 {
2721 /* Automatic variable that can go in a register. */
2722 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2723 if (TREE_CODE (type) == POINTER_TYPE)
2724 mark_reg_pointer (DECL_RTL (decl));
2725 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2726 }
2727 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2728 {
2729 /* Variable of fixed size that goes on the stack. */
2730 rtx oldaddr = 0;
2731 rtx addr;
2732
2733 /* If we previously made RTL for this decl, it must be an array
2734 whose size was determined by the initializer.
2735 The old address was a register; set that register now
2736 to the proper address. */
2737 if (DECL_RTL (decl) != 0)
2738 {
2739 if (GET_CODE (DECL_RTL (decl)) != MEM
2740 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2741 abort ();
2742 oldaddr = XEXP (DECL_RTL (decl), 0);
2743 }
2744
2745 DECL_RTL (decl)
2746 = assign_stack_temp (DECL_MODE (decl),
2747 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2748 + BITS_PER_UNIT - 1)
2749 / BITS_PER_UNIT),
2750 1);
2751
2752 /* Set alignment we actually gave this decl. */
2753 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2754 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2755
2756 if (oldaddr)
2757 {
2758 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2759 if (addr != oldaddr)
2760 emit_move_insn (oldaddr, addr);
2761 }
2762
2763 /* If this is a memory ref that contains aggregate components,
2764 mark it as such for cse and loop optimize. */
2765 MEM_IN_STRUCT_P (DECL_RTL (decl))
2766 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2767 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2768 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2769#if 0
2770 /* If this is in memory because of -ffloat-store,
2771 set the volatile bit, to prevent optimizations from
2772 undoing the effects. */
2773 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2774 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2775#endif
2776 }
2777 else
2778 /* Dynamic-size object: must push space on the stack. */
2779 {
2780 rtx address, size;
2781
2782 /* Record the stack pointer on entry to block, if we have
2783 not already done so. */
2784 if (thisblock->data.block.stack_level == 0)
2785 {
2786 do_pending_stack_adjust ();
2787 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2788 &thisblock->data.block.stack_level,
2789 thisblock->data.block.first_insn);
2790 stack_block_stack = thisblock;
2791 }
2792
2793 /* Compute the variable's size, in bytes. */
2794 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2795 DECL_SIZE (decl),
2796 size_int (BITS_PER_UNIT)),
2797 0, VOIDmode, 0);
2798 free_temp_slots ();
2799
2800 /* This is equivalent to calling alloca. */
2801 current_function_calls_alloca = 1;
2802
2803 /* Allocate space on the stack for the variable. */
2804 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2805
2806 if (nonlocal_goto_handler_slot != 0)
2807 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2808
2809 /* Reference the variable indirect through that rtx. */
2810 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2811
2812 /* If this is a memory ref that contains aggregate components,
2813 mark it as such for cse and loop optimize. */
2814 MEM_IN_STRUCT_P (DECL_RTL (decl))
2815 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2816 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2817 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2818
2819 /* Indicate the alignment we actually gave this variable. */
2820#ifdef STACK_BOUNDARY
2821 DECL_ALIGN (decl) = STACK_BOUNDARY;
2822#else
2823 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2824#endif
2825 }
2826
2827 if (TREE_THIS_VOLATILE (decl))
2828 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2829 if (TREE_READONLY (decl))
2830 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2831
2832 /* If doing stupid register allocation, make sure life of any
2833 register variable starts here, at the start of its scope. */
2834
2835 if (obey_regdecls)
2836 use_variable (DECL_RTL (decl));
2837}
2838\f
2839/* Emit code to perform the initialization of a declaration DECL. */
2840
2841void
2842expand_decl_init (decl)
2843 tree decl;
2844{
2845 int was_used = TREE_USED (decl);
2846
2847 if (TREE_STATIC (decl))
2848 return;
2849
2850 /* Compute and store the initial value now. */
2851
2852 if (DECL_INITIAL (decl) == error_mark_node)
2853 {
2854 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2855 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2856 || code == POINTER_TYPE)
2857 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2858 0, 0);
2859 emit_queue ();
2860 }
2861 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2862 {
2863 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2864 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2865 emit_queue ();
2866 }
2867
2868 /* Don't let the initialization count as "using" the variable. */
2869 TREE_USED (decl) = was_used;
2870
2871 /* Free any temporaries we made while initializing the decl. */
2872 free_temp_slots ();
2873}
2874
2875/* CLEANUP is an expression to be executed at exit from this binding contour;
2876 for example, in C++, it might call the destructor for this variable.
2877
2878 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2879 either before or after calling `expand_decl' but before compiling
2880 any subsequent expressions. This is because CLEANUP may be expanded
2881 more than once, on different branches of execution.
2882 For the same reason, CLEANUP may not contain a CALL_EXPR
2883 except as its topmost node--else `preexpand_calls' would get confused.
2884
2885 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2886 that is not associated with any particular variable. */
2887
2888int
2889expand_decl_cleanup (decl, cleanup)
2890 tree decl, cleanup;
2891{
2892 struct nesting *thisblock = block_stack;
2893
2894 /* Error if we are not in any block. */
2895 if (thisblock == 0)
2896 return 0;
2897
2898 /* Record the cleanup if there is one. */
2899
2900 if (cleanup != 0)
2901 {
2902 thisblock->data.block.cleanups
2903 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2904 /* If this block has a cleanup, it belongs in stack_block_stack. */
2905 stack_block_stack = thisblock;
2906 }
2907 return 1;
2908}
2909\f
2910/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2911 DECL_ELTS is the list of elements that belong to DECL's type.
2912 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2913
2914void
2915expand_anon_union_decl (decl, cleanup, decl_elts)
2916 tree decl, cleanup, decl_elts;
2917{
2918 struct nesting *thisblock = block_stack;
2919 rtx x;
2920
2921 expand_decl (decl); expand_decl_cleanup (decl, cleanup);
2922 x = DECL_RTL (decl);
2923
2924 while (decl_elts)
2925 {
2926 tree decl_elt = TREE_VALUE (decl_elts);
2927 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2928 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2929
2930 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2931 instead create a new MEM rtx with the proper mode. */
2932 if (GET_CODE (x) == MEM)
2933 {
2934 if (mode == GET_MODE (x))
2935 DECL_RTL (decl_elt) = x;
2936 else
2937 {
2938 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2939 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2940 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2941 }
2942 }
2943 else if (GET_CODE (x) == REG)
2944 {
2945 if (mode == GET_MODE (x))
2946 DECL_RTL (decl_elt) = x;
2947 else
2948 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2949 }
2950 else
2951 abort ();
2952
2953 /* Record the cleanup if there is one. */
2954
2955 if (cleanup != 0)
2956 thisblock->data.block.cleanups
2957 = temp_tree_cons (decl_elt, cleanup_elt,
2958 thisblock->data.block.cleanups);
2959
2960 decl_elts = TREE_CHAIN (decl_elts);
2961 }
2962}
2963\f
2964/* Expand a list of cleanups LIST.
2965 Elements may be expressions or may be nested lists.
2966
2967 If DONT_DO is nonnull, then any list-element
2968 whose TREE_PURPOSE matches DONT_DO is omitted.
2969 This is sometimes used to avoid a cleanup associated with
2970 a value that is being returned out of the scope. */
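/* Sketch of the list structure handled here: a contour's cleanups
   might look like

	tree_cons (decl1, cleanup1,
		   tree_cons (NULL_TREE, sublist, NULL_TREE))

   where SUBLIST is itself such a TREE_LIST; we recurse into it.  */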
2971
2972static void
2973expand_cleanups (list, dont_do)
2974 tree list;
2975 tree dont_do;
2976{
2977 tree tail;
2978 for (tail = list; tail; tail = TREE_CHAIN (tail))
2979 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2980 {
2981 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2982 expand_cleanups (TREE_VALUE (tail), dont_do);
2983 else
2984 {
2985 /* Cleanups may be run multiple times. For example,
2986 when exiting a binding contour, we expand the
2987 cleanups associated with that contour. When a goto
2988 within that binding contour has a target outside that
2989 contour, it will expand all cleanups from its scope to
2990 the target. Though the cleanups are expanded multiple
2991 times, the control paths are non-overlapping so the
2992 cleanups will not be executed twice. */
2993 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2994 free_temp_slots ();
2995 }
2996 }
2997}
2998
2999/* Expand a list of cleanups for a goto fixup.
3000 The expansion is put into the insn chain after the insn *BEFORE_JUMP
3001 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
3002
3003static void
3004fixup_cleanups (list, before_jump)
3005 tree list;
3006 rtx *before_jump;
3007{
3008 rtx beyond_jump = get_last_insn ();
3009 rtx new_before_jump;
3010
3011 expand_cleanups (list, 0);
3012 /* Pop any pushes done in the cleanups,
3013 in case function is about to return. */
3014 do_pending_stack_adjust ();
3015
3016 new_before_jump = get_last_insn ();
3017
3018 if (beyond_jump != new_before_jump)
3019 {
3020 /* If cleanups expand to nothing, don't reorder. */
3021 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
3022 *before_jump = new_before_jump;
3023 }
3024}
3025
3026/* Move all cleanups from the current block_stack
3027 to the containing block_stack, where they are assumed to
3028 have been created. If anything can cause a temporary to
3029 be created, but not expanded for more than one level of
3030 block_stacks, then this code will have to change. */
3031
3032void
3033move_cleanups_up ()
3034{
3035 struct nesting *block = block_stack;
3036 struct nesting *outer = block->next;
3037
3038 outer->data.block.cleanups
3039 = chainon (block->data.block.cleanups,
3040 outer->data.block.cleanups);
3041 block->data.block.cleanups = 0;
3042}
3043
3044tree
3045last_cleanup_this_contour ()
3046{
3047 if (block_stack == 0)
3048 return 0;
3049
3050 return block_stack->data.block.cleanups;
3051}
3052
3053/* Return 1 if there are any pending cleanups at this point.
3054 If THIS_CONTOUR is nonzero, check the current contour as well.
3055 Otherwise, look only at the contours that enclose this one. */
3056
3057int
3058any_pending_cleanups (this_contour)
3059 int this_contour;
3060{
3061 struct nesting *block;
3062
3063 if (block_stack == 0)
3064 return 0;
3065
3066 if (this_contour && block_stack->data.block.cleanups != NULL)
3067 return 1;
3068 if (block_stack->data.block.cleanups == 0
3069 && (block_stack->data.block.outer_cleanups == 0
3070#if 0
3071 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3072#endif
3073 ))
3074 return 0;
3075
3076 for (block = block_stack->next; block; block = block->next)
3077 if (block->data.block.cleanups != 0)
3078 return 1;
3079
3080 return 0;
3081}
3082\f
3083/* Enter a case (Pascal) or switch (C) statement.
3084 Push a block onto case_stack and nesting_stack
3085 to accumulate the case-labels that are seen
3086 and to record the labels generated for the statement.
3087
3088 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3089 Otherwise, this construct is transparent for `exit_something'.
3090
3091 EXPR is the index-expression to be dispatched on.
3092 TYPE is its nominal type. We could simply convert EXPR to this type,
3093 but instead we take short cuts. */
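/* Illustrative calling sequence for `switch (e)' (a sketch; the labels
   are LABEL_DECL trees supplied by the front end):

	expand_start_case (1, e, type, "switch statement");
	pushcase (build_int_2 (1, 0), label1, &dup);
	pushcase_range (lo, hi, label2, &dup);
	pushcase (NULL_TREE, default_label, &dup);	(default)
	expand_end_case (e);  */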
3094
3095void
3096expand_start_case (exit_flag, expr, type, printname)
3097 int exit_flag;
3098 tree expr;
3099 tree type;
3100 char *printname;
3101{
3102 register struct nesting *thiscase = ALLOC_NESTING ();
3103
3104 /* Make an entry on case_stack for the case we are entering. */
3105
3106 thiscase->next = case_stack;
3107 thiscase->all = nesting_stack;
3108 thiscase->depth = ++nesting_depth;
3109 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3110 thiscase->data.case_stmt.case_list = 0;
3111 thiscase->data.case_stmt.index_expr = expr;
3112 thiscase->data.case_stmt.nominal_type = type;
3113 thiscase->data.case_stmt.default_label = 0;
3114 thiscase->data.case_stmt.num_ranges = 0;
3115 thiscase->data.case_stmt.printname = printname;
3116 thiscase->data.case_stmt.seenlabel = 0;
3117 case_stack = thiscase;
3118 nesting_stack = thiscase;
3119
3120 do_pending_stack_adjust ();
3121
3122 /* Make sure case_stmt.start points to something that won't
3123 need any transformation before expand_end_case. */
3124 if (GET_CODE (get_last_insn ()) != NOTE)
3125 emit_note (0, NOTE_INSN_DELETED);
3126
3127 thiscase->data.case_stmt.start = get_last_insn ();
3128}
3129
3130/* Start a "dummy case statement" within which case labels are invalid
3131 and are not connected to any larger real case statement.
3132 This can be used if you don't want to let a case statement jump
3133 into the middle of certain kinds of constructs. */
3134
3135void
3136expand_start_case_dummy ()
3137{
3138 register struct nesting *thiscase = ALLOC_NESTING ();
3139
3140 /* Make an entry on case_stack for the dummy. */
3141
3142 thiscase->next = case_stack;
3143 thiscase->all = nesting_stack;
3144 thiscase->depth = ++nesting_depth;
3145 thiscase->exit_label = 0;
3146 thiscase->data.case_stmt.case_list = 0;
3147 thiscase->data.case_stmt.start = 0;
3148 thiscase->data.case_stmt.nominal_type = 0;
3149 thiscase->data.case_stmt.default_label = 0;
3150 thiscase->data.case_stmt.num_ranges = 0;
3151 case_stack = thiscase;
3152 nesting_stack = thiscase;
3153}
3154
3155/* End a dummy case statement. */
3156
3157void
3158expand_end_case_dummy ()
3159{
3160 POPSTACK (case_stack);
3161}
3162
3163/* Return the data type of the index-expression
3164 of the innermost case statement, or null if none. */
3165
3166tree
3167case_index_expr_type ()
3168{
3169 if (case_stack)
3170 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3171 return 0;
3172}
3173\f
3174/* Accumulate one case or default label inside a case or switch statement.
3175 VALUE is the value of the case (a null pointer, for a default label).
3176
3177 If not currently inside a case or switch statement, return 1 and do
3178 nothing. The caller will print a language-specific error message.
3179 If VALUE is a duplicate or overlaps, return 2 and do nothing
3180 except store the (first) duplicate node in *DUPLICATE.
3181 If VALUE is out of range, return 3 and do nothing.
3182 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3183 Return 0 on success.
3184
3185 Extended to handle range statements. */
3186
3187int
3188pushcase (value, label, duplicate)
3189 register tree value;
3190 register tree label;
3191 tree *duplicate;
3192{
3193 register struct case_node **l;
3194 register struct case_node *n;
3195 tree index_type;
3196 tree nominal_type;
3197
3198 /* Fail if not inside a real case statement. */
3199 if (! (case_stack && case_stack->data.case_stmt.start))
3200 return 1;
3201
3202 if (stack_block_stack
3203 && stack_block_stack->depth > case_stack->depth)
3204 return 5;
3205
3206 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3207 nominal_type = case_stack->data.case_stmt.nominal_type;
3208
3209 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3210 if (index_type == error_mark_node)
3211 return 0;
3212
3213 /* Convert VALUE to the type in which the comparisons are nominally done. */
3214 if (value != 0)
3215 value = convert (nominal_type, value);
3216
3217 /* If this is the first label, warn if any insns have been emitted. */
3218 if (case_stack->data.case_stmt.seenlabel == 0)
3219 {
3220 rtx insn;
3221 for (insn = case_stack->data.case_stmt.start;
3222 insn;
3223 insn = NEXT_INSN (insn))
3224 {
3225 if (GET_CODE (insn) == CODE_LABEL)
3226 break;
3227 if (GET_CODE (insn) != NOTE
3228 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3229 {
3230 warning ("unreachable code at beginning of %s",
3231 case_stack->data.case_stmt.printname);
3232 break;
3233 }
3234 }
3235 }
3236 case_stack->data.case_stmt.seenlabel = 1;
3237
3238 /* Fail if this value is out of range for the actual type of the index
3239 (which may be narrower than NOMINAL_TYPE). */
3240 if (value != 0 && ! int_fits_type_p (value, index_type))
3241 return 3;
3242
3243 /* Fail if this is a duplicate or overlaps another entry. */
3244 if (value == 0)
3245 {
3246 if (case_stack->data.case_stmt.default_label != 0)
3247 {
3248 *duplicate = case_stack->data.case_stmt.default_label;
3249 return 2;
3250 }
3251 case_stack->data.case_stmt.default_label = label;
3252 }
3253 else
3254 {
3255 /* Find the elt in the chain before which to insert the new value,
3256 to keep the chain sorted in increasing order.
3257 But report an error if this element is a duplicate. */
3258 for (l = &case_stack->data.case_stmt.case_list;
3259 /* Keep going past elements distinctly less than VALUE. */
3260 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3261 l = &(*l)->right)
3262 ;
3263 if (*l)
3264 {
3265 /* Element we will insert before must be distinctly greater;
3266 overlap means error. */
3267 if (! tree_int_cst_lt (value, (*l)->low))
3268 {
3269 *duplicate = (*l)->code_label;
3270 return 2;
3271 }
3272 }
3273
3274 /* Add this label to the chain, and succeed.
3275 Copy VALUE so it is on temporary rather than momentary
3276 obstack and will thus survive till the end of the case statement. */
3277 n = (struct case_node *) oballoc (sizeof (struct case_node));
3278 n->left = 0;
3279 n->right = *l;
3280 n->high = n->low = copy_node (value);
3281 n->code_label = label;
3282 *l = n;
3283 }
3284
3285 expand_label (label);
3286 return 0;
3287}
3288
3289/* Like pushcase but this case applies to all values
3290 between VALUE1 and VALUE2 (inclusive).
3291 The return value is the same as that of pushcase
3292 but there is one additional error code:
3293 4 means the specified range was empty. */
3294
3295int
3296pushcase_range (value1, value2, label, duplicate)
3297 register tree value1, value2;
3298 register tree label;
3299 tree *duplicate;
3300{
3301 register struct case_node **l;
3302 register struct case_node *n;
3303 tree index_type;
3304 tree nominal_type;
3305
3306 /* Fail if not inside a real case statement. */
3307 if (! (case_stack && case_stack->data.case_stmt.start))
3308 return 1;
3309
3310 if (stack_block_stack
3311 && stack_block_stack->depth > case_stack->depth)
3312 return 5;
3313
3314 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3315 nominal_type = case_stack->data.case_stmt.nominal_type;
3316
3317 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3318 if (index_type == error_mark_node)
3319 return 0;
3320
3321 /* If this is the first label, warn if any insns have been emitted. */
3322 if (case_stack->data.case_stmt.seenlabel == 0)
3323 {
3324 rtx insn;
3325 for (insn = case_stack->data.case_stmt.start;
3326 insn;
3327 insn = NEXT_INSN (insn))
3328 {
3329 if (GET_CODE (insn) == CODE_LABEL)
3330 break;
3331 if (GET_CODE (insn) != NOTE
3332 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3333 {
3334 warning ("unreachable code at beginning of %s",
3335 case_stack->data.case_stmt.printname);
3336 break;
3337 }
3338 }
3339 }
3340 case_stack->data.case_stmt.seenlabel = 1;
3341
3342 /* Convert VALUEs to type in which the comparisons are nominally done. */
3343 if (value1 == 0) /* Negative infinity. */
3344 value1 = TYPE_MIN_VALUE (index_type);
3345 value1 = convert (nominal_type, value1);
3346
3347 if (value2 == 0) /* Positive infinity. */
3348 value2 = TYPE_MAX_VALUE (index_type);
3349 value2 = convert (nominal_type, value2);
3350
3351 /* Fail if these values are out of range. */
3352 if (! int_fits_type_p (value1, index_type))
3353 return 3;
3354
3355 if (! int_fits_type_p (value2, index_type))
3356 return 3;
3357
3358 /* Fail if the range is empty. */
3359 if (tree_int_cst_lt (value2, value1))
3360 return 4;
3361
3362 /* If the bounds are equal, turn this into the one-value case. */
3363 if (tree_int_cst_equal (value1, value2))
3364 return pushcase (value1, label, duplicate);
3365
3366 /* Find the elt in the chain before which to insert the new value,
3367 to keep the chain sorted in increasing order.
3368 But report an error if this element is a duplicate. */
3369 for (l = &case_stack->data.case_stmt.case_list;
3370 /* Keep going past elements distinctly less than this range. */
3371 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3372 l = &(*l)->right)
3373 ;
3374 if (*l)
3375 {
3376 /* Element we will insert before must be distinctly greater;
3377 overlap means error. */
3378 if (! tree_int_cst_lt (value2, (*l)->low))
3379 {
3380 *duplicate = (*l)->code_label;
3381 return 2;
3382 }
3383 }
3384
3385 /* Add this label to the chain, and succeed.
3386 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3387 obstack and will thus survive till the end of the case statement. */
3388
3389 n = (struct case_node *) oballoc (sizeof (struct case_node));
3390 n->left = 0;
3391 n->right = *l;
3392 n->low = copy_node (value1);
3393 n->high = copy_node (value2);
3394 n->code_label = label;
3395 *l = n;
3396
3397 expand_label (label);
3398
3399 case_stack->data.case_stmt.num_ranges++;
3400
3401 return 0;
3402}
3403\f
3404/* Called when the index of a switch statement is an enumerated type
3405 and there is no default label.
3406
3407 Checks that all enumeration literals are covered by the case
3408 expressions of a switch. Also, warn if there are any extra
3409 switch cases that are *not* elements of the enumerated type.
3410
3411 If all enumeration literals were covered by the case expressions,
3412 turn one of the expressions into the default expression since it should
3413 not be possible to fall through such a switch. */
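/* Sketch: for `enum color { RED, GREEN, BLUE }', a switch whose cases
   cover only RED and GREEN draws (under -Wswitch) the warning
   "enumerated value `BLUE' not handled in switch"; if all three are
   covered, the last case label is promoted to be the default label.  */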
3414
3415void
3416check_for_full_enumeration_handling (type)
3417 tree type;
3418{
3419 register struct case_node *n;
3420 register struct case_node **l;
3421 register tree chain;
3422 int all_values = 1;
3423
3424 /* The time complexity of this loop is currently O(N * M), with
3425 N being the number of enumerals in the enumerated type, and
3426 M being the number of case expressions in the switch. */
3427
3428 for (chain = TYPE_VALUES (type);
3429 chain;
3430 chain = TREE_CHAIN (chain))
3431 {
3432 /* Find a match between enumeral and case expression, if possible.
3433 Quit looking when we've gone too far (since case expressions
3434 are kept sorted in ascending order). Warn about enumerals not
3435 handled in the switch statement case expression list. */
3436
3437 for (n = case_stack->data.case_stmt.case_list;
3438 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3439 n = n->right)
3440 ;
3441
3442 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3443 {
3444 if (warn_switch)
3445 warning ("enumerated value `%s' not handled in switch",
3446 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3447 all_values = 0;
3448 }
3449 }
3450
3451 /* Now we go the other way around; we warn if there are case
3452 expressions that don't correspond to enumerals. This can
3453 occur since C and C++ don't enforce type-checking of
3454 assignments to enumeration variables. */
3455
3456 if (warn_switch)
3457 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3458 {
3459 for (chain = TYPE_VALUES (type);
3460 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3461 chain = TREE_CHAIN (chain))
3462 ;
3463
3464 if (!chain)
3465 warning ("case value `%d' not in enumerated type `%s'",
3466 TREE_INT_CST_LOW (n->low),
3467 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3468 == IDENTIFIER_NODE)
3469 ? TYPE_NAME (type)
3470 : DECL_NAME (TYPE_NAME (type))));
3471 }
3472
3473 /* If all values were found as case labels, make one of them the default
3474 label. Thus, this switch will never fall through. We arbitrarily pick
3475 the last one to make the default since this is likely the most
3476 efficient choice. */
3477
3478 if (all_values)
3479 {
3480 for (l = &case_stack->data.case_stmt.case_list;
3481 (*l)->right != 0;
3482 l = &(*l)->right)
3483 ;
3484
3485 case_stack->data.case_stmt.default_label = (*l)->code_label;
3486 *l = 0;
3487 }
3488}
3489\f
3490/* Terminate a case (Pascal) or switch (C) statement
3491 in which CASE_INDEX is the expression to be tested.
3492 Generate the code to test it and jump to the right place. */
3493
3494void
3495expand_end_case (orig_index)
3496 tree orig_index;
3497{
3498 tree minval, maxval, range;
3499 rtx default_label = 0;
3500 register struct case_node *n;
3501 int count;
3502 rtx index;
3503 rtx table_label = gen_label_rtx ();
3504 int ncases;
3505 rtx *labelvec;
3506 register int i;
3507 rtx before_case;
3508 register struct nesting *thiscase = case_stack;
3509 tree index_expr = thiscase->data.case_stmt.index_expr;
3510 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3511
3512 do_pending_stack_adjust ();
3513
3514 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3515 if (TREE_TYPE (index_expr) != error_mark_node)
3516 {
3517 /* If switch expression was an enumerated type, check that all
3518 enumeration literals are covered by the cases.
3519 No sense trying this if there's a default case, however. */
3520
3521 if (!thiscase->data.case_stmt.default_label
3522 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3523 && TREE_CODE (index_expr) != INTEGER_CST)
3524 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3525
3526 /* If this is the first label, warn if any insns have been emitted. */
3527 if (thiscase->data.case_stmt.seenlabel == 0)
3528 {
3529 rtx insn;
3530 for (insn = get_last_insn ();
3531 insn != case_stack->data.case_stmt.start;
3532 insn = PREV_INSN (insn))
3533 if (GET_CODE (insn) != NOTE
3534 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3535 {
3536 warning ("unreachable code at beginning of %s",
3537 case_stack->data.case_stmt.printname);
3538 break;
3539 }
3540 }
3541
3542 /* If we don't have a default-label, create one here,
3543 after the body of the switch. */
3544 if (thiscase->data.case_stmt.default_label == 0)
3545 {
3546 thiscase->data.case_stmt.default_label
3547 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3548 expand_label (thiscase->data.case_stmt.default_label);
3549 }
3550 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3551
3552 before_case = get_last_insn ();
3553
3554 /* Simplify the case-list before we count it. */
3555 group_case_nodes (thiscase->data.case_stmt.case_list);
3556
3557 /* Get upper and lower bounds of case values.
3558 Also convert all the case values to the index expr's data type. */
3559
3560 count = 0;
3561 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3562 {
3563 /* Check low and high label values are integers. */
3564 if (TREE_CODE (n->low) != INTEGER_CST)
3565 abort ();
3566 if (TREE_CODE (n->high) != INTEGER_CST)
3567 abort ();
3568
3569 n->low = convert (TREE_TYPE (index_expr), n->low);
3570 n->high = convert (TREE_TYPE (index_expr), n->high);
3571
3572 /* Count the elements and track the largest and smallest
3573 of them (treating them as signed even if they are not). */
3574 if (count++ == 0)
3575 {
3576 minval = n->low;
3577 maxval = n->high;
3578 }
3579 else
3580 {
3581 if (INT_CST_LT (n->low, minval))
3582 minval = n->low;
3583 if (INT_CST_LT (maxval, n->high))
3584 maxval = n->high;
3585 }
3586 /* A range counts double, since it requires two compares. */
3587 if (! tree_int_cst_equal (n->low, n->high))
3588 count++;
3589 }
3590
3591 /* Compute span of values. */
3592 if (count != 0)
3593 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3594 maxval, minval));
3595
3596 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3597 {
3598 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3599 emit_queue ();
3600 emit_jump (default_label);
3601 }
3602 /* If range of values is much bigger than number of values,
3603 make a sequence of conditional branches instead of a dispatch.
3604 If the switch-index is a constant, do it this way
3605 because we can optimize it. */
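/* For instance (hypothetical numbers): case values 1, 2 and 1000 give
 count == 3 and range == 999; since 999 > 10 * 3, this arm of the
 test succeeds and compare-and-jump insns are emitted instead of a
 dispatch table. */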
3606 else if (TREE_INT_CST_HIGH (range) != 0
3607#ifdef HAVE_casesi
3608 || (HAVE_casesi ? count < 4 : count < 5)
3609#else
3610 /* If machine does not have a case insn that compares the
3611 bounds, this means extra overhead for dispatch tables
3612 which raises the threshold for using them. */
3613 || count < 5
3614#endif
3615 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3616 || TREE_CODE (index_expr) == INTEGER_CST
3617 /* These will reduce to a constant. */
3618 || (TREE_CODE (index_expr) == CALL_EXPR
3619 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3620 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3621 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3622 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3623 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3624 {
3625 index = expand_expr (index_expr, 0, VOIDmode, 0);
3626
3627 /* If the index is a short or char that we do not have
3628 an insn to handle comparisons directly, convert it to
3629 a full integer now, rather than letting each comparison
3630 generate the conversion. */
3631
3632 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3633 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3634 == CODE_FOR_nothing))
3635 {
3636 enum machine_mode wider_mode;
3637 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3638 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3639 if (cmp_optab->handlers[(int) wider_mode].insn_code
3640 != CODE_FOR_nothing)
3641 {
3642 index = convert_to_mode (wider_mode, index, unsignedp);
3643 break;
3644 }
3645 }
3646
3647 emit_queue ();
3648 do_pending_stack_adjust ();
3649
3650 index = protect_from_queue (index, 0);
3651 if (GET_CODE (index) == MEM)
3652 index = copy_to_reg (index);
3653 if (GET_CODE (index) == CONST_INT
3654 || TREE_CODE (index_expr) == INTEGER_CST)
3655 {
3656 /* Make a tree node with the proper constant value
3657 if we don't already have one. */
3658 if (TREE_CODE (index_expr) != INTEGER_CST)
3659 {
3660 index_expr
3661 = build_int_2 (INTVAL (index),
3662 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3663 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3664 }
3665
3666 /* For constant index expressions we need only
3667 issue an unconditional branch to the appropriate
3668 target code. The job of removing any unreachable
3669 code is left to the optimization phase if the
3670 "-O" option is specified. */
3671 for (n = thiscase->data.case_stmt.case_list;
3672 n;
3673 n = n->right)
3674 {
3675 if (! tree_int_cst_lt (index_expr, n->low)
3676 && ! tree_int_cst_lt (n->high, index_expr))
3677 break;
3678 }
3679 if (n)
3680 emit_jump (label_rtx (n->code_label));
3681 else
3682 emit_jump (default_label);
3683 }
3684 else
3685 {
3686 /* If the index expression is not constant we generate
3687 a binary decision tree to select the appropriate
3688 target code. This is done as follows:
3689
3690 The list of cases is rearranged into a binary tree,
3691 nearly optimal assuming equal probability for each case.
3692
3693 The tree is transformed into RTL, eliminating
3694 redundant test conditions at the same time.
3695
3696 If program flow could reach the end of the
3697 decision tree an unconditional jump to the
3698 default code is emitted. */
3699
3700 use_cost_table
3701 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3702 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3703 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3704 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3705 default_label, TREE_TYPE (index_expr));
3706 emit_jump_if_reachable (default_label);
3707 }
3708 }
3709 else
3710 {
3711 int win = 0;
3712#ifdef HAVE_casesi
3713 if (HAVE_casesi)
3714 {
3715 enum machine_mode index_mode = SImode;
3716 int index_bits = GET_MODE_BITSIZE (index_mode);
3717
3718 /* Convert the index to SImode. */
3719 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3720 > GET_MODE_BITSIZE (index_mode))
3721 {
3722 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3723 rtx rangertx = expand_expr (range, 0, VOIDmode, 0);
3724
3725 /* We must handle the endpoints in the original mode. */
3726 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3727 index_expr, minval);
3728 minval = integer_zero_node;
3729 index = expand_expr (index_expr, 0, VOIDmode, 0);
3730 emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
3731 emit_jump_insn (gen_bltu (default_label));
3732 /* Now we can safely truncate. */
3733 index = convert_to_mode (index_mode, index, 0);
3734 }
3735 else
3736 {
3737 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3738 index_expr = convert (type_for_size (index_bits, 0),
3739 index_expr);
3740 index = expand_expr (index_expr, 0, VOIDmode, 0);
3741 }
3742 emit_queue ();
3743 index = protect_from_queue (index, 0);
3744 do_pending_stack_adjust ();
3745
3746 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3747 expand_expr (range, 0, VOIDmode, 0),
3748 table_label, default_label));
3749 win = 1;
3750 }
3751#endif
3752#ifdef HAVE_tablejump
3753 if (! win && HAVE_tablejump)
3754 {
3755 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3756 fold (build (MINUS_EXPR,
3757 TREE_TYPE (index_expr),
3758 index_expr, minval)));
3759 index = expand_expr (index_expr, 0, VOIDmode, 0);
3760 emit_queue ();
3761 index = protect_from_queue (index, 0);
3762 do_pending_stack_adjust ();
3763
3764 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3765 expand_expr (range, 0, VOIDmode, 0),
3766 table_label, default_label);
3767 win = 1;
3768 }
3769#endif
3770 if (! win)
3771 abort ();
3772
3773 /* Get table of labels to jump to, in order of case index. */
3774
3775 ncases = TREE_INT_CST_LOW (range) + 1;
3776 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3777 bzero (labelvec, ncases * sizeof (rtx));
3778
3779 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3780 {
3781 register int i
3782 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3783
3784 while (1)
3785 {
3786 labelvec[i]
3787 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3788 if (i + TREE_INT_CST_LOW (minval)
3789 == TREE_INT_CST_LOW (n->high))
3790 break;
3791 i++;
3792 }
3793 }
3794
3795 /* Fill in the gaps with the default. */
3796 for (i = 0; i < ncases; i++)
3797 if (labelvec[i] == 0)
3798 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3799
3800 /* Output the table. */
3801 emit_label (table_label);
3802
3803 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3804 were an expression, instead of an #ifdef/#ifndef. */
3805 if (
3806#ifdef CASE_VECTOR_PC_RELATIVE
3807 1 ||
3808#endif
3809 flag_pic)
3810 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3811 gen_rtx (LABEL_REF, Pmode, table_label),
3812 gen_rtvec_v (ncases, labelvec)));
3813 else
3814 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3815 gen_rtvec_v (ncases, labelvec)));
3816
3817 /* If the case insn drops through the table,
3818 after the table we must jump to the default-label.
3819 Otherwise record no drop-through after the table. */
3820#ifdef CASE_DROPS_THROUGH
3821 emit_jump (default_label);
3822#else
3823 emit_barrier ();
3824#endif
3825 }
3826
3827 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3828 reorder_insns (before_case, get_last_insn (),
3829 thiscase->data.case_stmt.start);
3830 }
3831 if (thiscase->exit_label)
3832 emit_label (thiscase->exit_label);
3833
3834 POPSTACK (case_stack);
3835
3836 free_temp_slots ();
3837}
3838
3839/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
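/* If both operands are CONST_INTs, the test is resolved at compile
 time: equal constants emit an unconditional jump, and unequal
 constants emit no code at all. */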
3840
3841static void
3842do_jump_if_equal (op1, op2, label, unsignedp)
3843 rtx op1, op2, label;
3844 int unsignedp;
3845{
3846 if (GET_CODE (op1) == CONST_INT
3847 && GET_CODE (op2) == CONST_INT)
3848 {
3849 if (INTVAL (op1) == INTVAL (op2))
3850 emit_jump (label);
3851 }
3852 else
3853 {
3854 enum machine_mode mode = GET_MODE (op1);
3855 if (mode == VOIDmode)
3856 mode = GET_MODE (op2);
3857 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3858 emit_jump_insn (gen_beq (label));
3859 }
3860}
3861\f
3862/* Not all case values are encountered equally. This function
3863 uses a heuristic to weight case labels, in cases where that
3864 looks like a reasonable thing to do.
3865
3866 Right now, all we try to guess is text, and we establish the
3867 following weights:
3868
3869 chars above space: 16
3870 digits: 16
3871 default: 12
3872 space, punct: 8
3873 tab: 4
3874 newline: 2
3875 other "\" chars: 1
3876 remaining chars: 0
3877
3878 If we find any cases in the switch that are not either -1 or in the range
3879 of valid ASCII characters, or are control characters other than those
3880 commonly used with "\", don't treat this switch as scanning text.
3881
3882 Return 1 if these nodes are suitable for cost estimation, otherwise
3883 return 0. */
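
/* As an illustration (hypothetical cases, not from the source): a
   switch over 'a', 'e' and '\n' stays within -1 ... 127 and uses no
   strange control characters, so it is costed as text; a single case
   of 200, or of a control character like '\003', makes this function
   return 0.  */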
3884
3885static int
3886estimate_case_costs (node)
3887 case_node_ptr node;
3888{
3889 tree min_ascii = build_int_2 (-1, -1);
3890 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3891 case_node_ptr n;
3892 int i;
3893
3894 /* If we haven't already made the cost table, make it now. Note that the
3895 lower bound of the table is -1, not zero. */
3896
3897 if (cost_table == NULL)
3898 {
3899 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3900 bzero (cost_table - 1, 129 * sizeof (short));
3901
3902 for (i = 0; i < 128; i++)
3903 {
3904 if (isalnum (i))
3905 cost_table[i] = 16;
3906 else if (ispunct (i))
3907 cost_table[i] = 8;
3908 else if (iscntrl (i))
3909 cost_table[i] = -1;
3910 }
3911
3912 cost_table[' '] = 8;
3913 cost_table['\t'] = 4;
3914 cost_table['\0'] = 4;
3915 cost_table['\n'] = 2;
3916 cost_table['\f'] = 1;
3917 cost_table['\v'] = 1;
3918 cost_table['\b'] = 1;
3919 }
3920
3921 /* See if all the case expressions look like text. It is text if the
3922 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3923 as signed arithmetic since we don't want to ever access cost_table with a
3924 value less than -1. Also check that none of the constants in a range
3925 are strange control characters. */
3926
3927 for (n = node; n; n = n->right)
3928 {
3929 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3930 return 0;
3931
3932 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3933 if (cost_table[i] < 0)
3934 return 0;
3935 }
3936
3937 /* All interesting values are within the range of interesting
3938 ASCII characters. */
3939 return 1;
3940}
3941
3942/* Scan an ordered list of case nodes
3943 combining those with consecutive values or ranges.
3944
3945 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
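
/* In source terms (an illustrative fragment):

       case 1: case 2: case 3: f (); break;

   gives three labels whose next real insn is the same, so the three
   nodes are merged into one node with low == 1 and high == 3.  */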
3946
3947static void
3948group_case_nodes (head)
3949 case_node_ptr head;
3950{
3951 case_node_ptr node = head;
3952
3953 while (node)
3954 {
3955 rtx lb = next_real_insn (label_rtx (node->code_label));
3956 case_node_ptr np = node;
3957
3958 /* Try to group the successors of NODE with NODE. */
3959 while (((np = np->right) != 0)
3960 /* Do they jump to the same place? */
3961 && next_real_insn (label_rtx (np->code_label)) == lb
3962 /* Are their ranges consecutive? */
3963 && tree_int_cst_equal (np->low,
3964 fold (build (PLUS_EXPR,
3965 TREE_TYPE (node->high),
3966 node->high,
3967 integer_one_node)))
3968 /* An overflow is not consecutive. */
3969 && tree_int_cst_lt (node->high,
3970 fold (build (PLUS_EXPR,
3971 TREE_TYPE (node->high),
3972 node->high,
3973 integer_one_node))))
3974 {
3975 node->high = np->high;
3976 }
3977 /* NP is the first node after NODE which can't be grouped with it.
3978 Delete the nodes in between, and move on to that node. */
3979 node->right = np;
3980 node = np;
3981 }
3982}
3983
3984/* Take an ordered list of case nodes
3985 and transform them into a near optimal binary tree,
3986 on the assumption that any target code selection value is as
3987 likely as any other.
3988
3989 The transformation is performed by splitting the ordered
3990 list into two equal sections plus a pivot. The parts are
3991 then attached to the pivot as left and right branches. Each
3992 branch is then transformed recursively. */
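
/* A hypothetical example: with no cost table in use, the ordered
   chain 1 2 3 4 5 is split at the pivot 3.  Nodes 1 and 2 become the
   left branch (2 hanging from 1's `right' link) and nodes 4 and 5
   the right branch (5 hanging from 4's `right' link); each branch is
   then balanced recursively.  */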
3993
3994static void
3995balance_case_nodes (head, parent)
3996 case_node_ptr *head;
3997 case_node_ptr parent;
3998{
3999 register case_node_ptr np;
4000
4001 np = *head;
4002 if (np)
4003 {
4004 int cost = 0;
4005 int i = 0;
4006 int ranges = 0;
4007 register case_node_ptr *npp;
4008 case_node_ptr left;
4009
4010 /* Count the number of entries on branch. Also count the ranges. */
4011
4012 while (np)
4013 {
4014 if (!tree_int_cst_equal (np->low, np->high))
4015 {
4016 ranges++;
4017 if (use_cost_table)
4018 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4019 }
4020
4021 if (use_cost_table)
4022 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4023
4024 i++;
4025 np = np->right;
4026 }
4027
4028 if (i > 2)
4029 {
4030 /* Split this list if it is long enough for that to help. */
4031 npp = head;
4032 left = *npp;
4033 if (use_cost_table)
4034 {
4035 /* Find the place in the list that bisects the list's total cost.
4036 Here I gets half the total cost. */
4037 int n_moved = 0;
4038 i = (cost + 1) / 2;
4039 while (1)
4040 {
4041 /* Skip nodes while their cost does not reach that amount. */
4042 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4043 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4044 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4045 if (i <= 0)
4046 break;
4047 npp = &(*npp)->right;
4048 n_moved += 1;
4049 }
4050 if (n_moved == 0)
4051 {
4052 /* Leave this branch lopsided, but optimize left-hand
4053 side and fill in `parent' fields for right-hand side. */
4054 np = *head;
4055 np->parent = parent;
4056 balance_case_nodes (&np->left, np);
4057 for (; np->right; np = np->right)
4058 np->right->parent = np;
4059 return;
4060 }
4061 }
4062 /* If there are just three nodes, split at the middle one. */
4063 else if (i == 3)
4064 npp = &(*npp)->right;
4065 else
4066 {
4067 /* Find the place in the list that bisects the list's total cost,
4068 where ranges count as 2.
4069 Here I gets half the total cost. */
4070 i = (i + ranges + 1) / 2;
4071 while (1)
4072 {
4073 /* Skip nodes while their cost does not reach that amount. */
4074 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4075 i--;
4076 i--;
4077 if (i <= 0)
4078 break;
4079 npp = &(*npp)->right;
4080 }
4081 }
4082 *head = np = *npp;
4083 *npp = 0;
4084 np->parent = parent;
4085 np->left = left;
4086
4087 /* Optimize each of the two split parts. */
4088 balance_case_nodes (&np->left, np);
4089 balance_case_nodes (&np->right, np);
4090 }
4091 else
4092 {
4093 /* Else leave this branch as one level,
4094 but fill in `parent' fields. */
4095 np = *head;
4096 np->parent = parent;
4097 for (; np->right; np = np->right)
4098 np->right->parent = np;
4099 }
4100 }
4101}
4102\f
4103/* Search the parent sections of the case node tree
4104 to see if a test for the lower bound of NODE would be redundant.
4105 INDEX_TYPE is the type of the index expression.
4106
4107 The instructions to generate the case decision tree are
4108 output in the same order as nodes are processed so it is
4109 known that if a parent node checks the range of the current
4110 node minus one, then the current node is bounded at its lower
4111 span. Thus the test would be redundant. */
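
/* Concretely (an illustrative case): for a node covering 5 ... 9, an
   ancestor whose high bound was 4 has already routed all smaller
   values elsewhere, so the test for index >= 5 can be omitted.  */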
4112
4113static int
4114node_has_low_bound (node, index_type)
4115 case_node_ptr node;
4116 tree index_type;
4117{
4118 tree low_minus_one;
4119 case_node_ptr pnode;
4120
4121 /* If the lower bound of this node is the lowest value in the index type,
4122 we need not test it. */
4123
4124 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4125 return 1;
4126
4127 /* If this node has a left branch, the value at the left must be less
4128 than that at this node, so it cannot be bounded at the bottom and
4129 we need not bother testing any further. */
4130
4131 if (node->left)
4132 return 0;
4133
4134 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4135 node->low, integer_one_node));
4136
4137 /* If the subtraction above overflowed, we can't verify anything.
4138 Otherwise, look for a parent that tests our value - 1. */
4139
4140 if (! tree_int_cst_lt (low_minus_one, node->low))
4141 return 0;
4142
4143 for (pnode = node->parent; pnode; pnode = pnode->parent)
4144 if (tree_int_cst_equal (low_minus_one, pnode->high))
4145 return 1;
4146
4147 return 0;
4148}
4149
4150/* Search the parent sections of the case node tree
4151 to see if a test for the upper bound of NODE would be redundant.
4152 INDEX_TYPE is the type of the index expression.
4153
4154 The instructions to generate the case decision tree are
4155 output in the same order as nodes are processed so it is
4156 known that if a parent node checks the range of the current
4157 node plus one, then the current node is bounded at its upper
4158 span. Thus the test would be redundant. */
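
/* This mirrors node_has_low_bound: for a node covering 5 ... 9, an
   ancestor whose low bound was 10 makes the upper-bound test
   redundant.  */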
4159
4160static int
4161node_has_high_bound (node, index_type)
4162 case_node_ptr node;
4163 tree index_type;
4164{
4165 tree high_plus_one;
4166 case_node_ptr pnode;
4167
4168 /* If the upper bound of this node is the highest value in the type
4169 of the index expression, we need not test against it. */
4170
4171 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4172 return 1;
4173
4174 /* If this node has a right branch, the value at the right must be greater
4175 than that at this node, so it cannot be bounded at the top and
4176 we need not bother testing any further. */
4177
4178 if (node->right)
4179 return 0;
4180
4181 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4182 node->high, integer_one_node));
4183
4184 /* If the addition above overflowed, we can't verify anything.
4185 Otherwise, look for a parent that tests our value + 1. */
4186
4187 if (! tree_int_cst_lt (node->high, high_plus_one))
4188 return 0;
4189
4190 for (pnode = node->parent; pnode; pnode = pnode->parent)
4191 if (tree_int_cst_equal (high_plus_one, pnode->low))
4192 return 1;
4193
4194 return 0;
4195}
4196
4197/* Search the parent sections of the
4198 case node tree to see if both tests for the upper and lower
4199 bounds of NODE would be redundant. */
4200
4201static int
4202node_is_bounded (node, index_type)
4203 case_node_ptr node;
4204 tree index_type;
4205{
4206 return (node_has_low_bound (node, index_type)
4207 && node_has_high_bound (node, index_type));
4208}
4209
4210/* Emit an unconditional jump to LABEL unless it would be dead code. */
4211
4212static void
4213emit_jump_if_reachable (label)
4214 rtx label;
4215{
4216 if (GET_CODE (get_last_insn ()) != BARRIER)
4217 emit_jump (label);
4218}
4219\f
4220/* Emit step-by-step code to select a case for the value of INDEX.
4221 The thus generated decision tree follows the form of the
4222 case-node binary tree NODE, whose nodes represent test conditions.
4223 INDEX_TYPE is the type of the index of the switch.
4224
4225 Care is taken to prune redundant tests from the decision tree
4226 by detecting any boundary conditions already checked by
4227 emitted rtx. (See node_has_high_bound, node_has_low_bound
4228 and node_is_bounded, above.)
4229
4230 Where the test conditions can be shown to be redundant we emit
4231 an unconditional jump to the target code. As a further
4232 optimization, the subordinates of a tree node are examined to
4233 check for bounded nodes. In this case conditional and/or
4234 unconditional jumps as a result of the boundary check for the
4235 current node are arranged to target the subordinates' associated
4236 code for out-of-bound conditions on the current node.
4237
4238 We can assume that when control reaches the code generated here,
4239 the index value has already been compared with the parents
4240 of this node, and determined to be on the same side of each parent
4241 as this node is. Thus, if this node tests for the value 51,
4242 and a parent tested for 52, we don't need to consider
4243 the possibility of a value greater than 51. If another parent
4244 tests for the value 50, then this node need not test anything. */
4245
4246static void
4247emit_case_nodes (index, node, default_label, index_type)
4248 rtx index;
4249 case_node_ptr node;
4250 rtx default_label;
4251 tree index_type;
4252{
4253 /* If INDEX has an unsigned type, we must make unsigned branches. */
4254 int unsignedp = TREE_UNSIGNED (index_type);
4255 typedef rtx rtx_function ();
4256 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4257 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4258 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4259 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4260 enum machine_mode mode = GET_MODE (index);
4261
4262 /* See if our parents have already tested everything for us.
4263 If they have, emit an unconditional jump for this node. */
4264 if (node_is_bounded (node, index_type))
4265 emit_jump (label_rtx (node->code_label));
4266
4267 else if (tree_int_cst_equal (node->low, node->high))
4268 {
4269 /* Node is single valued. First see if the index expression matches
4270 this node and then check our children, if any. */
4271
4272 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4273 label_rtx (node->code_label), unsignedp);
4274
4275 if (node->right != 0 && node->left != 0)
4276 {
4277 /* This node has children on both sides.
4278 Dispatch to one side or the other
4279 by comparing the index value with this node's value.
4280 If one subtree is bounded, check that one first,
4281 so we can avoid real branches in the tree. */
4282
4283 if (node_is_bounded (node->right, index_type))
4284 {
4285 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4286 GT, 0, mode, unsignedp, 0);
4287
4288 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4289 emit_case_nodes (index, node->left, default_label, index_type);
4290 }
4291
4292 else if (node_is_bounded (node->left, index_type))
4293 {
4294 emit_cmp_insn (index, expand_expr (node->high, 0,
4295 VOIDmode, 0),
4296 LT, 0, mode, unsignedp, 0);
4297 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4298 emit_case_nodes (index, node->right, default_label, index_type);
4299 }
4300
4301 else
4302 {
4303 /* Neither node is bounded. First distinguish the two sides;
4304 then emit the code for one side at a time. */
4305
4306 tree test_label
4307 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4308
4309 /* See if the value is on the right. */
4310 emit_cmp_insn (index, expand_expr (node->high, 0,
4311 VOIDmode, 0),
4312 GT, 0, mode, unsignedp, 0);
4313 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4314
4315 /* Value must be on the left.
4316 Handle the left-hand subtree. */
4317 emit_case_nodes (index, node->left, default_label, index_type);
4318 /* If left-hand subtree does nothing,
4319 go to default. */
4320 emit_jump_if_reachable (default_label);
4321
4322 /* Code branches here for the right-hand subtree. */
4323 expand_label (test_label);
4324 emit_case_nodes (index, node->right, default_label, index_type);
4325 }
4326 }
4327
4328 else if (node->right != 0 && node->left == 0)
4329 {
4330 /* Here we have a right child but no left so we issue a conditional
4331 branch to default and process the right child.
4332
4333 Omit the conditional branch to default if it would avoid only one
4334 right child; it costs too much space to save so little time. */
4335
4336 if (node->right->right || node->right->left
4337 || !tree_int_cst_equal (node->right->low, node->right->high))
4338 {
4339 if (!node_has_low_bound (node, index_type))
4340 {
4341 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4342 LT, 0, mode, unsignedp, 0);
4343 emit_jump_insn ((*gen_blt_pat) (default_label));
4344 }
4345
4346 emit_case_nodes (index, node->right, default_label, index_type);
4347 }
4348 else
4349 /* We cannot process node->right normally
4350 since we haven't ruled out the numbers less than
4351 this node's value. So handle node->right explicitly. */
4352 do_jump_if_equal (index,
4353 expand_expr (node->right->low, 0, VOIDmode, 0),
4354 label_rtx (node->right->code_label), unsignedp);
4355 }
4356
4357 else if (node->right == 0 && node->left != 0)
4358 {
4359 /* Just one subtree, on the left. */
4360
4361#if 0 /* The following code and comment were formerly part
4362 of the condition here, but they didn't work
4363 and I don't understand what the idea was. -- rms. */
4364 /* If our "most probable entry" is less probable
4365 than the default label, emit a jump to
4366 the default label using condition codes
4367 already lying around. With no right branch,
4368 a branch-greater-than will get us to the default
4369 label correctly. */
4370 if (use_cost_table
4371 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4372 ;
4373#endif /* 0 */
4374 if (node->left->left || node->left->right
4375 || !tree_int_cst_equal (node->left->low, node->left->high))
4376 {
4377 if (!node_has_high_bound (node, index_type))
4378 {
4379 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4380 GT, 0, mode, unsignedp, 0);
4381 emit_jump_insn ((*gen_bgt_pat) (default_label));
4382 }
4383
4384 emit_case_nodes (index, node->left, default_label, index_type);
4385 }
4386 else
4387 /* We cannot process node->left normally
4388 since we haven't ruled out the numbers greater than
4389 this node's value. So handle node->left explicitly. */
4390 do_jump_if_equal (index,
4391 expand_expr (node->left->low, 0, VOIDmode, 0),
4392 label_rtx (node->left->code_label), unsignedp);
4393 }
4394 }
4395 else
4396 {
4397 /* Node is a range. These cases are very similar to those for a single
4398 value, except that we do not start by testing whether this node
4399 is the one to branch to. */
4400
4401 if (node->right != 0 && node->left != 0)
4402 {
4403 /* Node has subtrees on both sides.
4404 If the right-hand subtree is bounded,
4405 test for it first, since we can go straight there.
4406 Otherwise, we need to make a branch in the control structure,
4407 then handle the two subtrees. */
4408 tree test_label = 0;
4409
4410 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4411 GT, 0, mode, unsignedp, 0);
4412
4413 if (node_is_bounded (node->right, index_type))
4414 /* Right hand node is fully bounded so we can eliminate any
4415 testing and branch directly to the target code. */
4416 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4417 else
4418 {
4419 /* Right hand node requires testing.
4420 Branch to a label where we will handle it later. */
4421
4422 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4423 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4424 }
4425
4426 /* Value belongs to this node or to the left-hand subtree. */
4427
4428 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4429 GE, 0, mode, unsignedp, 0);
4430 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4431
4432 /* Handle the left-hand subtree. */
4433 emit_case_nodes (index, node->left, default_label, index_type);
4434
4435 /* If right node had to be handled later, do that now. */
4436
4437 if (test_label)
4438 {
4439 /* If the left-hand subtree fell through,
4440 don't let it fall into the right-hand subtree. */
4441 emit_jump_if_reachable (default_label);
4442
4443 expand_label (test_label);
4444 emit_case_nodes (index, node->right, default_label, index_type);
4445 }
4446 }
4447
4448 else if (node->right != 0 && node->left == 0)
4449 {
4450 /* Deal with values to the left of this node,
4451 if they are possible. */
4452 if (!node_has_low_bound (node, index_type))
4453 {
4454 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4455 LT, 0, mode, unsignedp, 0);
4456 emit_jump_insn ((*gen_blt_pat) (default_label));
4457 }
4458
4459 /* Value belongs to this node or to the right-hand subtree. */
4460
4461 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4462 LE, 0, mode, unsignedp, 0);
4463 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4464
4465 emit_case_nodes (index, node->right, default_label, index_type);
4466 }
4467
4468 else if (node->right == 0 && node->left != 0)
4469 {
4470 /* Deal with values to the right of this node,
4471 if they are possible. */
4472 if (!node_has_high_bound (node, index_type))
4473 {
4474 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4475 GT, 0, mode, unsignedp, 0);
4476 emit_jump_insn ((*gen_bgt_pat) (default_label));
4477 }
4478
4479 /* Value belongs to this node or to the left-hand subtree. */
4480
4481 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4482 GE, 0, mode, unsignedp, 0);
4483 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4484
4485 emit_case_nodes (index, node->left, default_label, index_type);
4486 }
4487
4488 else
4489 {
4490 /* Node has no children so we check low and high bounds to remove
4491 redundant tests. Only one of the bounds can exist,
4492 since otherwise this node is bounded--a case tested already. */
4493
4494 if (!node_has_high_bound (node, index_type))
4495 {
4496 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4497 GT, 0, mode, unsignedp, 0);
4498 emit_jump_insn ((*gen_bgt_pat) (default_label));
4499 }
4500
4501 if (!node_has_low_bound (node, index_type))
4502 {
4503 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4504 LT, 0, mode, unsignedp, 0);
4505 emit_jump_insn ((*gen_blt_pat) (default_label));
4506 }
4507
4508 emit_jump (label_rtx (node->code_label));
4509 }
4510 }
4511}
4512\f
4513/* These routines are used by the loop unrolling code. They copy BLOCK trees
4514 so that the debugging info will be correct for the unrolled loop. */
4515
4516/* Indexed by loop number, contains pointer to the first block in the loop,
4517 or zero if none. Only valid if doing loop unrolling and outputting debugger
4518 info. */
4519
4520tree *loop_number_first_block;
4521
4522/* Indexed by loop number, contains pointer to the last block in the loop,
4523 only valid if loop_number_first_block is nonzero. */
4524
4525tree *loop_number_last_block;
4526
4527/* Indexed by loop number, contains nesting level of first block in the
4528 loop, if any. Only valid if doing loop unrolling and outputting debugger
4529 info. */
4530
4531int *loop_number_block_level;
4532
4533/* Scan the function looking for loops, and walk the BLOCK tree at the
4534 same time. Record the first and last BLOCK tree corresponding to each
4535 loop. This function is similar to find_and_verify_loops in loop.c. */
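
/* A sketch of the note stream this loop expects (illustrative only):

       NOTE_INSN_LOOP_BEG     loop 0 begins
       NOTE_INSN_BLOCK_BEG    the only BLOCK inside loop 0
       NOTE_INSN_BLOCK_END
       NOTE_INSN_LOOP_END

   Here loop_number_first_block[0] and loop_number_last_block[0] both
   end up pointing at that single BLOCK tree.  */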
4536
4537void
4538find_loop_tree_blocks (f)
4539 rtx f;
4540{
4541 rtx insn;
4542 int current_loop = -1;
4543 int next_loop = -1;
4544 int loop;
4545 int block_level, tree_level;
4546 tree tree_block, parent_tree_block;
4547
4548 tree_block = DECL_INITIAL (current_function_decl);
4549 parent_tree_block = 0;
4550 block_level = 0;
4551 tree_level = -1;
4552
4553 /* Find boundaries of loops, and save the first and last BLOCK tree
4554 corresponding to each loop. */
4555
4556 for (insn = f; insn; insn = NEXT_INSN (insn))
4557 {
4558 if (GET_CODE (insn) == NOTE)
4559 switch (NOTE_LINE_NUMBER (insn))
4560 {
4561 case NOTE_INSN_LOOP_BEG:
4562 loop_number_block_level[++next_loop] = block_level;
4563 loop_number_first_block[next_loop] = 0;
4564 current_loop = next_loop;
4565 break;
4566
4567 case NOTE_INSN_LOOP_END:
4568 if (current_loop == -1)
4569 abort ();
4570
4571 current_loop = loop_outer_loop[current_loop];
4572 break;
4573
4574 case NOTE_INSN_BLOCK_BEG:
4575 if (tree_level < block_level)
4576 {
4577 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4578 we must now visit the subtree of the current block. */
4579 parent_tree_block = tree_block;
4580 tree_block = BLOCK_SUBBLOCKS (tree_block);
4581 tree_level++;
4582 }
4583 else if (tree_level > block_level)
4584 abort ();
4585
4586 /* Save this block tree here for all nested loops for which
4587 this is the topmost block. */
4588 for (loop = current_loop;
4589 loop != -1 && block_level == loop_number_block_level[loop];
4590 loop = loop_outer_loop[loop])
4591 {
4592 if (loop_number_first_block[loop] == 0)
4593 loop_number_first_block[loop] = tree_block;
4594 loop_number_last_block[loop] = tree_block;
4595 }
4596
4597 block_level++;
4598 break;
4599
4600 case NOTE_INSN_BLOCK_END:
4601 block_level--;
4602 if (tree_level > block_level)
4603 {
4604 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4605 we must now visit the parent of the current tree. */
4606 if (tree_block != 0 || parent_tree_block == 0)
4607 abort ();
4608 tree_block = parent_tree_block;
4609 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4610 tree_level--;
4611 }
4612 tree_block = BLOCK_CHAIN (tree_block);
4613 break;
4614 }
4615 }
4616}
4617
4618/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4619 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4620
4621 Note that we only copy the topmost level of tree nodes; they will share
4622 pointers to the same subblocks. */
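
/* For example (hypothetical): unrolling a loop body four times calls
   this with COPIES == 4, so three additional copies of each top-level
   BLOCK in the loop are chained in after the loop's last block, all
   sharing the original subblock trees.  */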
4623
4624void
4625unroll_block_trees (loop_number, copies)
4626 int loop_number;
4627 int copies;
4628{
4629 int i;
4630
4631 /* First check whether there are any blocks that need to be copied. */
4632 if (loop_number_first_block[loop_number])
4633 {
4634 tree first_block = loop_number_first_block[loop_number];
4635 tree last_block = loop_number_last_block[loop_number];
4636 tree last_block_created = 0;
4637
4638 for (i = 0; i < copies - 1; i++)
4639 {
4640 tree block = first_block;
4641 tree insert_after = last_block;
4642 tree copied_block;
4643
4644 /* Copy every block between first_block and last_block inclusive,
4645 inserting the new blocks after last_block. */
4646 do
4647 {
4648 tree new_block = make_node (BLOCK);
4649 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4650 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4651 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4652 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4653 TREE_USED (new_block) = TREE_USED (block);
4654
4655 /* Insert the new block after the insertion point, and move
4656 the insertion point to the new block. This ensures that
4657 the copies are inserted in the right order. */
4658 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4659 BLOCK_CHAIN (insert_after) = new_block;
4660 insert_after = new_block;
4661
4662 copied_block = block;
4663 block = BLOCK_CHAIN (block);
4664 }
4665 while (copied_block != last_block);
4666
4667 /* Remember the last block created, so that we can update the
4668 info in the tables. */
4669 if (last_block_created == 0)
4670 last_block_created = insert_after;
4671 }
4672
4673 /* For all nested loops for which LAST_BLOCK was originally the last
4674 block, update the tables to indicate that LAST_BLOCK_CREATED is
4675 now the last block in the loop. */
4676 for (i = loop_number; last_block == loop_number_last_block[i];
4677 i = loop_outer_loop[i])
4678 loop_number_last_block[i] = last_block_created;
4679 }
4680}