/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
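
/* A minimal sketch of that calling sequence, for the C statement
   `if (cond) stmt;' (illustrative only; COND and STMT stand for trees
   supplied by the parser):

	expand_start_cond (cond, 0);
	... expand_ calls for STMT ...
	expand_end_cond ();

   An `else' clause is introduced by calling expand_start_else between
   the two clauses; expand_end_cond still ends the whole construct.  */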

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
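
/* A small worked example (illustrative only): for `case 1: case 3:
   case 5: case 9:' the initial chain, linked through RIGHT, is

	1 -> 3 -> 5 -> 9

   and balance_case_nodes might rearrange it into the tree

	     3
	    / \
	   1   5
	        \
	         9

   which emit_case_nodes then walks, emitting a compare and a
   conditional jump at each node.  */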

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
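
/* For example, expand_end_cond below finishes with

	POPSTACK (cond_stack);

   which frees the innermost conditional's `struct nesting' and any
   entries above it on `nesting_stack', restoring `nesting_depth'.  */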

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
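
/* A sketch of when a fixup arises (illustrative C source, not part of
   this file):

	goto out;	   `out' is not yet defined, so expand_goto
			   calls expand_fixup, which records the insn
			   before the jump in BEFORE_JUMP.
	{
	  int v[n];	   This contour resets the stack level.
	  ...
	}		   On exit, fixup_gotos sees `out' still
			   undefined and records the saved stack level.
     out: ;		   Once the label is defined, the fixup is
			   finalized: the stack restore and any cleanups
			   are inserted after BEFORE_JUMP.  */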

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, 0);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, 0);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
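
/* For example, the C statement `asm ("nop");' arrives here with BODY
   the STRING_CST "nop" and becomes a single ASM_INPUT insn carrying
   that text.  (Illustrative; the front end chooses the exact tree.)  */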

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
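
/* A sketch of the expected arguments (illustrative only): for the C
   statement

	asm volatile ("addl %1,%0" : "=r" (out) : "g" (in));

   STRING is the STRING_CST "addl %1,%0"; OUTPUTS is a one-element
   TREE_LIST pairing the constraint "=r" (TREE_PURPOSE) with the tree
   for `out' (TREE_VALUE); INPUTS likewise pairs "g" with `in';
   CLOBBERS is empty; and VOL is nonzero because of `volatile'.  */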

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      if (decode_reg_name (regname) >= 0)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)
		continue;

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      0, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
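
/* For example (illustrative): the statement `x + 1;' reaches the
   default case and draws "value computed is not used"; `(void) f ();'
   returns 0 from the cast-to-void check; and `i++;' returns 0 from
   the POSTINCREMENT_EXPR case.  */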

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
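
/* A minimal sketch of how these are used for `({ stmt; ...; val; })'
   (illustrative only):

	tree t = expand_start_stmt_expr ();
	... expand_expr_stmt for each statement inside ...
	t = expand_end_stmt_expr (t);

   T then carries the value of the last expression statement in
   RTL_EXPR_RTL (t) and the emitted insns in RTL_EXPR_SEQUENCE (t).  */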

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}

/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }  */
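
/* A minimal sketch of the expected calling sequence (illustrative;
   the trees come from the language front end):

	expand_start_try (try_clause, 0, 1);
	... expand the try-block body ...
	expand_end_try ();
	expand_start_except (0, 1);
	... expand_catch / expand_catch_default for each clause,
	    each clause ending with expand_end_catch () ...
	raised = expand_end_except ();

   RAISED is then the list of exceptions that may propagate further.  */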

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Record the label for the end of the whole
   construct and jump to it, skipping over the catch clauses.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */
int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}

/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL);
}
1746
1747/* Generate RTL between then-clause and the elseif-clause
1748 of an if-then-elseif-.... */
1749
1750void
1751expand_start_elseif (cond)
1752 tree cond;
1753{
1754 if (cond_stack->data.cond.endif_label == 0)
1755 cond_stack->data.cond.endif_label = gen_label_rtx ();
1756 emit_jump (cond_stack->data.cond.endif_label);
1757 emit_label (cond_stack->data.cond.next_label);
1758 cond_stack->data.cond.next_label = gen_label_rtx ();
1759 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1760}
1761
1762/* Generate RTL between the then-clause and the else-clause
1763 of an if-then-else. */
1764
1765void
1766expand_start_else ()
1767{
1768 if (cond_stack->data.cond.endif_label == 0)
1769 cond_stack->data.cond.endif_label = gen_label_rtx ();
1770 emit_jump (cond_stack->data.cond.endif_label);
1771 emit_label (cond_stack->data.cond.next_label);
1772 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1773}
1774
1775/* Generate RTL for the end of an if-then.
1776 Pop the record for it off of cond_stack. */
1777
1778void
1779expand_end_cond ()
1780{
1781 struct nesting *thiscond = cond_stack;
1782
1783 do_pending_stack_adjust ();
1784 if (thiscond->data.cond.next_label)
1785 emit_label (thiscond->data.cond.next_label);
1786 if (thiscond->data.cond.endif_label)
1787 emit_label (thiscond->data.cond.endif_label);
1788
1789 POPSTACK (cond_stack);
1790 last_expr_type = 0;
1791}
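
/* Illustrative sketch (hypothetical front-end sequence, not taken from
   this file): expanding `if (C1) A; else if (C2) B; else C;' with the
   entry points above:

       expand_start_cond (C1, 0);
       ... expand the statements of A ...
       expand_start_elseif (C2);
       ... expand the statements of B ...
       expand_start_else ();
       ... expand the statements of C ...
       expand_end_cond ();
*/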
1792\f
1793/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1794 loop should be exited by `exit_something'. This is a loop for which
1795 `expand_continue' will jump to the top of the loop.
1796
1797 Make an entry on loop_stack to record the labels associated with
1798 this loop. */
1799
1800struct nesting *
1801expand_start_loop (exit_flag)
1802 int exit_flag;
1803{
1804 register struct nesting *thisloop = ALLOC_NESTING ();
1805
1806 /* Make an entry on loop_stack for the loop we are entering. */
1807
1808 thisloop->next = loop_stack;
1809 thisloop->all = nesting_stack;
1810 thisloop->depth = ++nesting_depth;
1811 thisloop->data.loop.start_label = gen_label_rtx ();
1812 thisloop->data.loop.end_label = gen_label_rtx ();
1813 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1814 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1815 loop_stack = thisloop;
1816 nesting_stack = thisloop;
1817
1818 do_pending_stack_adjust ();
1819 emit_queue ();
1820 emit_note (0, NOTE_INSN_LOOP_BEG);
1821 emit_label (thisloop->data.loop.start_label);
1822
1823 return thisloop;
1824}
1825
1826/* Like expand_start_loop but for a loop where the continuation point
1827 (for expand_continue_loop) will be specified explicitly. */
1828
1829struct nesting *
1830expand_start_loop_continue_elsewhere (exit_flag)
1831 int exit_flag;
1832{
1833 struct nesting *thisloop = expand_start_loop (exit_flag);
1834 loop_stack->data.loop.continue_label = gen_label_rtx ();
1835 return thisloop;
1836}
1837
1838/* Specify the continuation point for a loop started with
1839 expand_start_loop_continue_elsewhere.
1840 Use this at the point in the code to which a continue statement
1841 should jump. */
1842
1843void
1844expand_loop_continue_here ()
1845{
1846 do_pending_stack_adjust ();
1847 emit_note (0, NOTE_INSN_LOOP_CONT);
1848 emit_label (loop_stack->data.loop.continue_label);
1849}
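
/* Illustrative sketch (hypothetical front-end sequence, not taken from
   this file): expanding `for (init; cond; incr) body' so that
   `continue' jumps to the increment code:

       ... expand init ...
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, cond);    test at the top
       ... expand body ...
       expand_loop_continue_here ();           `continue' comes here
       ... expand incr ...
       expand_end_loop ();                     jump back to the top
*/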
1850
1851/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1852 Pop the block off of loop_stack. */
1853
1854void
1855expand_end_loop ()
1856{
1857 register rtx insn = get_last_insn ();
1858 register rtx start_label = loop_stack->data.loop.start_label;
1859 rtx last_test_insn = 0;
1860 int num_insns = 0;
1861
1862 /* Mark the continue-point at the top of the loop if none elsewhere. */
1863 if (start_label == loop_stack->data.loop.continue_label)
1864 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1865
1866 do_pending_stack_adjust ();
1867
1868 /* If optimizing, perhaps reorder the loop. If the loop
1869 starts with a conditional exit, roll that to the end
1870 where it will optimize together with the jump back.
1871
1872 We look for the last conditional branch to the exit that we encounter
1873 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1874 branch to the exit first, use it.
1875
1876 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1877 because moving them is not valid. */
1878
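  /* For example (illustration only), a loop emitted as

         top:  if (!cond) goto end;
               ... body ...
               goto top;
         end:

     is rearranged into

               goto test;
         top:  ... body ...
         test: if (!cond) goto end;
               goto top;
         end:

     so the exit test sits next to the jump back, where jump
     optimization can combine the two.  */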
1879 if (optimize
1880 &&
1881 ! (GET_CODE (insn) == JUMP_INSN
1882 && GET_CODE (PATTERN (insn)) == SET
1883 && SET_DEST (PATTERN (insn)) == pc_rtx
1884 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1885 {
1886 /* Scan insns from the top of the loop looking for a qualified
1887 conditional exit. */
1888 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1889 insn = NEXT_INSN (insn))
1890 {
1891 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1892 break;
1893
1894 if (GET_CODE (insn) == NOTE
1895 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1896 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1897 break;
1898
1899 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1900 num_insns++;
1901
1902 if (last_test_insn && num_insns > 30)
1903 break;
1904
1905 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1906 && SET_DEST (PATTERN (insn)) == pc_rtx
1907 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1908 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1909 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1910 == loop_stack->data.loop.end_label))
1911 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1912 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1913 == loop_stack->data.loop.end_label))))
1914 last_test_insn = insn;
1915
1916 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1917 && GET_CODE (PATTERN (insn)) == SET
1918 && SET_DEST (PATTERN (insn)) == pc_rtx
1919 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1920 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1921 == loop_stack->data.loop.end_label))
1922 /* Include BARRIER. */
1923 last_test_insn = NEXT_INSN (insn);
1924 }
1925
1926 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1927 {
1928 /* We found one. Move everything from there up
1929 to the end of the loop, and add a jump into the loop
1930 to jump to there. */
1931 register rtx newstart_label = gen_label_rtx ();
1932 register rtx start_move = start_label;
1933
1934 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1935 then we want to move this note also. */
1936 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1937 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1938 == NOTE_INSN_LOOP_CONT))
1939 start_move = PREV_INSN (start_move);
1940
1941 emit_label_after (newstart_label, PREV_INSN (start_move));
1942 reorder_insns (start_move, last_test_insn, get_last_insn ());
1943 emit_jump_insn_after (gen_jump (start_label),
1944 PREV_INSN (newstart_label));
1945 emit_barrier_after (PREV_INSN (newstart_label));
1946 start_label = newstart_label;
1947 }
1948 }
1949
1950 emit_jump (start_label);
1951 emit_note (0, NOTE_INSN_LOOP_END);
1952 emit_label (loop_stack->data.loop.end_label);
1953
1954 POPSTACK (loop_stack);
1955
1956 last_expr_type = 0;
1957}
1958
1959/* Generate a jump to the current loop's continue-point.
1960 This is usually the top of the loop, but may be specified
1961 explicitly elsewhere. If not currently inside a loop,
1962 return 0 and do nothing; caller will print an error message. */
1963
1964int
1965expand_continue_loop (whichloop)
1966 struct nesting *whichloop;
1967{
1968 last_expr_type = 0;
1969 if (whichloop == 0)
1970 whichloop = loop_stack;
1971 if (whichloop == 0)
1972 return 0;
1973 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1974 return 1;
1975}
1976
1977/* Generate a jump to exit the current loop. If not currently inside a loop,
1978 return 0 and do nothing; caller will print an error message. */
1979
1980int
1981expand_exit_loop (whichloop)
1982 struct nesting *whichloop;
1983{
1984 last_expr_type = 0;
1985 if (whichloop == 0)
1986 whichloop = loop_stack;
1987 if (whichloop == 0)
1988 return 0;
1989 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1990 return 1;
1991}
1992
1993/* Generate a conditional jump to exit the current loop if COND
1994 evaluates to zero. If not currently inside a loop,
1995 return 0 and do nothing; caller will print an error message. */
1996
1997int
1998expand_exit_loop_if_false (whichloop, cond)
1999 struct nesting *whichloop;
2000 tree cond;
2001{
2002 last_expr_type = 0;
2003 if (whichloop == 0)
2004 whichloop = loop_stack;
2005 if (whichloop == 0)
2006 return 0;
2007 do_jump (cond, whichloop->data.loop.end_label, NULL);
2008 return 1;
2009}
2010
2011/* Return non-zero if we should preserve sub-expressions as separate
2012 pseudos. We never do so if we aren't optimizing. We always do so
2013 if -fexpensive-optimizations.
2014
2015 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2016 the loop may still be a small one. */
2017
2018int
2019preserve_subexpressions_p ()
2020{
2021 rtx insn;
2022
2023 if (flag_expensive_optimizations)
2024 return 1;
2025
2026 if (optimize == 0 || loop_stack == 0)
2027 return 0;
2028
2029 insn = get_last_insn_anywhere ();
2030
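  /* Heuristic (descriptive note): the UID difference below approximates
     the number of insns emitted since the loop's start label; while it
     is below three times the number of allocatable registers, the loop
     is considered still "early" and small enough to preserve
     subexpressions in separate pseudos.  */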
2031 return (insn
2032 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2033 < n_non_fixed_regs * 3));
2034
2035}
2036
2037/* Generate a jump to exit the current loop, conditional, binding contour
2038 or case statement. Not all such constructs are visible to this function,
2039 only those started with EXIT_FLAG nonzero. Individual languages use
2040 the EXIT_FLAG parameter to control which kinds of constructs you can
2041 exit this way.
2042
2043 If not currently inside anything that can be exited,
2044 return 0 and do nothing; caller will print an error message. */
2045
2046int
2047expand_exit_something ()
2048{
2049 struct nesting *n;
2050 last_expr_type = 0;
2051 for (n = nesting_stack; n; n = n->all)
2052 if (n->exit_label != 0)
2053 {
2054 expand_goto_internal (0, n->exit_label, 0);
2055 return 1;
2056 }
2057
2058 return 0;
2059}
2060\f
2061/* Generate RTL to return from the current function, with no value.
2062 (That is, we do not do anything about returning any value.) */
2063
2064void
2065expand_null_return ()
2066{
2067 struct nesting *block = block_stack;
2068 rtx last_insn = 0;
2069
2070 /* Does any pending block have cleanups? */
2071
2072 while (block && block->data.block.cleanups == 0)
2073 block = block->next;
2074
2075 /* If yes, use a goto to return, since that runs cleanups. */
2076
2077 expand_null_return_1 (last_insn, block != 0);
2078}
2079
2080/* Generate RTL to return from the current function, with value VAL. */
2081
2082void
2083expand_value_return (val)
2084 rtx val;
2085{
2086 struct nesting *block = block_stack;
2087 rtx last_insn = get_last_insn ();
2088 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2089
2090 /* Copy the value to the return location
2091 unless it's already there. */
2092
2093 if (return_reg != val)
2094 emit_move_insn (return_reg, val);
2095 if (GET_CODE (return_reg) == REG
2096 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2097 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2098
2099 /* Does any pending block have cleanups? */
2100
2101 while (block && block->data.block.cleanups == 0)
2102 block = block->next;
2103
2104 /* If yes, use a goto to return, since that runs cleanups.
2105 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2106
2107 expand_null_return_1 (last_insn, block != 0);
2108}
2109
2110/* Output a return with no value. If LAST_INSN is nonzero,
2111 pretend that the return takes place after LAST_INSN.
2112 If USE_GOTO is nonzero then don't use a return instruction;
2113 go to the return label instead. This causes any cleanups
2114 of pending blocks to be executed normally. */
2115
2116static void
2117expand_null_return_1 (last_insn, use_goto)
2118 rtx last_insn;
2119 int use_goto;
2120{
2121 rtx end_label = cleanup_label ? cleanup_label : return_label;
2122
2123 clear_pending_stack_adjust ();
2124 do_pending_stack_adjust ();
2125 last_expr_type = 0;
2126
2127 /* PCC-struct return always uses an epilogue. */
2128 if (current_function_returns_pcc_struct || use_goto)
2129 {
2130 if (end_label == 0)
2131 end_label = return_label = gen_label_rtx ();
2132 expand_goto_internal (0, end_label, last_insn);
2133 return;
2134 }
2135
2136 /* Otherwise output a simple return-insn if one is available,
2137 unless it won't do the job. */
2138#ifdef HAVE_return
2139 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2140 {
2141 emit_jump_insn (gen_return ());
2142 emit_barrier ();
2143 return;
2144 }
2145#endif
2146
2147 /* Otherwise jump to the epilogue. */
2148 expand_goto_internal (0, end_label, last_insn);
2149}
2150\f
2151/* Generate RTL to evaluate the expression RETVAL and return it
2152 from the current function. */
2153
2154void
2155expand_return (retval)
2156 tree retval;
2157{
2158 /* If there are any cleanups to be performed, then they will
2159 be inserted following LAST_INSN. It is desirable
2160 that the last_insn, for such purposes, should be the
2161 last insn before computing the return value. Otherwise, cleanups
2162 which call functions can clobber the return value. */
2163 /* ??? rms: I think that is erroneous, because in C++ it would
2164 run destructors on variables that might be used in the subsequent
2165 computation of the return value. */
2166 rtx last_insn = 0;
2167 register rtx val = 0;
2168 register rtx op0;
2169 tree retval_rhs;
2170 int cleanups;
2171 struct nesting *block;
2172
2173 /* If function wants no value, give it none. */
2174 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2175 {
2176 expand_expr (retval, 0, VOIDmode, 0);
2177 expand_null_return ();
2178 return;
2179 }
2180
2181 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2182 cleanups = any_pending_cleanups (1);
2183
2184 if (TREE_CODE (retval) == RESULT_DECL)
2185 retval_rhs = retval;
2186 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2187 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2188 retval_rhs = TREE_OPERAND (retval, 1);
2189 else if (TREE_TYPE (retval) == void_type_node)
2190 /* Recognize tail-recursive call to void function. */
2191 retval_rhs = retval;
2192 else
2193 retval_rhs = NULL_TREE;
2194
2195 /* Only use `last_insn' if there are cleanups which must be run. */
2196 if (cleanups || cleanup_label != 0)
2197 last_insn = get_last_insn ();
2198
2199 /* Distribute return down conditional expr if either of the sides
2200 may involve tail recursion (see test below). This enhances the number
2201 of tail recursions we see. Don't do this always since it can produce
2202 sub-optimal code in some cases and we distribute assignments into
2203 conditional expressions when it would help. */
2204
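  /* For example (illustration only): `return cond ? f (x) : g (y);'
     is expanded here as if it had been written

         if (cond) return f (x); else return g (y);

     so that each arm can be recognized as a tail call by itself.  */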
2205 if (optimize && retval_rhs != 0
2206 && frame_offset == 0
2207 && TREE_CODE (retval_rhs) == COND_EXPR
2208 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2209 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2210 {
2211 rtx label = gen_label_rtx ();
2212 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2213 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2214 DECL_RESULT (current_function_decl),
2215 TREE_OPERAND (retval_rhs, 1)));
2216 emit_label (label);
2217 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2218 DECL_RESULT (current_function_decl),
2219 TREE_OPERAND (retval_rhs, 2)));
2220 return;
2221 }
2222
2223 /* For tail-recursive call to current function,
2224 just jump back to the beginning.
2225 It's unsafe if any auto variable in this function
2226 has its address taken; for simplicity,
2227 require stack frame to be empty. */
2228 if (optimize && retval_rhs != 0
2229 && frame_offset == 0
2230 && TREE_CODE (retval_rhs) == CALL_EXPR
2231 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2232 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2233 /* Finish checking validity, and if valid emit code
2234 to set the argument variables for the new call. */
2235 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2236 DECL_ARGUMENTS (current_function_decl)))
2237 {
2238 if (tail_recursion_label == 0)
2239 {
2240 tail_recursion_label = gen_label_rtx ();
2241 emit_label_after (tail_recursion_label,
2242 tail_recursion_reentry);
2243 }
2244 expand_goto_internal (0, tail_recursion_label, last_insn);
2245 emit_barrier ();
2246 return;
2247 }
2248#ifdef HAVE_return
2249 /* This optimization is safe if there are local cleanups
2250 because expand_null_return takes care of them.
2251 ??? I think it should also be safe when there is a cleanup label,
2252 because expand_null_return takes care of them, too.
2253 Any reason why not? */
2254 if (HAVE_return && cleanup_label == 0
2255 && ! current_function_returns_pcc_struct)
2256 {
2257 /* If this is return x == y; then generate
2258 if (x == y) return 1; else return 0;
2259 if we can do it with explicit return insns. */
2260 if (retval_rhs)
2261 switch (TREE_CODE (retval_rhs))
2262 {
2263 case EQ_EXPR:
2264 case NE_EXPR:
2265 case GT_EXPR:
2266 case GE_EXPR:
2267 case LT_EXPR:
2268 case LE_EXPR:
2269 case TRUTH_ANDIF_EXPR:
2270 case TRUTH_ORIF_EXPR:
2271 case TRUTH_AND_EXPR:
2272 case TRUTH_OR_EXPR:
2273 case TRUTH_NOT_EXPR:
2274 op0 = gen_label_rtx ();
2275 jumpifnot (retval_rhs, op0);
2276 expand_value_return (const1_rtx);
2277 emit_label (op0);
2278 expand_value_return (const0_rtx);
2279 return;
2280 }
2281 }
2282#endif /* HAVE_return */
2283
2284 if (cleanups
2285 && retval_rhs != 0
2286 && TREE_TYPE (retval_rhs) != void_type_node
2287 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2288 {
2289 /* Calculate the return value into a pseudo reg. */
2290 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2291 emit_queue ();
2292 /* All temporaries have now been used. */
2293 free_temp_slots ();
2294 /* Return the calculated value, doing cleanups first. */
2295 expand_value_return (val);
2296 }
2297 else
2298 {
2299 /* No cleanups or no hard reg used;
2300 calculate value into hard return reg. */
2301 expand_expr (retval, 0, VOIDmode, 0);
2302 emit_queue ();
2303 free_temp_slots ();
2304 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2305 }
2306}
2307
2308/* Return 1 if the end of the generated RTX is not a barrier.
2309 This means code already compiled can drop through. */
2310
2311int
2312drop_through_at_end_p ()
2313{
2314 rtx insn = get_last_insn ();
2315 while (insn && GET_CODE (insn) == NOTE)
2316 insn = PREV_INSN (insn);
2317 return insn && GET_CODE (insn) != BARRIER;
2318}
2319\f
2320/* Emit code to alter this function's formal parms for a tail-recursive call.
2321 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2322 FORMALS is the chain of decls of formals.
2323 Return 1 if this can be done;
2324 otherwise return 0 and do not emit any code. */
2325
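/* For example (illustration only): in `int f (a, b) { return f (b, a); }'
   the new value of formal A is the old value of formal B and vice versa,
   so at least one old value must be copied to a temporary before the
   formals are overwritten; the copy loop below detects exactly such
   overlaps via reg_mentioned_p.  */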
2326static int
2327tail_recursion_args (actuals, formals)
2328 tree actuals, formals;
2329{
2330 register tree a = actuals, f = formals;
2331 register int i;
2332 register rtx *argvec;
2333
2334 /* Check that number and types of actuals are compatible
2335 with the formals. This is not always true in valid C code.
2336 Also check that no formal needs to be addressable
2337 and that all formals are scalars. */
2338
2339 /* Also count the args. */
2340
2341 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2342 {
2343 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2344 return 0;
2345 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2346 return 0;
2347 }
2348 if (a != 0 || f != 0)
2349 return 0;
2350
2351 /* Compute all the actuals. */
2352
2353 argvec = (rtx *) alloca (i * sizeof (rtx));
2354
2355 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2356 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2357
2358 /* Find which actual values refer to current values of previous formals.
2359 Copy each of them now, before any formal is changed. */
2360
2361 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2362 {
2363 int copy = 0;
2364 register int j;
2365 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2366 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2367 { copy = 1; break; }
2368 if (copy)
2369 argvec[i] = copy_to_reg (argvec[i]);
2370 }
2371
2372 /* Store the values of the actuals into the formals. */
2373
2374 for (f = formals, a = actuals, i = 0; f;
2375 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2376 {
2377 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2378 emit_move_insn (DECL_RTL (f), argvec[i]);
2379 else
2380 convert_move (DECL_RTL (f), argvec[i],
2381 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2382 }
2383
2384 free_temp_slots ();
2385 return 1;
2386}
2387\f
2388/* Generate the RTL code for entering a binding contour.
2389 The variables are declared one by one, by calls to `expand_decl'.
2390
2391 EXIT_FLAG is nonzero if this construct should be visible to
2392 `exit_something'. */
2393
2394void
2395expand_start_bindings (exit_flag)
2396 int exit_flag;
2397{
2398 struct nesting *thisblock = ALLOC_NESTING ();
2399
2400 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2401
2402 /* Make an entry on block_stack for the block we are entering. */
2403
2404 thisblock->next = block_stack;
2405 thisblock->all = nesting_stack;
2406 thisblock->depth = ++nesting_depth;
2407 thisblock->data.block.stack_level = 0;
2408 thisblock->data.block.cleanups = 0;
2409 thisblock->data.block.function_call_count = 0;
2410#if 0
2411 if (block_stack)
2412 {
2413 if (block_stack->data.block.cleanups == NULL_TREE
2414 && (block_stack->data.block.outer_cleanups == NULL_TREE
2415 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2416 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2417 else
2418 thisblock->data.block.outer_cleanups
2419 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2420 block_stack->data.block.outer_cleanups);
2421 }
2422 else
2423 thisblock->data.block.outer_cleanups = 0;
2424#endif
2425#if 1
2426 if (block_stack
2427 && !(block_stack->data.block.cleanups == NULL_TREE
2428 && block_stack->data.block.outer_cleanups == NULL_TREE))
2429 thisblock->data.block.outer_cleanups
2430 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2431 block_stack->data.block.outer_cleanups);
2432 else
2433 thisblock->data.block.outer_cleanups = 0;
2434#endif
2435 thisblock->data.block.label_chain = 0;
2436 thisblock->data.block.innermost_stack_block = stack_block_stack;
2437 thisblock->data.block.first_insn = note;
2438 thisblock->data.block.block_start_count = ++block_start_count;
2439 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2440 block_stack = thisblock;
2441 nesting_stack = thisblock;
2442
2443 /* Make a new level for allocating stack slots. */
2444 push_temp_slots ();
2445}
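
/* Illustrative sketch (hypothetical front-end sequence, not taken from
   this file): expanding a block `{ int v = e; ... }' with the binding
   entry points in this file:

       expand_start_bindings (0);
       expand_decl (v);                   allocate a reg or stack slot
       expand_decl_init (v);              store E into it
       expand_decl_cleanup (v, cleanup);  e.g. a destructor call, if any
       ... expand the body of the block ...
       expand_end_bindings (vars, 1, 0);  run cleanups, pop the contour
*/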
2446
2447/* Generate RTL code to terminate a binding contour.
2448 VARS is the chain of VAR_DECL nodes
2449 for the variables bound in this contour.
2450 MARK_ENDS is nonzero if we should put a note at the beginning
2451 and end of this binding contour.
2452
2453 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2454 (That is true automatically if the contour has a saved stack level.) */
2455
2456void
2457expand_end_bindings (vars, mark_ends, dont_jump_in)
2458 tree vars;
2459 int mark_ends;
2460 int dont_jump_in;
2461{
2462 register struct nesting *thisblock = block_stack;
2463 register tree decl;
2464
2465 if (warn_unused)
2466 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2467 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2468 warning_with_decl (decl, "unused variable `%s'");
2469
2470 /* Mark the beginning and end of the scope if requested. */
2471
2472 if (mark_ends)
2473 emit_note (0, NOTE_INSN_BLOCK_END);
2474 else
2475 /* Get rid of the beginning-mark if we don't make an end-mark. */
2476 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2477
2478 if (thisblock->exit_label)
2479 {
2480 do_pending_stack_adjust ();
2481 emit_label (thisblock->exit_label);
2482 }
2483
2484 /* If necessary, make a handler for nonlocal gotos taking
2485 place in the function calls in this block. */
2486 if (function_call_count != thisblock->data.block.function_call_count
2487 && nonlocal_labels
2488 /* Make handler for outermost block
2489 if there were any nonlocal gotos to this function. */
2490 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2491 /* Make handler for inner block if it has something
2492 special to do when you jump out of it. */
2493 : (thisblock->data.block.cleanups != 0
2494 || thisblock->data.block.stack_level != 0)))
2495 {
2496 tree link;
2497 rtx afterward = gen_label_rtx ();
2498 rtx handler_label = gen_label_rtx ();
2499 rtx save_receiver = gen_reg_rtx (Pmode);
2500
2501 /* Don't let jump_optimize delete the handler. */
2502 LABEL_PRESERVE_P (handler_label) = 1;
2503
2504 /* Record the handler address in the stack slot for that purpose,
2505 during this block, saving and restoring the outer value. */
2506 if (thisblock->next != 0)
2507 {
2508 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2509 emit_insn_before (gen_move_insn (save_receiver,
2510 nonlocal_goto_handler_slot),
2511 thisblock->data.block.first_insn);
2512 }
2513 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2514 gen_rtx (LABEL_REF, Pmode,
2515 handler_label)),
2516 thisblock->data.block.first_insn);
2517
2518 /* Jump around the handler; it runs only when specially invoked. */
2519 emit_jump (afterward);
2520 emit_label (handler_label);
2521
2522#ifdef HAVE_nonlocal_goto
2523 if (! HAVE_nonlocal_goto)
2524#endif
2525 /* First adjust our frame pointer to its actual value. It was
2526 previously set to the start of the virtual area corresponding to
2527 the stacked variables when we branched here and now needs to be
2528 adjusted to the actual hardware fp value.
2529
2530 Assignments to virtual registers are converted by
2531 instantiate_virtual_regs into the corresponding assignment
2532 to the underlying register (fp in this case) that makes
2533 the original assignment true.
2534 So the following insn will actually be
2535 decrementing fp by STARTING_FRAME_OFFSET. */
2536 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2537
2538#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2539 if (fixed_regs[ARG_POINTER_REGNUM])
2540 {
2541 /* Now restore our arg pointer from the address at which it was saved
2542 in our stack frame.
2543 If there hasn't been space allocated for it yet, make some now. */
2544 if (arg_pointer_save_area == 0)
2545 arg_pointer_save_area
2546 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2547 emit_move_insn (virtual_incoming_args_rtx,
2548 /* We need a pseudo here,
2549 or else instantiate_virtual_regs_1 complains. */
2550 copy_to_reg (arg_pointer_save_area));
2551 }
2552#endif
2553
2554 /* The handler expects the desired label address in the static chain
2555 register. It tests the address and does an appropriate jump
2556 to whatever label is desired. */
2557 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2558 /* Skip any labels we shouldn't be able to jump to from here. */
2559 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2560 {
2561 rtx not_this = gen_label_rtx ();
2562 rtx this = gen_label_rtx ();
2563 do_jump_if_equal (static_chain_rtx,
2564 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2565 this, 0);
2566 emit_jump (not_this);
2567 emit_label (this);
2568 expand_goto (TREE_VALUE (link));
2569 emit_label (not_this);
2570 }
2571 /* If label is not recognized, abort. */
2572 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2573 VOIDmode, 0);
2574 emit_label (afterward);
2575 }
2576
2577 /* Don't allow jumping into a block that has cleanups or a stack level. */
2578 if (dont_jump_in
2579 || thisblock->data.block.stack_level != 0
2580 || thisblock->data.block.cleanups != 0)
2581 {
2582 struct label_chain *chain;
2583
2584 /* Any labels in this block are no longer valid to go to.
2585 Mark them to cause an error message. */
2586 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2587 {
2588 DECL_TOO_LATE (chain->label) = 1;
2589 /* If any goto without a fixup came to this label,
2590 that must be an error, because gotos without fixups
2591 come from outside all saved stack-levels and all cleanups. */
2592 if (TREE_ADDRESSABLE (chain->label))
2593 error_with_decl (chain->label,
2594 "label `%s' used before containing binding contour");
2595 }
2596 }
2597
2598 /* Restore stack level in effect before the block
2599 (only if variable-size objects allocated). */
2600 /* Perform any cleanups associated with the block. */
2601
2602 if (thisblock->data.block.stack_level != 0
2603 || thisblock->data.block.cleanups != 0)
2604 {
2605 /* Don't let cleanups affect ({...}) constructs. */
2606 int old_expr_stmts_for_value = expr_stmts_for_value;
2607 rtx old_last_expr_value = last_expr_value;
2608 tree old_last_expr_type = last_expr_type;
2609 expr_stmts_for_value = 0;
2610
2611 /* Do the cleanups. */
2612 expand_cleanups (thisblock->data.block.cleanups, 0);
2613 do_pending_stack_adjust ();
2614
2615 expr_stmts_for_value = old_expr_stmts_for_value;
2616 last_expr_value = old_last_expr_value;
2617 last_expr_type = old_last_expr_type;
2618
2619 /* Restore the stack level. */
2620
2621 if (thisblock->data.block.stack_level != 0)
2622 {
2623 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2624 thisblock->data.block.stack_level, 0);
2625 if (nonlocal_goto_handler_slot != 0)
2626 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2627 }
2628
2629 /* Any gotos out of this block must also do these things.
2630 Also report any gotos with fixups that came to labels in this
2631 level. */
2632 fixup_gotos (thisblock,
2633 thisblock->data.block.stack_level,
2634 thisblock->data.block.cleanups,
2635 thisblock->data.block.first_insn,
2636 dont_jump_in);
2637 }
2638
2639 /* If doing stupid register allocation, make sure lives of all
2640 register variables declared here extend thru end of scope. */
2641
2642 if (obey_regdecls)
2643 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2644 {
2645 rtx rtl = DECL_RTL (decl);
2646 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2647 use_variable (rtl);
2648 }
2649
2650 /* Restore block_stack level for containing block. */
2651
2652 stack_block_stack = thisblock->data.block.innermost_stack_block;
2653 POPSTACK (block_stack);
2654
2655 /* Pop the stack slot nesting and free any slots at this level. */
2656 pop_temp_slots ();
2657}
2658\f
2659/* Generate RTL for the automatic variable declaration DECL.
2660 (Other kinds of declarations are simply ignored if seen here.)
2661 CLEANUP is an expression to be executed at exit from this binding contour;
2662 for example, in C++, it might call the destructor for this variable.
2663
2664 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2665 either before or after calling `expand_decl' but before compiling
2666 any subsequent expressions. This is because CLEANUP may be expanded
2667 more than once, on different branches of execution.
2668 For the same reason, CLEANUP may not contain a CALL_EXPR
2669 except as its topmost node--else `preexpand_calls' would get confused.
2670
2671 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2672 that is not associated with any particular variable.
2673
2674 There is no special support here for C++ constructors.
2675 They should be handled by the proper code in DECL_INITIAL. */
2676
2677void
2678expand_decl (decl)
2679 register tree decl;
2680{
2681 struct nesting *thisblock = block_stack;
2682 tree type = TREE_TYPE (decl);
2683
2684 /* Only automatic variables need any expansion done.
2685 Static and external variables, and external functions,
2686 will be handled by `assemble_variable' (called from finish_decl).
2687 TYPE_DECL and CONST_DECL require nothing.
2688 PARM_DECLs are handled in `assign_parms'. */
2689
2690 if (TREE_CODE (decl) != VAR_DECL)
2691 return;
2692 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2693 return;
2694
2695 /* Create the RTL representation for the variable. */
2696
2697 if (type == error_mark_node)
2698 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2699 else if (DECL_SIZE (decl) == 0)
2700 /* Variable with incomplete type. */
2701 {
2702 if (DECL_INITIAL (decl) == 0)
2703 /* Error message was already done; now avoid a crash. */
2704 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2705 else
2706 /* An initializer is going to decide the size of this array.
2707 Until we know the size, represent its address with a reg. */
2708 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2709 }
2710 else if (DECL_MODE (decl) != BLKmode
2711 /* If -ffloat-store, don't put explicit float vars
2712 into regs. */
2713 && !(flag_float_store
2714 && TREE_CODE (type) == REAL_TYPE)
2715 && ! TREE_THIS_VOLATILE (decl)
2716 && ! TREE_ADDRESSABLE (decl)
2717 && (TREE_REGDECL (decl) || ! obey_regdecls))
2718 {
2719 /* Automatic variable that can go in a register. */
2720 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2721 if (TREE_CODE (type) == POINTER_TYPE)
2722 mark_reg_pointer (DECL_RTL (decl));
2723 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2724 }
2725 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2726 {
2727 /* Variable of fixed size that goes on the stack. */
2728 rtx oldaddr = 0;
2729 rtx addr;
2730
2731 /* If we previously made RTL for this decl, it must be an array
2732 whose size was determined by the initializer.
2733 The old address was a register; set that register now
2734 to the proper address. */
2735 if (DECL_RTL (decl) != 0)
2736 {
2737 if (GET_CODE (DECL_RTL (decl)) != MEM
2738 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2739 abort ();
2740 oldaddr = XEXP (DECL_RTL (decl), 0);
2741 }
2742
2743 DECL_RTL (decl)
2744 = assign_stack_temp (DECL_MODE (decl),
2745 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2746 + BITS_PER_UNIT - 1)
2747 / BITS_PER_UNIT),
2748 1);
2749
2750 /* Set alignment we actually gave this decl. */
2751 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2752 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2753
2754 if (oldaddr)
2755 {
2756 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2757 if (addr != oldaddr)
2758 emit_move_insn (oldaddr, addr);
2759 }
2760
2761 /* If this is a memory ref that contains aggregate components,
2762 mark it as such for cse and loop optimize. */
2763 MEM_IN_STRUCT_P (DECL_RTL (decl))
2764 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2765 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2766 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2767#if 0
2768 /* If this is in memory because of -ffloat-store,
2769 set the volatile bit, to prevent optimizations from
2770 undoing the effects. */
2771 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2772 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2773#endif
2774 }
2775 else
2776 /* Dynamic-size object: must push space on the stack. */
2777 {
2778 rtx address, size;
2779
2780 /* Record the stack pointer on entry to block, if we have
2781 not already done so. */
2782 if (thisblock->data.block.stack_level == 0)
2783 {
2784 do_pending_stack_adjust ();
2785 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2786 &thisblock->data.block.stack_level,
2787 thisblock->data.block.first_insn);
2788 stack_block_stack = thisblock;
2789 }
2790
2791 /* Compute the variable's size, in bytes. */
2792 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2793 DECL_SIZE (decl),
2794 size_int (BITS_PER_UNIT)),
2795 0, VOIDmode, 0);
2796 free_temp_slots ();
2797
2798 /* This is equivalent to calling alloca. */
2799 current_function_calls_alloca = 1;
2800
2801 /* Allocate space on the stack for the variable. */
2802 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2803
2804 if (nonlocal_goto_handler_slot != 0)
2805 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2806
2807 /* Reference the variable indirect through that rtx. */
2808 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2809
2810 /* If this is a memory ref that contains aggregate components,
2811 mark it as such for cse and loop optimize. */
2812 MEM_IN_STRUCT_P (DECL_RTL (decl))
2813 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2814 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2815 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2816
2817 /* Indicate the alignment we actually gave this variable. */
2818#ifdef STACK_BOUNDARY
2819 DECL_ALIGN (decl) = STACK_BOUNDARY;
2820#else
2821 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2822#endif
2823 }
2824
2825 if (TREE_THIS_VOLATILE (decl))
2826 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2827 if (TREE_READONLY (decl))
2828 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2829
2830 /* If doing stupid register allocation, make sure life of any
2831 register variable starts here, at the start of its scope. */
2832
2833 if (obey_regdecls)
2834 use_variable (DECL_RTL (decl));
2835}
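
/* Examples of the cases above (illustration only):
   - `int i;' normally becomes a pseudo register (gen_reg_rtx);
   - `int a[10];' or a volatile or addressable variable becomes a
     fixed stack slot (assign_stack_temp);
   - `int a[n];' becomes dynamically allocated stack space
     (allocate_dynamic_stack_space), with the entry stack level saved
     so the enclosing binding contour can restore it.  */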
2836\f
2837/* Emit code to perform the initialization of a declaration DECL. */
2838
2839void
2840expand_decl_init (decl)
2841 tree decl;
2842{
2843 int was_used = TREE_USED (decl);
2844
2845 if (TREE_STATIC (decl))
2846 return;
2847
2848 /* Compute and store the initial value now. */
2849
2850 if (DECL_INITIAL (decl) == error_mark_node)
2851 {
2852 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2853 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2854 || code == POINTER_TYPE)
2855 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2856 0, 0);
2857 emit_queue ();
2858 }
2859 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2860 {
2861 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2862 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2863 emit_queue ();
2864 }
2865
2866 /* Don't let the initialization count as "using" the variable. */
2867 TREE_USED (decl) = was_used;
2868
2869 /* Free any temporaries we made while initializing the decl. */
2870 free_temp_slots ();
2871}
2872
2873/* CLEANUP is an expression to be executed at exit from this binding contour;
2874 for example, in C++, it might call the destructor for this variable.
2875
2876 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2877 either before or after calling `expand_decl' but before compiling
2878 any subsequent expressions. This is because CLEANUP may be expanded
2879 more than once, on different branches of execution.
2880 For the same reason, CLEANUP may not contain a CALL_EXPR
2881 except as its topmost node--else `preexpand_calls' would get confused.
2882
2883 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2884 that is not associated with any particular variable. */
2885
2886int
2887expand_decl_cleanup (decl, cleanup)
2888 tree decl, cleanup;
2889{
2890 struct nesting *thisblock = block_stack;
2891
2892 /* Error if we are not in any block. */
2893 if (thisblock == 0)
2894 return 0;
2895
2896 /* Record the cleanup if there is one. */
2897
2898 if (cleanup != 0)
2899 {
2900 thisblock->data.block.cleanups
2901 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2902 /* If this block has a cleanup, it belongs in stack_block_stack. */
2903 stack_block_stack = thisblock;
2904 }
2905 return 1;
2906}
2907\f
2908/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2909 DECL_ELTS is the list of elements that belong to DECL's type.
2910 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2911
2912void
2913expand_anon_union_decl (decl, cleanup, decl_elts)
2914 tree decl, cleanup, decl_elts;
2915{
2916 struct nesting *thisblock = block_stack;
2917 rtx x;
2918
2919 expand_decl (decl);
     expand_decl_cleanup (decl, cleanup);
2920 x = DECL_RTL (decl);
2921
2922 while (decl_elts)
2923 {
2924 tree decl_elt = TREE_VALUE (decl_elts);
2925 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2926 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2927
2928 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2929 instead create a new MEM rtx with the proper mode. */
2930 if (GET_CODE (x) == MEM)
2931 {
2932 if (mode == GET_MODE (x))
2933 DECL_RTL (decl_elt) = x;
2934 else
2935 {
2936 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2937 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2938 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2939 }
2940 }
2941 else if (GET_CODE (x) == REG)
2942 {
2943 if (mode == GET_MODE (x))
2944 DECL_RTL (decl_elt) = x;
2945 else
2946 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2947 }
2948 else
2949 abort ();
2950
2951 /* Record the cleanup if there is one. */
2952
2953 if (cleanup != 0)
2954 thisblock->data.block.cleanups
2955 = temp_tree_cons (decl_elt, cleanup_elt,
2956 thisblock->data.block.cleanups);
2957
2958 decl_elts = TREE_CHAIN (decl_elts);
2959 }
2960}
2961\f
2962/* Expand a list of cleanups LIST.
2963 Elements may be expressions or may be nested lists.
2964
2965 If DONT_DO is nonnull, then any list-element
2966 whose TREE_PURPOSE matches DONT_DO is omitted.
2967 This is sometimes used to avoid a cleanup associated with
2968 a value that is being returned out of the scope. */
2969
2970static void
2971expand_cleanups (list, dont_do)
2972 tree list;
2973 tree dont_do;
2974{
2975 tree tail;
2976 for (tail = list; tail; tail = TREE_CHAIN (tail))
2977 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2978 {
2979 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2980 expand_cleanups (TREE_VALUE (tail), dont_do);
2981 else
2982 {
2983 /* Cleanups may be run multiple times. For example,
2984 when exiting a binding contour, we expand the
2985 cleanups associated with that contour. When a goto
2986 within that binding contour has a target outside that
2987 contour, it will expand all cleanups from its scope to
2988 the target. Though the cleanups are expanded multiple
2989 times, the control paths are non-overlapping so the
2990 cleanups will not be executed twice. */
2991 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2992 free_temp_slots ();
2993 }
2994 }
2995}
2996
2997/* Expand a list of cleanups for a goto fixup.
2998 The expansion is put into the insn chain after the insn *BEFORE_JUMP
2999 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
3000
3001static void
3002fixup_cleanups (list, before_jump)
3003 tree list;
3004 rtx *before_jump;
3005{
3006 rtx beyond_jump = get_last_insn ();
3007 rtx new_before_jump;
3008
3009 expand_cleanups (list, 0);
3010 /* Pop any pushes done in the cleanups,
3011 in case function is about to return. */
3012 do_pending_stack_adjust ();
3013
3014 new_before_jump = get_last_insn ();
3015
3016 if (beyond_jump != new_before_jump)
3017 {
3018 /* If cleanups expand to nothing, don't reorder. */
3019 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
3020 *before_jump = new_before_jump;
3021 }
3022}
3023
3024/* Move all cleanups from the current block_stack
3025 to the containing block_stack, where they are assumed to
3026 have been created. If anything can cause a temporary to
3027 be created, but not expanded for more than one level of
3028 block_stacks, then this code will have to change. */
3029
3030void
3031move_cleanups_up ()
3032{
3033 struct nesting *block = block_stack;
3034 struct nesting *outer = block->next;
3035
3036 outer->data.block.cleanups
3037 = chainon (block->data.block.cleanups,
3038 outer->data.block.cleanups);
3039 block->data.block.cleanups = 0;
3040}
3041
3042tree
3043last_cleanup_this_contour ()
3044{
3045 if (block_stack == 0)
3046 return 0;
3047
3048 return block_stack->data.block.cleanups;
3049}
3050
3051/* Return 1 if there are any pending cleanups at this point.
3052 If THIS_CONTOUR is nonzero, check the current contour as well.
3053 Otherwise, look only at the contours that enclose this one. */
3054
3055int
3056any_pending_cleanups (this_contour)
3057 int this_contour;
3058{
3059 struct nesting *block;
3060
3061 if (block_stack == 0)
3062 return 0;
3063
3064 if (this_contour && block_stack->data.block.cleanups != NULL)
3065 return 1;
3066 if (block_stack->data.block.cleanups == 0
3067 && (block_stack->data.block.outer_cleanups == 0
3068#if 0
3069 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3070#endif
3071 ))
3072 return 0;
3073
3074 for (block = block_stack->next; block; block = block->next)
3075 if (block->data.block.cleanups != 0)
3076 return 1;
3077
3078 return 0;
3079}
3080\f
3081/* Enter a case (Pascal) or switch (C) statement.
3082 Push a block onto case_stack and nesting_stack
3083 to accumulate the case-labels that are seen
3084 and to record the labels generated for the statement.
3085
3086 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3087 Otherwise, this construct is transparent for `exit_something'.
3088
3089 EXPR is the index-expression to be dispatched on.
3090 TYPE is its nominal type. We could simply convert EXPR to this type,
3091 but instead we take short cuts. */
3092
3093void
3094expand_start_case (exit_flag, expr, type, printname)
3095 int exit_flag;
3096 tree expr;
3097 tree type;
3098 char *printname;
3099{
3100 register struct nesting *thiscase = ALLOC_NESTING ();
3101
3102 /* Make an entry on case_stack for the case we are entering. */
3103
3104 thiscase->next = case_stack;
3105 thiscase->all = nesting_stack;
3106 thiscase->depth = ++nesting_depth;
3107 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3108 thiscase->data.case_stmt.case_list = 0;
3109 thiscase->data.case_stmt.index_expr = expr;
3110 thiscase->data.case_stmt.nominal_type = type;
3111 thiscase->data.case_stmt.default_label = 0;
3112 thiscase->data.case_stmt.num_ranges = 0;
3113 thiscase->data.case_stmt.printname = printname;
3114 thiscase->data.case_stmt.seenlabel = 0;
3115 case_stack = thiscase;
3116 nesting_stack = thiscase;
3117
3118 do_pending_stack_adjust ();
3119
3120 /* Make sure case_stmt.start points to something that won't
3121 need any transformation before expand_end_case. */
3122 if (GET_CODE (get_last_insn ()) != NOTE)
3123 emit_note (0, NOTE_INSN_DELETED);
3124
3125 thiscase->data.case_stmt.start = get_last_insn ();
3126}
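
/* Illustrative sketch (hypothetical front-end sequence, not taken from
   this file; label and dup variables are made-up names): expanding
   `switch (x) { case 1: A; default: B; }':

       expand_start_case (1, x, type, "switch statement");
       pushcase (build_int_2 (1, 0), lab1, &dup);   then expand A
       pushcase (0, deflab, &dup);                  0 means `default'
       ... expand B ...
       expand_end_case (x);                         emit dispatch code
*/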
3127
3128/* Start a "dummy case statement" within which case labels are invalid
3129 and are not connected to any larger real case statement.
3130 This can be used if you don't want to let a case statement jump
3131 into the middle of certain kinds of constructs. */
3132
3133void
3134expand_start_case_dummy ()
3135{
3136 register struct nesting *thiscase = ALLOC_NESTING ();
3137
3138 /* Make an entry on case_stack for the dummy. */
3139
3140 thiscase->next = case_stack;
3141 thiscase->all = nesting_stack;
3142 thiscase->depth = ++nesting_depth;
3143 thiscase->exit_label = 0;
3144 thiscase->data.case_stmt.case_list = 0;
3145 thiscase->data.case_stmt.start = 0;
3146 thiscase->data.case_stmt.nominal_type = 0;
3147 thiscase->data.case_stmt.default_label = 0;
3148 thiscase->data.case_stmt.num_ranges = 0;
3149 case_stack = thiscase;
3150 nesting_stack = thiscase;
3151}
3152
3153/* End a dummy case statement. */
3154
3155void
3156expand_end_case_dummy ()
3157{
3158 POPSTACK (case_stack);
3159}
3160
3161/* Return the data type of the index-expression
3162 of the innermost case statement, or null if none. */
3163
3164tree
3165case_index_expr_type ()
3166{
3167 if (case_stack)
3168 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3169 return 0;
3170}
3171\f
3172/* Accumulate one case or default label inside a case or switch statement.
3173 VALUE is the value of the case (a null pointer, for a default label).
3174
3175 If not currently inside a case or switch statement, return 1 and do
3176 nothing. The caller will print a language-specific error message.
3177 If VALUE is a duplicate or overlaps, return 2 and do nothing
3178 except store the (first) duplicate node in *DUPLICATE.
3179 If VALUE is out of range, return 3 and do nothing.
3180 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3181 Return 0 on success.
3182
3183 Extended to handle range statements. */
3184
3185int
3186pushcase (value, label, duplicate)
3187 register tree value;
3188 register tree label;
3189 tree *duplicate;
3190{
3191 register struct case_node **l;
3192 register struct case_node *n;
3193 tree index_type;
3194 tree nominal_type;
3195
3196 /* Fail if not inside a real case statement. */
3197 if (! (case_stack && case_stack->data.case_stmt.start))
3198 return 1;
3199
3200 if (stack_block_stack
3201 && stack_block_stack->depth > case_stack->depth)
3202 return 5;
3203
3204 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3205 nominal_type = case_stack->data.case_stmt.nominal_type;
3206
3207 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3208 if (index_type == error_mark_node)
3209 return 0;
3210
3211 /* Convert VALUE to the type in which the comparisons are nominally done. */
3212 if (value != 0)
3213 value = convert (nominal_type, value);
3214
3215 /* If this is the first label, warn if any insns have been emitted. */
3216 if (case_stack->data.case_stmt.seenlabel == 0)
3217 {
3218 rtx insn;
3219 for (insn = case_stack->data.case_stmt.start;
3220 insn;
3221 insn = NEXT_INSN (insn))
3222 {
3223 if (GET_CODE (insn) == CODE_LABEL)
3224 break;
3225 if (GET_CODE (insn) != NOTE
3226 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3227 {
3228 warning ("unreachable code at beginning of %s",
3229 case_stack->data.case_stmt.printname);
3230 break;
3231 }
3232 }
3233 }
3234 case_stack->data.case_stmt.seenlabel = 1;
3235
3236 /* Fail if this value is out of range for the actual type of the index
3237 (which may be narrower than NOMINAL_TYPE). */
3238 if (value != 0 && ! int_fits_type_p (value, index_type))
3239 return 3;
3240
3241 /* Fail if this is a duplicate or overlaps another entry. */
3242 if (value == 0)
3243 {
3244 if (case_stack->data.case_stmt.default_label != 0)
3245 {
3246 *duplicate = case_stack->data.case_stmt.default_label;
3247 return 2;
3248 }
3249 case_stack->data.case_stmt.default_label = label;
3250 }
3251 else
3252 {
3253 /* Find the elt in the chain before which to insert the new value,
3254 to keep the chain sorted in increasing order.
3255 But report an error if this element is a duplicate. */
3256 for (l = &case_stack->data.case_stmt.case_list;
3257 /* Keep going past elements distinctly less than VALUE. */
3258 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3259 l = &(*l)->right)
3260 ;
3261 if (*l)
3262 {
3263 /* Element we will insert before must be distinctly greater;
3264 overlap means error. */
3265 if (! tree_int_cst_lt (value, (*l)->low))
3266 {
3267 *duplicate = (*l)->code_label;
3268 return 2;
3269 }
3270 }
3271
3272 /* Add this label to the chain, and succeed.
3273 Copy VALUE so it is on temporary rather than momentary
3274 obstack and will thus survive till the end of the case statement. */
3275 n = (struct case_node *) oballoc (sizeof (struct case_node));
3276 n->left = 0;
3277 n->right = *l;
3278 n->high = n->low = copy_node (value);
3279 n->code_label = label;
3280 *l = n;
3281 }
3282
3283 expand_label (label);
3284 return 0;
3285}
3286
3287/* Like pushcase but this case applies to all values
3288 between VALUE1 and VALUE2 (inclusive).
3289 The return value is the same as that of pushcase
3290 but there is one additional error code:
3291 4 means the specified range was empty. */
3292
3293int
3294pushcase_range (value1, value2, label, duplicate)
3295 register tree value1, value2;
3296 register tree label;
3297 tree *duplicate;
3298{
3299 register struct case_node **l;
3300 register struct case_node *n;
3301 tree index_type;
3302 tree nominal_type;
3303
3304 /* Fail if not inside a real case statement. */
3305 if (! (case_stack && case_stack->data.case_stmt.start))
3306 return 1;
3307
3308 if (stack_block_stack
3309 && stack_block_stack->depth > case_stack->depth)
3310 return 5;
3311
3312 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3313 nominal_type = case_stack->data.case_stmt.nominal_type;
3314
3315 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3316 if (index_type == error_mark_node)
3317 return 0;
3318
3319 /* If this is the first label, warn if any insns have been emitted. */
3320 if (case_stack->data.case_stmt.seenlabel == 0)
3321 {
3322 rtx insn;
3323 for (insn = case_stack->data.case_stmt.start;
3324 insn;
3325 insn = NEXT_INSN (insn))
3326 {
3327 if (GET_CODE (insn) == CODE_LABEL)
3328 break;
3329 if (GET_CODE (insn) != NOTE
3330 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3331 {
3332 warning ("unreachable code at beginning of %s",
3333 case_stack->data.case_stmt.printname);
3334 break;
3335 }
3336 }
3337 }
3338 case_stack->data.case_stmt.seenlabel = 1;
3339
3340 /* Convert VALUEs to type in which the comparisons are nominally done. */
3341 if (value1 == 0) /* Negative infinity. */
3342 value1 = TYPE_MIN_VALUE (index_type);
3343 value1 = convert (nominal_type, value1);
3344
3345 if (value2 == 0) /* Positive infinity. */
3346 value2 = TYPE_MAX_VALUE (index_type);
3347 value2 = convert (nominal_type, value2);
3348
3349 /* Fail if these values are out of range. */
3350 if (! int_fits_type_p (value1, index_type))
3351 return 3;
3352
3353 if (! int_fits_type_p (value2, index_type))
3354 return 3;
3355
3356 /* Fail if the range is empty. */
3357 if (tree_int_cst_lt (value2, value1))
3358 return 4;
3359
3360 /* If the bounds are equal, turn this into the one-value case. */
3361 if (tree_int_cst_equal (value1, value2))
3362 return pushcase (value1, label, duplicate);
3363
3364 /* Find the elt in the chain before which to insert the new value,
3365 to keep the chain sorted in increasing order.
3366 But report an error if this element is a duplicate. */
3367 for (l = &case_stack->data.case_stmt.case_list;
3368 /* Keep going past elements distinctly less than this range. */
3369 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3370 l = &(*l)->right)
3371 ;
3372 if (*l)
3373 {
3374 /* Element we will insert before must be distinctly greater;
3375 overlap means error. */
3376 if (! tree_int_cst_lt (value2, (*l)->low))
3377 {
3378 *duplicate = (*l)->code_label;
3379 return 2;
3380 }
3381 }
3382
3383 /* Add this label to the chain, and succeed.
3384 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3385 obstack and will thus survive till the end of the case statement. */
3386
3387 n = (struct case_node *) oballoc (sizeof (struct case_node));
3388 n->left = 0;
3389 n->right = *l;
3390 n->low = copy_node (value1);
3391 n->high = copy_node (value2);
3392 n->code_label = label;
3393 *l = n;
3394
3395 expand_label (label);
3396
3397 case_stack->data.case_stmt.num_ranges++;
3398
3399 return 0;
3400}
3401\f
3402/* Called when the index of a switch statement is an enumerated type
3403 and there is no default label.
3404
3405 Checks that all enumeration literals are covered by the case
3406 expressions of a switch. Also, warn if there are any extra
3407 switch cases that are *not* elements of the enumerated type.
3408
3409 If all enumeration literals were covered by the case expressions,
3410 turn one of the expressions into the default expression since it should
3411 not be possible to fall through such a switch. */
3412
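/* For example (illustration only): with `enum e { A, B, C }', a switch
   covering only A and B gets a warning for unhandled C; a `case 42:'
   gets a warning for a value not in the type; and if A, B and C are
   all covered, the last case label is also made the default label, so
   the switch can never fall through.  */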
3413void
3414check_for_full_enumeration_handling (type)
3415 tree type;
3416{
3417 register struct case_node *n;
3418 register struct case_node **l;
3419 register tree chain;
3420 int all_values = 1;
3421
3422 /* The time complexity of this loop is currently O(N * M), with
3423 N being the number of enumerals in the enumerated type, and
3424 M being the number of case expressions in the switch. */
3425
3426 for (chain = TYPE_VALUES (type);
3427 chain;
3428 chain = TREE_CHAIN (chain))
3429 {
3430 /* Find a match between enumeral and case expression, if possible.
3431 Quit looking when we've gone too far (since case expressions
3432 are kept sorted in ascending order). Warn about enumerals not
3433 handled in the switch statement case expression list. */
3434
3435 for (n = case_stack->data.case_stmt.case_list;
3436 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3437 n = n->right)
3438 ;
3439
3440 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3441 {
3442 if (warn_switch)
3443 warning ("enumerated value `%s' not handled in switch",
3444 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3445 all_values = 0;
3446 }
3447 }
3448
3449 /* Now we go the other way around; we warn if there are case
3450 expressions that don't correspond to enumerals. This can
3451 occur since C and C++ don't enforce type-checking of
3452 assignments to enumeration variables. */
3453
3454 if (warn_switch)
3455 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3456 {
3457 for (chain = TYPE_VALUES (type);
3458 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3459 chain = TREE_CHAIN (chain))
3460 ;
3461
3462 if (!chain)
3463 warning ("case value `%d' not in enumerated type `%s'",
3464 TREE_INT_CST_LOW (n->low),
3465 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3466 == IDENTIFIER_NODE)
3467 ? TYPE_NAME (type)
3468 : DECL_NAME (TYPE_NAME (type))));
3469 }
3470
3471 /* If all values were found as case labels, make one of them the default
3472 label. Thus, this switch will never fall through. We arbitrarily pick
3473 the last one to be the default since this is likely the most
3474 efficient choice. */
3475
3476 if (all_values)
3477 {
3478 for (l = &case_stack->data.case_stmt.case_list;
3479 (*l)->right != 0;
3480 l = &(*l)->right)
3481 ;
3482
3483 case_stack->data.case_stmt.default_label = (*l)->code_label;
3484 *l = 0;
3485 }
3486}
3487\f
3488/* Terminate a case (Pascal) or switch (C) statement
3489 in which CASE_INDEX is the expression to be tested.
3490 Generate the code to test it and jump to the right place. */
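/* Roughly how a front end drives a C `switch' (a sketch, not a
   verbatim transcript of any parser; pushcase and pushcase_range
   take the case value or range, the label decl and a slot for
   reporting duplicates, as in the code above):

	expand_start_case (...);			start the nesting
	pushcase (value, label_decl, &dup);		each `case VALUE:'
	pushcase_range (lo, hi, label_decl, &dup);	each case range
	expand_end_case (index_expr);			emit the dispatch

   expand_end_case picks among a direct jump (constant index), a
   binary decision tree, and a casesi or tablejump dispatch table.  */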
3491
3492void
3493expand_end_case (orig_index)
3494 tree orig_index;
3495{
3496 tree minval, maxval, range;
3497 rtx default_label = 0;
3498 register struct case_node *n;
3499 int count;
3500 rtx index;
3501 rtx table_label = gen_label_rtx ();
3502 int ncases;
3503 rtx *labelvec;
3504 register int i;
3505 rtx before_case;
3506 register struct nesting *thiscase = case_stack;
3507 tree index_expr = thiscase->data.case_stmt.index_expr;
3508 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3509
3510 do_pending_stack_adjust ();
3511
3512 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3513 if (TREE_TYPE (index_expr) != error_mark_node)
3514 {
3515 /* If switch expression was an enumerated type, check that all
3516 enumeration literals are covered by the cases.
3517 No sense trying this if there's a default case, however. */
3518
3519 if (!thiscase->data.case_stmt.default_label
3520 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3521 && TREE_CODE (index_expr) != INTEGER_CST)
3522 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3523
3524 /* If this is the first label, warn if any insns have been emitted. */
3525 if (thiscase->data.case_stmt.seenlabel == 0)
3526 {
3527 rtx insn;
3528 for (insn = get_last_insn ();
3529 insn != case_stack->data.case_stmt.start;
3530 insn = PREV_INSN (insn))
3531 if (GET_CODE (insn) != NOTE
3532 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
3533 {
3534 warning ("unreachable code at beginning of %s",
3535 case_stack->data.case_stmt.printname);
3536 break;
3537 }
3538 }
3539
3540 /* If we don't have a default-label, create one here,
3541 after the body of the switch. */
3542 if (thiscase->data.case_stmt.default_label == 0)
3543 {
3544 thiscase->data.case_stmt.default_label
3545 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3546 expand_label (thiscase->data.case_stmt.default_label);
3547 }
3548 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3549
3550 before_case = get_last_insn ();
3551
3552 /* Simplify the case-list before we count it. */
3553 group_case_nodes (thiscase->data.case_stmt.case_list);
3554
3555 /* Get upper and lower bounds of case values.
3556 Also convert all the case values to the index expr's data type. */
3557
3558 count = 0;
3559 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3560 {
3561 /* Check low and high label values are integers. */
3562 if (TREE_CODE (n->low) != INTEGER_CST)
3563 abort ();
3564 if (TREE_CODE (n->high) != INTEGER_CST)
3565 abort ();
3566
3567 n->low = convert (TREE_TYPE (index_expr), n->low);
3568 n->high = convert (TREE_TYPE (index_expr), n->high);
3569
3570 /* Count the elements and track the largest and smallest
3571 of them (treating them as signed even if they are not). */
3572 if (count++ == 0)
3573 {
3574 minval = n->low;
3575 maxval = n->high;
3576 }
3577 else
3578 {
3579 if (INT_CST_LT (n->low, minval))
3580 minval = n->low;
3581 if (INT_CST_LT (maxval, n->high))
3582 maxval = n->high;
3583 }
3584 /* A range counts double, since it requires two compares. */
3585 if (! tree_int_cst_equal (n->low, n->high))
3586 count++;
3587 }
3588
3589 /* Compute span of values. */
3590 if (count != 0)
3591 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3592 maxval, minval));
3593
3594 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3595 {
3596 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3597 emit_queue ();
3598 emit_jump (default_label);
3599 }
3600 /* If range of values is much bigger than number of values,
3601 make a sequence of conditional branches instead of a dispatch.
3602 If the switch-index is a constant, do it this way
3603 because we can optimize it. */
3604 else if (TREE_INT_CST_HIGH (range) != 0
3605#ifdef HAVE_casesi
3606 || (HAVE_casesi ? count < 4 : count < 5)
3607#else
3608 /* If the machine does not have a case insn that compares the
3609 bounds, this means extra overhead for dispatch tables,
3610 which raises the threshold for using them. */
3611 || count < 5
3612#endif
3613 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3614 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 3615 /* These will reduce to a constant. */
28d81abb 3616 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 3617 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 3618 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3619 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3620 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3621 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3622 {
3623 index = expand_expr (index_expr, 0, VOIDmode, 0);
3624
3625 /* If the index is a short or char for which we do not have
3626 an insn to handle comparisons directly, convert it to
3627 a full integer now, rather than letting each comparison
3628 generate the conversion. */
3629
3630 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3631 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
3632 == CODE_FOR_nothing))
3633 {
3634 enum machine_mode wider_mode;
3635 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3636 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3637 if (cmp_optab->handlers[(int) wider_mode].insn_code
3638 != CODE_FOR_nothing)
3639 {
3640 index = convert_to_mode (wider_mode, index, unsignedp);
3641 break;
3642 }
3643 }
3644
3645 emit_queue ();
3646 do_pending_stack_adjust ();
3647
3648 index = protect_from_queue (index, 0);
3649 if (GET_CODE (index) == MEM)
3650 index = copy_to_reg (index);
3651 if (GET_CODE (index) == CONST_INT
3652 || TREE_CODE (index_expr) == INTEGER_CST)
3653 {
3654 /* Make a tree node with the proper constant value
3655 if we don't already have one. */
3656 if (TREE_CODE (index_expr) != INTEGER_CST)
3657 {
3658 index_expr
3659 = build_int_2 (INTVAL (index),
3660 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3661 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3662 }
3663
3664 /* For constant index expressions we need only
3665 issue an unconditional branch to the appropriate
3666 target code. The job of removing any unreachable
3667 code is left to the optimisation phase if the
3668 "-O" option is specified. */
3669 for (n = thiscase->data.case_stmt.case_list;
3670 n;
3671 n = n->right)
3672 {
3673 if (! tree_int_cst_lt (index_expr, n->low)
3674 && ! tree_int_cst_lt (n->high, index_expr))
3675 break;
3676 }
3677 if (n)
3678 emit_jump (label_rtx (n->code_label));
3679 else
3680 emit_jump (default_label);
3681 }
3682 else
3683 {
3684 /* If the index expression is not constant we generate
3685 a binary decision tree to select the appropriate
3686 target code. This is done as follows:
3687
3688 The list of cases is rearranged into a binary tree,
3689 nearly optimal assuming equal probability for each case.
3690
3691 The tree is transformed into RTL, eliminating
3692 redundant test conditions at the same time.
3693
3694 If program flow could reach the end of the
3695 decision tree an unconditional jump to the
3696 default code is emitted. */
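/* For a single-valued node the emitted RTL has roughly this shape
   (a sketch only; the exact insns depend on the target and on its
   condition-code scheme):

	(set (cc0) (compare index value))
	(jump_insn ... (eq ...) -> case_label)
	... subtrees, or a jump to the default label ...

   emit_case_nodes below omits compares that a parent node has
   already made redundant.  */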
3697
3698 use_cost_table
3699 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3700 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3701 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3702 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3703 default_label, TREE_TYPE (index_expr));
3704 emit_jump_if_reachable (default_label);
3705 }
3706 }
3707 else
3708 {
3709 int win = 0;
3710#ifdef HAVE_casesi
3711 if (HAVE_casesi)
3712 {
c4fcf531 3713 enum machine_mode index_mode = SImode;
5130a5cc 3714 int index_bits = GET_MODE_BITSIZE (index_mode);
c4fcf531 3715
28d81abb 3716 /* Convert the index to SImode. */
3717 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3718 > GET_MODE_BITSIZE (index_mode))
28d81abb 3719 {
3720 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3721 rtx rangertx = expand_expr (range, 0, VOIDmode, 0);
3722
3723 /* We must handle the endpoints in the original mode. */
3724 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3725 index_expr, minval);
3726 minval = integer_zero_node;
3727 index = expand_expr (index_expr, 0, VOIDmode, 0);
3728 emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
3729 emit_jump_insn (gen_bltu (default_label));
3730 /* Now we can safely truncate. */
3731 index = convert_to_mode (index_mode, index, 0);
3732 }
3733 else
3734 {
3735 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3736 index_expr = convert (type_for_size (index_bits, 0),
3737 index_expr);
3738 index = expand_expr (index_expr, 0, VOIDmode, 0);
28d81abb 3739 }
3740 emit_queue ();
3741 index = protect_from_queue (index, 0);
3742 do_pending_stack_adjust ();
3743
3744 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3745 expand_expr (range, 0, VOIDmode, 0),
3746 table_label, default_label));
3747 win = 1;
3748 }
3749#endif
3750#ifdef HAVE_tablejump
3751 if (! win && HAVE_tablejump)
3752 {
3753 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3754 fold (build (MINUS_EXPR,
3755 TREE_TYPE (index_expr),
3756 index_expr, minval)));
3757 index = expand_expr (index_expr, 0, VOIDmode, 0);
3758 emit_queue ();
af2682ef 3759 index = protect_from_queue (index, 0);
3760 do_pending_stack_adjust ();
3761
3762 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3763 expand_expr (range, 0, VOIDmode, 0),
3764 table_label, default_label);
3765 win = 1;
3766 }
3767#endif
3768 if (! win)
3769 abort ();
3770
3771 /* Get table of labels to jump to, in order of case index. */
3772
3773 ncases = TREE_INT_CST_LOW (range) + 1;
3774 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3775 bzero (labelvec, ncases * sizeof (rtx));
3776
3777 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3778 {
3779 register int i
3780 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3781
3782 while (1)
3783 {
3784 labelvec[i]
3785 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3786 if (i + TREE_INT_CST_LOW (minval)
3787 == TREE_INT_CST_LOW (n->high))
3788 break;
3789 i++;
3790 }
3791 }
3792
3793 /* Fill in the gaps with the default. */
3794 for (i = 0; i < ncases; i++)
3795 if (labelvec[i] == 0)
3796 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3797
3798 /* Output the table */
3799 emit_label (table_label);
3800
3801 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3802 were an expression, instead of an #ifdef/#ifndef. */
3803 if (
3804#ifdef CASE_VECTOR_PC_RELATIVE
3805 1 ||
3806#endif
3807 flag_pic)
3808 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3809 gen_rtx (LABEL_REF, Pmode, table_label),
3810 gen_rtvec_v (ncases, labelvec)));
3811 else
3812 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3813 gen_rtvec_v (ncases, labelvec)));
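/* (ADDR_DIFF_VEC stores each label as an offset from TABLE_LABEL,
   so the table itself is position-independent, which is why it is
   used for -fpic and for CASE_VECTOR_PC_RELATIVE targets; ADDR_VEC
   stores absolute label addresses.)  */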
3814
3815 /* If the case insn drops through the table,
3816 after the table we must jump to the default-label.
3817 Otherwise record no drop-through after the table. */
3818#ifdef CASE_DROPS_THROUGH
3819 emit_jump (default_label);
3820#else
3821 emit_barrier ();
3822#endif
3823 }
3824
3825 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3826 reorder_insns (before_case, get_last_insn (),
3827 thiscase->data.case_stmt.start);
3828 }
3829 if (thiscase->exit_label)
3830 emit_label (thiscase->exit_label);
3831
3832 POPSTACK (case_stack);
3833
3834 free_temp_slots ();
3835}
3836
3837/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3838
3839static void
3840do_jump_if_equal (op1, op2, label, unsignedp)
3841 rtx op1, op2, label;
3842 int unsignedp;
3843{
3844 if (GET_CODE (op1) == CONST_INT
3845 && GET_CODE (op2) == CONST_INT)
3846 {
3847 if (INTVAL (op1) == INTVAL (op2))
3848 emit_jump (label);
3849 }
3850 else
3851 {
3852 enum machine_mode mode = GET_MODE (op1);
3853 if (mode == VOIDmode)
3854 mode = GET_MODE (op2);
3855 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3856 emit_jump_insn (gen_beq (label));
3857 }
3858}
3859\f
3860/* Not all case values are encountered equally. This function
3861 uses a heuristic to weight case labels, in cases where that
3862 looks like a reasonable thing to do.
3863
3864 Right now, all we try to guess is text, and we establish the
3865 following weights:
3866
3867 chars above space: 16
3868 digits: 16
3869 default: 12
3870 space, punct: 8
3871 tab: 4
3872 newline: 2
3873 other "\" chars: 1
3874 remaining chars: 0
3875
3876 If we find any cases in the switch that are not either -1 or in the range
3877 of valid ASCII characters, or are control characters other than those
3878 commonly used with "\", don't treat this switch as scanning text.
3879
3880 Return 1 if these nodes are suitable for cost estimation, otherwise
3881 return 0. */
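/* As an invented example, a switch over characters whose cases are
   'a', 'e', 'i', 'o', 'u', ' ' and '\n' stays within [-1, 127]:
   the letters get weight 16, the space 8 and the newline 2, so the
   balance point chosen in balance_case_nodes gravitates toward the
   letters rather than the middle of the list.  */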
3882
3883static int
3884estimate_case_costs (node)
3885 case_node_ptr node;
3886{
3887 tree min_ascii = build_int_2 (-1, -1);
3888 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3889 case_node_ptr n;
3890 int i;
3891
3892 /* If we haven't already made the cost table, make it now. Note that the
3893 lower bound of the table is -1, not zero. */
3894
3895 if (cost_table == NULL)
3896 {
3897 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3898 bzero (cost_table - 1, 129 * sizeof (short));
3899
3900 for (i = 0; i < 128; i++)
3901 {
3902 if (isalnum (i))
3903 cost_table[i] = 16;
3904 else if (ispunct (i))
3905 cost_table[i] = 8;
3906 else if (iscntrl (i))
3907 cost_table[i] = -1;
3908 }
3909
3910 cost_table[' '] = 8;
3911 cost_table['\t'] = 4;
3912 cost_table['\0'] = 4;
3913 cost_table['\n'] = 2;
3914 cost_table['\f'] = 1;
3915 cost_table['\v'] = 1;
3916 cost_table['\b'] = 1;
3917 }
3918
3919 /* See if all the case expressions look like text. It is text if the
3920 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3921 as signed arithmetic since we don't want to ever access cost_table with a
3922 value less than -1. Also check that none of the constants in a range
3923 are strange control characters. */
3924
3925 for (n = node; n; n = n->right)
3926 {
3927 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3928 return 0;
3929
3930 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3931 if (cost_table[i] < 0)
3932 return 0;
3933 }
3934
3935 /* All interesting values are within the range of interesting
3936 ASCII characters. */
3937 return 1;
3938}
3939
3940/* Scan an ordered list of case nodes
3941 combining those with consecutive values or ranges.
3942
3943 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3944
3945static void
3946group_case_nodes (head)
3947 case_node_ptr head;
3948{
3949 case_node_ptr node = head;
3950
3951 while (node)
3952 {
3953 rtx lb = next_real_insn (label_rtx (node->code_label));
3954 case_node_ptr np = node;
3955
3956 /* Try to group the successors of NODE with NODE. */
3957 while (((np = np->right) != 0)
3958 /* Do they jump to the same place? */
3959 && next_real_insn (label_rtx (np->code_label)) == lb
3960 /* Are their ranges consecutive? */
3961 && tree_int_cst_equal (np->low,
3962 fold (build (PLUS_EXPR,
3963 TREE_TYPE (node->high),
3964 node->high,
3965 integer_one_node)))
3966 /* An overflow is not consecutive. */
3967 && tree_int_cst_lt (node->high,
3968 fold (build (PLUS_EXPR,
3969 TREE_TYPE (node->high),
3970 node->high,
3971 integer_one_node))))
3972 {
3973 node->high = np->high;
3974 }
3975 /* NP is the first node after NODE which can't be grouped with it.
3976 Delete the nodes in between, and move on to that node. */
3977 node->right = np;
3978 node = np;
3979 }
3980}
3981
3982/* Take an ordered list of case nodes
3983 and transform them into a near optimal binary tree,
6dc42e49 3984 on the assumption that any target code selection value is as
3985 likely as any other.
3986
3987 The transformation is performed by splitting the ordered
3988 list into two equal sections plus a pivot. The parts are
3989 then attached to the pivot as left and right branches. Each
3990 branch is then transformed recursively. */
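/* For example (values invented), the ordered list

	1: 3: 5: 8: 9:

   with no cost table splits at the middle entry: 5 becomes the
   pivot, {1, 3} its left branch and {8, 9} its right branch; any
   part with more than two entries would be split again the same
   way.  */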
3991
3992static void
3993balance_case_nodes (head, parent)
3994 case_node_ptr *head;
3995 case_node_ptr parent;
3996{
3997 register case_node_ptr np;
3998
3999 np = *head;
4000 if (np)
4001 {
4002 int cost = 0;
4003 int i = 0;
4004 int ranges = 0;
4005 register case_node_ptr *npp;
4006 case_node_ptr left;
4007
4008 /* Count the number of entries on branch. Also count the ranges. */
4009
4010 while (np)
4011 {
4012 if (!tree_int_cst_equal (np->low, np->high))
4013 {
4014 ranges++;
4015 if (use_cost_table)
4016 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4017 }
4018
4019 if (use_cost_table)
4020 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4021
4022 i++;
4023 np = np->right;
4024 }
4025
4026 if (i > 2)
4027 {
4028 /* Split this list if it is long enough for that to help. */
4029 npp = head;
4030 left = *npp;
4031 if (use_cost_table)
4032 {
4033 /* Find the place in the list that bisects the list's total cost;
4034 here I gets half the total cost. */
4035 int n_moved = 0;
4036 i = (cost + 1) / 2;
4037 while (1)
4038 {
4039 /* Skip nodes while their cost does not reach that amount. */
4040 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4041 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4042 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4043 if (i <= 0)
4044 break;
4045 npp = &(*npp)->right;
4046 n_moved += 1;
4047 }
4048 if (n_moved == 0)
4049 {
4050 /* Leave this branch lopsided, but optimize left-hand
4051 side and fill in `parent' fields for right-hand side. */
4052 np = *head;
4053 np->parent = parent;
4054 balance_case_nodes (&np->left, np);
4055 for (; np->right; np = np->right)
4056 np->right->parent = np;
4057 return;
4058 }
4059 }
4060 /* If there are just three nodes, split at the middle one. */
4061 else if (i == 3)
4062 npp = &(*npp)->right;
4063 else
4064 {
4065 /* Find the place in the list that bisects the list's total cost,
4066 where ranges count as 2.
4067 Here I gets half the total cost. */
4068 i = (i + ranges + 1) / 2;
4069 while (1)
4070 {
4071 /* Skip nodes while their cost does not reach that amount. */
4072 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4073 i--;
4074 i--;
4075 if (i <= 0)
4076 break;
4077 npp = &(*npp)->right;
4078 }
4079 }
4080 *head = np = *npp;
4081 *npp = 0;
4082 np->parent = parent;
4083 np->left = left;
4084
4085 /* Optimize each of the two split parts. */
4086 balance_case_nodes (&np->left, np);
4087 balance_case_nodes (&np->right, np);
4088 }
4089 else
4090 {
4091 /* Else leave this branch as one level,
4092 but fill in `parent' fields. */
4093 np = *head;
4094 np->parent = parent;
4095 for (; np->right; np = np->right)
4096 np->right->parent = np;
4097 }
4098 }
4099}
4100\f
4101/* Search the parent sections of the case node tree
4102 to see if a test for the lower bound of NODE would be redundant.
4103 INDEX_TYPE is the type of the index expression.
4104
4105 The instructions to generate the case decision tree are
4106 output in the same order as nodes are processed so it is
4107 known that if a parent node checks the range of the current
4108 node minus one that the current node is bounded at its lower
4109 span. Thus the test would be redundant. */
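/* For example, if some parent already branched away on `index <= 4'
   and this node's low bound is 5, the `index >= 5' test is
   redundant: the loop below finds a parent whose high value equals
   our low bound minus one.  */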
4110
4111static int
4112node_has_low_bound (node, index_type)
4113 case_node_ptr node;
4114 tree index_type;
4115{
4116 tree low_minus_one;
4117 case_node_ptr pnode;
4118
4119 /* If the lower bound of this node is the lowest value in the index type,
4120 we need not test it. */
4121
4122 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4123 return 1;
4124
4125 /* If this node has a left branch, the value at the left must be less
4126 than that at this node, so it cannot be bounded at the bottom and
4127 we need not bother testing any further. */
4128
4129 if (node->left)
4130 return 0;
4131
4132 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4133 node->low, integer_one_node));
4134
4135 /* If the subtraction above overflowed, we can't verify anything.
4136 Otherwise, look for a parent that tests our value - 1. */
4137
4138 if (! tree_int_cst_lt (low_minus_one, node->low))
4139 return 0;
4140
4141 for (pnode = node->parent; pnode; pnode = pnode->parent)
4142 if (tree_int_cst_equal (low_minus_one, pnode->high))
4143 return 1;
4144
4145 return 0;
4146}
4147
4148/* Search the parent sections of the case node tree
4149 to see if a test for the upper bound of NODE would be redundant.
4150 INDEX_TYPE is the type of the index expression.
4151
4152 The instructions to generate the case decision tree are
4153 output in the same order as nodes are processed so it is
4154 known that if a parent node checks the range of the current
4155 node plus one that the current node is bounded at its upper
4156 span. Thus the test would be redundant. */
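/* Symmetrically to node_has_low_bound: if some parent already
   branched away on `index >= 6' and this node's high bound is 5,
   the `index <= 5' test here would be redundant.  */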
4157
4158static int
4159node_has_high_bound (node, index_type)
4160 case_node_ptr node;
4161 tree index_type;
4162{
4163 tree high_plus_one;
4164 case_node_ptr pnode;
4165
4166 /* If the upper bound of this node is the highest value in the type
4167 of the index expression, we need not test against it. */
4168
4169 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4170 return 1;
4171
4172 /* If this node has a right branch, the value at the right must be greater
4173 than that at this node, so it cannot be bounded at the top and
4174 we need not bother testing any further. */
4175
4176 if (node->right)
4177 return 0;
4178
4179 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4180 node->high, integer_one_node));
4181
4182 /* If the addition above overflowed, we can't verify anything.
4183 Otherwise, look for a parent that tests our value + 1. */
4184
4185 if (! tree_int_cst_lt (node->high, high_plus_one))
4186 return 0;
4187
4188 for (pnode = node->parent; pnode; pnode = pnode->parent)
4189 if (tree_int_cst_equal (high_plus_one, pnode->low))
4190 return 1;
4191
4192 return 0;
4193}
4194
4195/* Search the parent sections of the
4196 case node tree to see if both tests for the upper and lower
4197 bounds of NODE would be redundant. */
4198
4199static int
4200node_is_bounded (node, index_type)
4201 case_node_ptr node;
4202 tree index_type;
4203{
4204 return (node_has_low_bound (node, index_type)
4205 && node_has_high_bound (node, index_type));
4206}
4207
4208/* Emit an unconditional jump to LABEL unless it would be dead code. */
4209
4210static void
4211emit_jump_if_reachable (label)
4212 rtx label;
4213{
4214 if (GET_CODE (get_last_insn ()) != BARRIER)
4215 emit_jump (label);
4216}
4217\f
4218/* Emit step-by-step code to select a case for the value of INDEX.
4219 The thus generated decision tree follows the form of the
4220 case-node binary tree NODE, whose nodes represent test conditions.
4221 INDEX_TYPE is the type of the index of the switch.
4222
4223 Care is taken to prune redundant tests from the decision tree
4224 by detecting any boundary conditions already checked by
4225 emitted rtx. (See node_has_high_bound, node_has_low_bound
4226 and node_is_bounded, above.)
4227
4228 Where the test conditions can be shown to be redundant we emit
4229 an unconditional jump to the target code. As a further
4230 optimization, the subordinates of a tree node are examined to
4231 check for bounded nodes. In this case conditional and/or
4232 unconditional jumps as a result of the boundary check for the
4233 current node are arranged to target the subordinate's associated
4234 code for out-of-bound conditions on the current node.
4235
4236 We can assume that when control reaches the code generated here,
4237 the index value has already been compared with the parents
4238 of this node, and determined to be on the same side of each parent
4239 as this node is. Thus, if this node tests for the value 51,
4240 and a parent tested for 52, we don't need to consider
4241 the possibility of a value greater than 51. If another parent
4242 tests for the value 50, then this node need not test anything. */
4243
4244static void
4245emit_case_nodes (index, node, default_label, index_type)
4246 rtx index;
4247 case_node_ptr node;
4248 rtx default_label;
4249 tree index_type;
4250{
4251 /* If INDEX has an unsigned type, we must make unsigned branches. */
4252 int unsignedp = TREE_UNSIGNED (index_type);
4253 typedef rtx rtx_function ();
4254 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4255 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4256 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4257 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4258 enum machine_mode mode = GET_MODE (index);
4259
4260 /* See if our parents have already tested everything for us.
4261 If they have, emit an unconditional jump for this node. */
4262 if (node_is_bounded (node, index_type))
4263 emit_jump (label_rtx (node->code_label));
4264
4265 else if (tree_int_cst_equal (node->low, node->high))
4266 {
4267 /* Node is single valued. First see if the index expression matches
4268 this node and then check our children, if any. */
4269
4270 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4271 label_rtx (node->code_label), unsignedp);
4272
4273 if (node->right != 0 && node->left != 0)
4274 {
4275 /* This node has children on both sides.
4276 Dispatch to one side or the other
4277 by comparing the index value with this node's value.
4278 If one subtree is bounded, check that one first,
4279 so we can avoid real branches in the tree. */
4280
4281 if (node_is_bounded (node->right, index_type))
4282 {
4283 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4284 GT, 0, mode, unsignedp, 0);
4285
4286 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4287 emit_case_nodes (index, node->left, default_label, index_type);
4288 }
4289
4290 else if (node_is_bounded (node->left, index_type))
4291 {
4292 emit_cmp_insn (index, expand_expr (node->high, 0,
4293 VOIDmode, 0),
4294 LT, 0, mode, unsignedp, 0);
4295 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4296 emit_case_nodes (index, node->right, default_label, index_type);
4297 }
4298
4299 else
4300 {
4301 /* Neither node is bounded. First distinguish the two sides;
4302 then emit the code for one side at a time. */
4303
4304 tree test_label
4305 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4306
4307 /* See if the value is on the right. */
4308 emit_cmp_insn (index, expand_expr (node->high, 0,
4309 VOIDmode, 0),
4310 GT, 0, mode, unsignedp, 0);
4311 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4312
4313 /* Value must be on the left.
4314 Handle the left-hand subtree. */
4315 emit_case_nodes (index, node->left, default_label, index_type);
4316 /* If left-hand subtree does nothing,
4317 go to default. */
4318 emit_jump_if_reachable (default_label);
4319
4320 /* Code branches here for the right-hand subtree. */
4321 expand_label (test_label);
4322 emit_case_nodes (index, node->right, default_label, index_type);
4323 }
4324 }
4325
4326 else if (node->right != 0 && node->left == 0)
4327 {
4328 /* Here we have a right child but no left so we issue a conditional
4329 branch to default and process the right child.
4330
4331 Omit the conditional branch to default if all it would avoid is a
4332 single right child; it costs too much space to save so little time. */
4333
de14fd73 4334 if (node->right->right || node->right->left
4335 || !tree_int_cst_equal (node->right->low, node->right->high))
4336 {
4337 if (!node_has_low_bound (node, index_type))
4338 {
4339 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4340 LT, 0, mode, unsignedp, 0);
4341 emit_jump_insn ((*gen_blt_pat) (default_label));
4342 }
4343
4344 emit_case_nodes (index, node->right, default_label, index_type);
4345 }
4346 else
4347 /* We cannot process node->right normally
4348 since we haven't ruled out the numbers less than
4349 this node's value. So handle node->right explicitly. */
4350 do_jump_if_equal (index,
4351 expand_expr (node->right->low, 0, VOIDmode, 0),
4352 label_rtx (node->right->code_label), unsignedp);
4353 }
4354
4355 else if (node->right == 0 && node->left != 0)
4356 {
4357 /* Just one subtree, on the left. */
4358
4359#if 0 /* The following code and comment were formerly part
4360 of the condition here, but they didn't work
4361 and I don't understand what the idea was. -- rms. */
4362 /* If our "most probable entry" is less probable
4363 than the default label, emit a jump to
4364 the default label using condition codes
4365 already lying around. With no right branch,
4366 a branch-greater-than will get us to the default
4367 label correctly. */
4368 if (use_cost_table
4369 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4370 ;
4371#endif /* 0 */
4372 if (node->left->left || node->left->right
4373 || !tree_int_cst_equal (node->left->low, node->left->high))
4374 {
4375 if (!node_has_high_bound (node, index_type))
4376 {
4377 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4378 GT, 0, mode, unsignedp, 0);
4379 emit_jump_insn ((*gen_bgt_pat) (default_label));
4380 }
4381
4382 emit_case_nodes (index, node->left, default_label, index_type);
4383 }
4384 else
4385 /* We cannot process node->left normally
4386 since we haven't ruled out the numbers greater than
4387 this node's value. So handle node->left explicitly. */
4388 do_jump_if_equal (index,
4389 expand_expr (node->left->low, 0, VOIDmode, 0),
4390 label_rtx (node->left->code_label), unsignedp);
4391 }
4392 }
4393 else
4394 {
4395 /* Node is a range. These cases are very similar to those for a single
4396 value, except that we do not start by testing whether this node
4397 is the one to branch to. */
4398
4399 if (node->right != 0 && node->left != 0)
4400 {
4401 /* Node has subtrees on both sides.
4402 If the right-hand subtree is bounded,
4403 test for it first, since we can go straight there.
4404 Otherwise, we need to make a branch in the control structure,
4405 then handle the two subtrees. */
4406 tree test_label = 0;
4407
4408 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4409 GT, 0, mode, unsignedp, 0);
4410
4411 if (node_is_bounded (node->right, index_type))
4412 /* Right hand node is fully bounded so we can eliminate any
4413 testing and branch directly to the target code. */
4414 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4415 else
4416 {
4417 /* Right hand node requires testing.
4418 Branch to a label where we will handle it later. */
4419
4420 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4421 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4422 }
4423
4424 /* Value belongs to this node or to the left-hand subtree. */
4425
4426 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4427 GE, 0, mode, unsignedp, 0);
4428 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4429
4430 /* Handle the left-hand subtree. */
4431 emit_case_nodes (index, node->left, default_label, index_type);
4432
4433 /* If right node had to be handled later, do that now. */
4434
4435 if (test_label)
4436 {
4437 /* If the left-hand subtree fell through,
4438 don't let it fall into the right-hand subtree. */
4439 emit_jump_if_reachable (default_label);
4440
4441 expand_label (test_label);
4442 emit_case_nodes (index, node->right, default_label, index_type);
4443 }
4444 }
4445
4446 else if (node->right != 0 && node->left == 0)
4447 {
4448 /* Deal with values to the left of this node,
4449 if they are possible. */
4450 if (!node_has_low_bound (node, index_type))
4451 {
4452 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4453 LT, 0, mode, unsignedp, 0);
4454 emit_jump_insn ((*gen_blt_pat) (default_label));
4455 }
4456
4457 /* Value belongs to this node or to the right-hand subtree. */
4458
4459 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4460 LE, 0, mode, unsignedp, 0);
4461 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4462
4463 emit_case_nodes (index, node->right, default_label, index_type);
4464 }
4465
4466 else if (node->right == 0 && node->left != 0)
4467 {
4468 /* Deal with values to the right of this node,
4469 if they are possible. */
4470 if (!node_has_high_bound (node, index_type))
4471 {
4472 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4473 GT, 0, mode, unsignedp, 0);
4474 emit_jump_insn ((*gen_bgt_pat) (default_label));
4475 }
4476
4477 /* Value belongs to this node or to the left-hand subtree. */
4478
4479 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4480 GE, 0, mode, unsignedp, 0);
4481 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4482
4483 emit_case_nodes (index, node->left, default_label, index_type);
4484 }
4485
4486 else
4487 {
4488 /* Node has no children so we check low and high bounds to remove
4489 redundant tests. Only one of the bounds can exist,
4490 since otherwise this node is bounded--a case tested already. */
4491
4492 if (!node_has_high_bound (node, index_type))
4493 {
4494 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4495 GT, 0, mode, unsignedp, 0);
4496 emit_jump_insn ((*gen_bgt_pat) (default_label));
4497 }
4498
4499 if (!node_has_low_bound (node, index_type))
4500 {
4501 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4502 LT, 0, mode, unsignedp, 0);
4503 emit_jump_insn ((*gen_blt_pat) (default_label));
4504 }
4505
4506 emit_jump (label_rtx (node->code_label));
4507 }
4508 }
4509}
4510\f
4511/* These routines are used by the loop unrolling code. They copy BLOCK trees
4512 so that the debugging info will be correct for the unrolled loop. */
4513
4514/* Indexed by loop number, contains pointer to the first block in the loop,
4515 or zero if none. Only valid if doing loop unrolling and outputting debugger
4516 info. */
4517
4518tree *loop_number_first_block;
4519
4520/* Indexed by loop number, contains pointer to the last block in the loop,
4521 only valid if loop_number_first_block is nonzero. */
4522
4523tree *loop_number_last_block;
4524
4525/* Indexed by loop number, contains nesting level of first block in the
4526 loop, if any. Only valid if doing loop unrolling and outputting debugger
4527 info. */
4528
4529int *loop_number_block_level;
4530
4531/* Scan the function looking for loops, and walk the BLOCK tree at the
4532 same time. Record the first and last BLOCK tree corresponding to each
4533 loop. This function is similar to find_and_verify_loops in loop.c. */
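/* Sketch of the note stream this walk expects (loop invented for
   illustration):

	NOTE_INSN_LOOP_BEG		loop 0 starts
	  NOTE_INSN_BLOCK_BEG		first (and here last) block
	  ...
	  NOTE_INSN_BLOCK_END
	NOTE_INSN_LOOP_END

   Afterward loop_number_first_block[0] and loop_number_last_block[0]
   both point at that BLOCK, and loop_number_block_level[0] holds the
   nesting depth seen at the LOOP_BEG note.  */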
4534
4535void
4536find_loop_tree_blocks (f)
4537 rtx f;
4538{
4539 rtx insn;
4540 int current_loop = -1;
4541 int next_loop = -1;
4542 int loop;
4543 int block_level, tree_level;
4544 tree tree_block, parent_tree_block;
4545
4546 tree_block = DECL_INITIAL (current_function_decl);
4547 parent_tree_block = 0;
4548 block_level = 0;
4549 tree_level = -1;
4550
4551 /* Find boundaries of loops, and save the first and last BLOCK tree
4552 corresponding to each loop. */
4553
4554 for (insn = f; insn; insn = NEXT_INSN (insn))
4555 {
4556 if (GET_CODE (insn) == NOTE)
4557 switch (NOTE_LINE_NUMBER (insn))
4558 {
4559 case NOTE_INSN_LOOP_BEG:
4560 loop_number_block_level[++next_loop] = block_level;
4561 loop_number_first_block[next_loop] = 0;
4562 current_loop = next_loop;
4563 break;
4564
4565 case NOTE_INSN_LOOP_END:
4566 if (current_loop == -1)
4567 abort ();
4568
4569 current_loop = loop_outer_loop[current_loop];
4570 break;
4571
4572 case NOTE_INSN_BLOCK_BEG:
4573 if (tree_level < block_level)
4574 {
4575 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4576 we must now visit the subtree of the current block. */
4577 parent_tree_block = tree_block;
4578 tree_block = BLOCK_SUBBLOCKS (tree_block);
4579 tree_level++;
4580 }
4581 else if (tree_level > block_level)
4582 abort ();
4583
4584 /* Save this block tree here for all nested loops for which
4585 this is the topmost block. */
4586 for (loop = current_loop;
4587 loop != -1 && block_level == loop_number_block_level[loop];
4588 loop = loop_outer_loop[loop])
4589 {
4590 if (loop_number_first_block[loop] == 0)
4591 loop_number_first_block[loop] = tree_block;
4592 loop_number_last_block[loop] = tree_block;
4593 }
4594
4595 block_level++;
4596 break;
4597
4598 case NOTE_INSN_BLOCK_END:
4599 block_level--;
4600 if (tree_level > block_level)
4601 {
4602 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4603 we must now visit the parent of the current tree. */
4604 if (tree_block != 0 || parent_tree_block == 0)
4605 abort ();
4606 tree_block = parent_tree_block;
4607 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4608 tree_level--;
4609 }
4610 tree_block = BLOCK_CHAIN (tree_block);
4611 break;
4612 }
4613 }
4614}
4615
4616/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4617 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4618
4619 Note that we only copy the topmost level of tree nodes; they will share
4620 pointers to the same subblocks. */
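/* For instance, with COPIES == 3 and a loop whose first and last
   blocks are B1 and B2, two extra copies of the B1..B2 chain are
   inserted after B2:

	B1 ... B2  B1' ... B2'  B1'' ... B2''

   and loop_number_last_block for this loop (and for any enclosing
   loops that also ended at B2) is updated to point at B2''.  */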
4621
4622void
4623unroll_block_trees (loop_number, copies)
4624 int loop_number;
4625 int copies;
4626{
4627 int i;
4628
4629 /* First check whether there are any blocks that need to be copied. */
4630 if (loop_number_first_block[loop_number])
4631 {
4632 tree first_block = loop_number_first_block[loop_number];
4633 tree last_block = loop_number_last_block[loop_number];
4634 tree last_block_created = 0;
4635
4636 for (i = 0; i < copies - 1; i++)
4637 {
4638 tree block = first_block;
4639 tree insert_after = last_block;
4640 tree copied_block;
4641
4642 /* Copy every block between first_block and last_block inclusive,
4643 inserting the new blocks after last_block. */
4644 do
4645 {
4646 tree new_block = make_node (BLOCK);
4647 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4648 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4649 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4650 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4651 TREE_USED (new_block) = TREE_USED (block);
4652
4653 /* Insert the new block after the insertion point, and move
4654 the insertion point to the new block. This ensures that
4655 the copies are inserted in the right order. */
4656 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4657 BLOCK_CHAIN (insert_after) = new_block;
4658 insert_after = new_block;
4659
4660 copied_block = block;
4661 block = BLOCK_CHAIN (block);
4662 }
4663 while (copied_block != last_block);
4664
4665 /* Remember the last block created, so that we can update the
4666 info in the tables. */
4667 if (last_block_created == 0)
4668 last_block_created = insert_after;
4669 }
4670
4671 /* For all nested loops for which LAST_BLOCK was originally the last
4672 block, update the tables to indicate that LAST_BLOCK_CREATED is
4673 now the last block in the loop. */
4674 for (i = loop_number; last_block == loop_number_last_block[i];
4675 i = loop_outer_loop[i])
4676 loop_number_last_block[i] = last_block_created;
4677 }
4678}