/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

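/* As a concrete illustration of that protocol (a sketch only; a real
   front end also emits line-number notes and handles `else' clauses),
   a C front end expands `if (cond) stmt;' roughly as:

	expand_start_cond (cond, 0);	(emit jump past body if COND false)
	...expand rtl for STMT...
	expand_end_cond ();		(define the label that jump targets)

   The 0 passed to `expand_start_cond' is the EXITFLAG argument
   documented at its definition below.  */
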
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function,
   if not optimizing.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

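/* For example (illustrative only), `switch (x) { case 2: ...
   case 5: ...  case 9: ... }' initially yields this chain, linked
   through the RIGHT fields in ascending order:

	{2,2} -> {5,5} -> {9,9}

   If compare/jump insns are chosen rather than a branch table,
   balance_case_nodes could make {5,5} the root, with {2,2} as its LEFT
   child and {9,9} as its RIGHT child, so that the emitted comparisons
   form a binary search.  */
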
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

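/* A source-level example (illustrative): given the GNU C fragment

	{
	  int arr[n];	(a variable-sized array, so this binding contour
			 resets the stack level on exit)
	  ...
	  goto out;
	}
	...
      out: ;

   the goto cannot be fully expanded where it appears, because exiting the
   block must restore the stack pointer and `out' has not been defined yet;
   expand_fixup therefore records it here, and fixup_gotos finishes the job
   when the contour is exited.  */
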
static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
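
/* For example (an illustrative sketch), with the GNU C label-address
   extension

	void *ptr = &&lab;
	...
	goto *ptr;

   the front end calls expand_computed_goto with EXP being the tree
   for `ptr'.  */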
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

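/* For example (illustrative), with GNU C nested functions:

	void f ()
	{
	  void g () { goto out; }
	  g ();
	 out: ;
	}

   `out' is a nonlocal label: the front end calls declare_nonlocal_label
   for it while compiling f, and the goto in g is expanded by the
   nonlocal branch of expand_goto below.  */
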
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, 0);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, 0);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

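/* For instance (an illustrative sketch of typical front-end input), the
   GNU C statement

	asm volatile ("addl %1,%0" : "=r" (sum) : "g" (delta) : "cc");

   arrives here with STRING "addl %1,%0", one output whose constraint is
   "=r", one input whose constraint is "g", a CLOBBERS list naming "cc",
   and VOL nonzero.  */
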
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      0, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

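/* For example (illustrative): the statement `f () + 1;' has a side effect,
   so expand_expr_stmt does not report "statement with no effect", but the
   sum is discarded, so this function reports "value computed is not used";
   whereas `(f (), 0);' is deliberately exempted (see the COMPOUND_EXPR
   case below).  */
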
static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

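/* For example (illustrative), the GNU C statement expression

	z = ({ int y = f (); y + 3; });

   is expanded by calling expand_start_stmt_expr, expanding the inner
   statements with expr_stmts_for_value nonzero, and then passing the
   saved RTL_EXPR to expand_end_stmt_expr below; the construct's value
   is that of the last expr-stmt, `y + 3'.  */
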
tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate an RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }  */

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */
int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}
\f
1724/* Generate RTL for the start of an if-then. COND is the expression
1725 whose truth should be tested.
1726
1727 If EXITFLAG is nonzero, this conditional is visible to
1728 `exit_something'. */
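/* As an illustrative sketch (front-end code assumed, not defined in
   this file), expanding `if (c) s1; else s2;' amounts to:

	expand_start_cond (c, 0);
	... expand s1 ...
	expand_start_else ();
	... expand s2 ...
	expand_end_cond ();

   with expand_start_elseif taking the place of expand_start_else
   for each `else if' arm.  */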
1729
1730void
1731expand_start_cond (cond, exitflag)
1732 tree cond;
1733 int exitflag;
1734{
1735 struct nesting *thiscond = ALLOC_NESTING ();
1736
1737 /* Make an entry on cond_stack for the cond we are entering. */
1738
1739 thiscond->next = cond_stack;
1740 thiscond->all = nesting_stack;
1741 thiscond->depth = ++nesting_depth;
1742 thiscond->data.cond.next_label = gen_label_rtx ();
1743 /* Before we encounter an `else', we don't need a separate exit label
1744 unless there are supposed to be exit statements
1745 to exit this conditional. */
1746 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1747 thiscond->data.cond.endif_label = thiscond->exit_label;
1748 cond_stack = thiscond;
1749 nesting_stack = thiscond;
1750
1751 do_jump (cond, thiscond->data.cond.next_label, NULL);
1752}
1753
1754/* Generate RTL between then-clause and the elseif-clause
1755 of an if-then-elseif-.... */
1756
1757void
1758expand_start_elseif (cond)
1759 tree cond;
1760{
1761 if (cond_stack->data.cond.endif_label == 0)
1762 cond_stack->data.cond.endif_label = gen_label_rtx ();
1763 emit_jump (cond_stack->data.cond.endif_label);
1764 emit_label (cond_stack->data.cond.next_label);
1765 cond_stack->data.cond.next_label = gen_label_rtx ();
1766 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1767}
1768
1769/* Generate RTL between the then-clause and the else-clause
1770 of an if-then-else. */
1771
1772void
1773expand_start_else ()
1774{
1775 if (cond_stack->data.cond.endif_label == 0)
1776 cond_stack->data.cond.endif_label = gen_label_rtx ();
1777 emit_jump (cond_stack->data.cond.endif_label);
1778 emit_label (cond_stack->data.cond.next_label);
1779 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1780}
1781
1782/* Generate RTL for the end of an if-then.
1783 Pop the record for it off of cond_stack. */
1784
1785void
1786expand_end_cond ()
1787{
1788 struct nesting *thiscond = cond_stack;
1789
1790 do_pending_stack_adjust ();
1791 if (thiscond->data.cond.next_label)
1792 emit_label (thiscond->data.cond.next_label);
1793 if (thiscond->data.cond.endif_label)
1794 emit_label (thiscond->data.cond.endif_label);
1795
1796 POPSTACK (cond_stack);
1797 last_expr_type = 0;
1798}
1799\f
1800/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1801 loop should be exited by `exit_something'. This is a loop for which
1802 `expand_continue' will jump to the top of the loop.
1803
1804 Make an entry on loop_stack to record the labels associated with
1805 this loop. */
1806
1807struct nesting *
1808expand_start_loop (exit_flag)
1809 int exit_flag;
1810{
1811 register struct nesting *thisloop = ALLOC_NESTING ();
1812
1813 /* Make an entry on loop_stack for the loop we are entering. */
1814
1815 thisloop->next = loop_stack;
1816 thisloop->all = nesting_stack;
1817 thisloop->depth = ++nesting_depth;
1818 thisloop->data.loop.start_label = gen_label_rtx ();
1819 thisloop->data.loop.end_label = gen_label_rtx ();
1820 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1821 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1822 loop_stack = thisloop;
1823 nesting_stack = thisloop;
1824
1825 do_pending_stack_adjust ();
1826 emit_queue ();
1827 emit_note (0, NOTE_INSN_LOOP_BEG);
1828 emit_label (thisloop->data.loop.start_label);
1829
1830 return thisloop;
1831}
1832
1833/* Like expand_start_loop but for a loop where the continuation point
1834 (for expand_continue_loop) will be specified explicitly. */
1835
1836struct nesting *
1837expand_start_loop_continue_elsewhere (exit_flag)
1838 int exit_flag;
1839{
1840 struct nesting *thisloop = expand_start_loop (exit_flag);
1841 loop_stack->data.loop.continue_label = gen_label_rtx ();
1842 return thisloop;
1843}
1844
1845/* Specify the continuation point for a loop started with
1846 expand_start_loop_continue_elsewhere.
1847 Use this at the point in the code to which a continue statement
1848 should jump. */
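/* Sketch (assumed front-end usage): a C `for (init; cond; incr) body'
   whose continue point must precede INCR could be expanded as

	... expand INIT ...
	whichloop = expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (whichloop, cond);
	... expand BODY ...
	expand_loop_continue_here ();
	... expand INCR ...
	expand_end_loop ();

   so that `continue' jumps to the INCR code, not the top.  */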
1849
1850void
1851expand_loop_continue_here ()
1852{
1853 do_pending_stack_adjust ();
1854 emit_note (0, NOTE_INSN_LOOP_CONT);
1855 emit_label (loop_stack->data.loop.continue_label);
1856}
1857
1858/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1859 Pop the block off of loop_stack. */
1860
1861void
1862expand_end_loop ()
1863{
1864 register rtx insn = get_last_insn ();
1865 register rtx start_label = loop_stack->data.loop.start_label;
1866 rtx last_test_insn = 0;
1867 int num_insns = 0;
1868
1869 /* Mark the continue-point at the top of the loop if none elsewhere. */
1870 if (start_label == loop_stack->data.loop.continue_label)
1871 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1872
1873 do_pending_stack_adjust ();
1874
1875 /* If optimizing, perhaps reorder the loop. If the loop
1876 starts with a conditional exit, roll that to the end
1877 where it will optimize together with the jump back.
1878
1879 We look for the last conditional branch to the exit that we encounter
1880 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1881 branch to the exit first, use it.
1882
1883 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1884 because moving them is not valid. */
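/* In outline (control flow only; the real work is on insns), the
   rotation below turns

	start: if (! cond) goto end;  BODY;  goto start;  end:

   into

	goto start;  new: BODY;  start: if (! cond) goto end;  goto new;  end:

   where the trailing `if ... goto end; goto new' pair can later be
   fused into a single conditional jump back, so each iteration runs
   one jump rather than a test at the top plus a jump at the bottom.  */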
1885
1886 if (optimize
1887 &&
1888 ! (GET_CODE (insn) == JUMP_INSN
1889 && GET_CODE (PATTERN (insn)) == SET
1890 && SET_DEST (PATTERN (insn)) == pc_rtx
1891 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1892 {
1893 /* Scan insns from the top of the loop looking for a qualified
1894 conditional exit. */
1895 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1896 insn = NEXT_INSN (insn))
1897 {
1898 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1899 break;
1900
1901 if (GET_CODE (insn) == NOTE
1902 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1903 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1904 break;
1905
1906 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1907 num_insns++;
1908
1909 if (last_test_insn && num_insns > 30)
1910 break;
1911
1912 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1913 && SET_DEST (PATTERN (insn)) == pc_rtx
1914 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1915 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1916 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1917 == loop_stack->data.loop.end_label))
1918 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1919 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1920 == loop_stack->data.loop.end_label))))
1921 last_test_insn = insn;
1922
1923 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1924 && GET_CODE (PATTERN (insn)) == SET
1925 && SET_DEST (PATTERN (insn)) == pc_rtx
1926 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1927 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1928 == loop_stack->data.loop.end_label))
1929 /* Include BARRIER. */
1930 last_test_insn = NEXT_INSN (insn);
1931 }
1932
1933 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1934 {
1935 /* We found one. Move everything from there up
1936 to the end of the loop, and add a jump into the loop
1937 to jump to there. */
1938 register rtx newstart_label = gen_label_rtx ();
1939 register rtx start_move = start_label;
1940
1941 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1942 then we want to move this note also. */
1943 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1944 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1945 == NOTE_INSN_LOOP_CONT))
1946 start_move = PREV_INSN (start_move);
1947
1948 emit_label_after (newstart_label, PREV_INSN (start_move));
1949 reorder_insns (start_move, last_test_insn, get_last_insn ());
1950 emit_jump_insn_after (gen_jump (start_label),
1951 PREV_INSN (newstart_label));
1952 emit_barrier_after (PREV_INSN (newstart_label));
1953 start_label = newstart_label;
1954 }
1955 }
1956
1957 emit_jump (start_label);
1958 emit_note (0, NOTE_INSN_LOOP_END);
1959 emit_label (loop_stack->data.loop.end_label);
1960
1961 POPSTACK (loop_stack);
1962
1963 last_expr_type = 0;
1964}
1965
1966/* Generate a jump to the current loop's continue-point.
1967 This is usually the top of the loop, but may be specified
1968 explicitly elsewhere. If not currently inside a loop,
1969 return 0 and do nothing; caller will print an error message. */
1970
1971int
1972expand_continue_loop (whichloop)
1973 struct nesting *whichloop;
1974{
1975 last_expr_type = 0;
1976 if (whichloop == 0)
1977 whichloop = loop_stack;
1978 if (whichloop == 0)
1979 return 0;
1980 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1981 return 1;
1982}
1983
1984/* Generate a jump to exit the current loop. If not currently inside a loop,
1985 return 0 and do nothing; caller will print an error message. */
1986
1987int
1988expand_exit_loop (whichloop)
1989 struct nesting *whichloop;
1990{
1991 last_expr_type = 0;
1992 if (whichloop == 0)
1993 whichloop = loop_stack;
1994 if (whichloop == 0)
1995 return 0;
1996 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1997 return 1;
1998}
1999
2000/* Generate a conditional jump to exit the current loop if COND
2001 evaluates to zero. If not currently inside a loop,
2002 return 0 and do nothing; caller will print an error message. */
2003
2004int
2005expand_exit_loop_if_false (whichloop, cond)
2006 struct nesting *whichloop;
2007 tree cond;
2008{
2009 last_expr_type = 0;
2010 if (whichloop == 0)
2011 whichloop = loop_stack;
2012 if (whichloop == 0)
2013 return 0;
2014 do_jump (cond, whichloop->data.loop.end_label, NULL);
2015 return 1;
2016}
2017
2018/* Return non-zero if we should preserve sub-expressions as separate
2019 pseudos. We never do so if we aren't optimizing. We always do so
2020 if -fexpensive-optimizations.
2021
2022 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2023 the loop may still be a small one. */
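/* Illustrative arithmetic (not a guarantee): with, say, 30 non-fixed
   registers, the test below preserves subexpressions only while the
   current insn's UID is within about 90 of the loop's start label,
   i.e. roughly the first 90 insns of the loop.  */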
2024
2025int
2026preserve_subexpressions_p ()
2027{
2028 rtx insn;
2029
2030 if (flag_expensive_optimizations)
2031 return 1;
2032
2033 if (optimize == 0 || loop_stack == 0)
2034 return 0;
2035
2036 insn = get_last_insn_anywhere ();
2037
2038 return (insn
2039 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2040 < n_non_fixed_regs * 3));
2041
2042}
2043
2044/* Generate a jump to exit the current loop, conditional, binding contour
2045 or case statement. Not all such constructs are visible to this function,
2046 only those started with EXIT_FLAG nonzero. Individual languages use
2047 the EXIT_FLAG parameter to control which kinds of constructs you can
2048 exit this way.
2049
2050 If not currently inside anything that can be exited,
2051 return 0 and do nothing; caller will print an error message. */
2052
2053int
2054expand_exit_something ()
2055{
2056 struct nesting *n;
2057 last_expr_type = 0;
2058 for (n = nesting_stack; n; n = n->all)
2059 if (n->exit_label != 0)
2060 {
2061 expand_goto_internal (0, n->exit_label, 0);
2062 return 1;
2063 }
2064
2065 return 0;
2066}
2067\f
2068/* Generate RTL to return from the current function, with no value.
2069 (That is, we do not do anything about returning any value.) */
2070
2071void
2072expand_null_return ()
2073{
2074 struct nesting *block = block_stack;
2075 rtx last_insn = 0;
2076
2077 /* Does any pending block have cleanups? */
2078
2079 while (block && block->data.block.cleanups == 0)
2080 block = block->next;
2081
2082 /* If yes, use a goto to return, since that runs cleanups. */
2083
2084 expand_null_return_1 (last_insn, block != 0);
2085}
2086
2087/* Generate RTL to return from the current function, with value VAL. */
2088
2089void
2090expand_value_return (val)
2091 rtx val;
2092{
2093 struct nesting *block = block_stack;
2094 rtx last_insn = get_last_insn ();
2095 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2096
2097 /* Copy the value to the return location
2098 unless it's already there. */
2099
2100 if (return_reg != val)
2101 emit_move_insn (return_reg, val);
2102 if (GET_CODE (return_reg) == REG
2103 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2104 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2105
2106 /* Does any pending block have cleanups? */
2107
2108 while (block && block->data.block.cleanups == 0)
2109 block = block->next;
2110
2111 /* If yes, use a goto to return, since that runs cleanups.
2112 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2113
2114 expand_null_return_1 (last_insn, block != 0);
2115}
2116
2117/* Output a return with no value. If LAST_INSN is nonzero,
2118 pretend that the return takes place after LAST_INSN.
2119 If USE_GOTO is nonzero then don't use a return instruction;
2120 go to the return label instead. This causes any cleanups
2121 of pending blocks to be executed normally. */
2122
2123static void
2124expand_null_return_1 (last_insn, use_goto)
2125 rtx last_insn;
2126 int use_goto;
2127{
2128 rtx end_label = cleanup_label ? cleanup_label : return_label;
2129
2130 clear_pending_stack_adjust ();
2131 do_pending_stack_adjust ();
2132 last_expr_type = 0;
2133
2134 /* PCC-struct return always uses an epilogue. */
2135 if (current_function_returns_pcc_struct || use_goto)
2136 {
2137 if (end_label == 0)
2138 end_label = return_label = gen_label_rtx ();
2139 expand_goto_internal (0, end_label, last_insn);
2140 return;
2141 }
2142
2143 /* Otherwise output a simple return-insn if one is available,
2144 unless it won't do the job. */
2145#ifdef HAVE_return
2146 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2147 {
2148 emit_jump_insn (gen_return ());
2149 emit_barrier ();
2150 return;
2151 }
2152#endif
2153
2154 /* Otherwise jump to the epilogue. */
2155 expand_goto_internal (0, end_label, last_insn);
2156}
2157\f
2158/* Generate RTL to evaluate the expression RETVAL and return it
2159 from the current function. */
2160
2161void
2162expand_return (retval)
2163 tree retval;
2164{
2165 /* If there are any cleanups to be performed, then they will
2166 be inserted following LAST_INSN. It is desirable
2167 that the last_insn, for such purposes, should be the
2168 last insn before computing the return value. Otherwise, cleanups
2169 which call functions can clobber the return value. */
2170 /* ??? rms: I think that is erroneous, because in C++ it would
2171 run destructors on variables that might be used in the subsequent
2172 computation of the return value. */
2173 rtx last_insn = 0;
2174 register rtx val = 0;
2175 register rtx op0;
2176 tree retval_rhs;
2177 int cleanups;
2178 struct nesting *block;
2179
2180 /* If function wants no value, give it none. */
2181 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2182 {
2183 expand_expr (retval, 0, VOIDmode, 0);
2184 expand_null_return ();
2185 return;
2186 }
2187
2188 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2189 cleanups = any_pending_cleanups (1);
2190
2191 if (TREE_CODE (retval) == RESULT_DECL)
2192 retval_rhs = retval;
2193 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2194 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2195 retval_rhs = TREE_OPERAND (retval, 1);
2196 else if (TREE_TYPE (retval) == void_type_node)
2197 /* Recognize tail-recursive call to void function. */
2198 retval_rhs = retval;
2199 else
2200 retval_rhs = NULL_TREE;
2201
2202 /* Only use `last_insn' if there are cleanups which must be run. */
2203 if (cleanups || cleanup_label != 0)
2204 last_insn = get_last_insn ();
2205
2206 /* Distribute return down conditional expr if either of the sides
2207 may involve tail recursion (see test below). This enhances the number
2208 of tail recursions we see. Don't do this always since it can produce
2209 sub-optimal code in some cases and we distribute assignments into
2210 conditional expressions when it would help. */
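/* Sketch: `return p ? f (x) : 0;' is rewritten here as if it were
   `if (p) return f (x); else return 0;', so the call to f can be
   recognized as a tail call on its own branch (p, f and x are
   assumed example names).  */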
2211
2212 if (optimize && retval_rhs != 0
2213 && frame_offset == 0
2214 && TREE_CODE (retval_rhs) == COND_EXPR
2215 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2216 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2217 {
2218 rtx label = gen_label_rtx ();
2219 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2220 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2221 DECL_RESULT (current_function_decl),
2222 TREE_OPERAND (retval_rhs, 1)));
2223 emit_label (label);
2224 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2225 DECL_RESULT (current_function_decl),
2226 TREE_OPERAND (retval_rhs, 2)));
2227 return;
2228 }
2229
2230 /* For tail-recursive call to current function,
2231 just jump back to the beginning.
2232 It's unsafe if any auto variable in this function
2233 has its address taken; for simplicity,
2234 require stack frame to be empty. */
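/* Sketch of a function this catches (assumed example, not from this
   file):

	int gcd (a, b)
	     int a, b;
	{
	  if (b == 0)
	    return a;
	  return gcd (b, a % b);
	}

   With no addressable locals and an empty frame, the second `return'
   reloads the argument registers and jumps back to the reentry point
   instead of emitting a recursive call.  */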
2235 if (optimize && retval_rhs != 0
2236 && frame_offset == 0
2237 && TREE_CODE (retval_rhs) == CALL_EXPR
2238 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2239 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2240 /* Finish checking validity, and if valid emit code
2241 to set the argument variables for the new call. */
2242 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2243 DECL_ARGUMENTS (current_function_decl)))
2244 {
2245 if (tail_recursion_label == 0)
2246 {
2247 tail_recursion_label = gen_label_rtx ();
2248 emit_label_after (tail_recursion_label,
2249 tail_recursion_reentry);
2250 }
2251 emit_queue ();
2252 expand_goto_internal (0, tail_recursion_label, last_insn);
2253 emit_barrier ();
2254 return;
2255 }
2256#ifdef HAVE_return
2257 /* This optimization is safe if there are local cleanups
2258 because expand_null_return takes care of them.
2259 ??? I think it should also be safe when there is a cleanup label,
2260 because expand_null_return takes care of them, too.
2261 Any reason why not? */
2262 if (HAVE_return && cleanup_label == 0
2263 && ! current_function_returns_pcc_struct)
2264 {
2265 /* If this is return x == y; then generate
2266 if (x == y) return 1; else return 0;
2267 if we can do it with explicit return insns. */
2268 if (retval_rhs)
2269 switch (TREE_CODE (retval_rhs))
2270 {
2271 case EQ_EXPR:
2272 case NE_EXPR:
2273 case GT_EXPR:
2274 case GE_EXPR:
2275 case LT_EXPR:
2276 case LE_EXPR:
2277 case TRUTH_ANDIF_EXPR:
2278 case TRUTH_ORIF_EXPR:
2279 case TRUTH_AND_EXPR:
2280 case TRUTH_OR_EXPR:
2281 case TRUTH_NOT_EXPR:
2282 op0 = gen_label_rtx ();
2283 jumpifnot (retval_rhs, op0);
2284 expand_value_return (const1_rtx);
2285 emit_label (op0);
2286 expand_value_return (const0_rtx);
2287 return;
2288 }
2289 }
2290#endif /* HAVE_return */
2291
2292 if (cleanups
2293 && retval_rhs != 0
2294 && TREE_TYPE (retval_rhs) != void_type_node
2295 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2296 {
2297 /* Calculate the return value into a pseudo reg. */
2298 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2299 emit_queue ();
2300 /* All temporaries have now been used. */
2301 free_temp_slots ();
2302 /* Return the calculated value, doing cleanups first. */
2303 expand_value_return (val);
2304 }
2305 else
2306 {
2307 /* No cleanups or no hard reg used;
2308 calculate value into hard return reg. */
2309 expand_expr (retval, 0, VOIDmode, 0);
2310 emit_queue ();
2311 free_temp_slots ();
2312 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2313 }
2314}
2315
2316/* Return 1 if the end of the generated RTX is not a barrier.
2317 This means code already compiled can drop through. */
2318
2319int
2320drop_through_at_end_p ()
2321{
2322 rtx insn = get_last_insn ();
2323 while (insn && GET_CODE (insn) == NOTE)
2324 insn = PREV_INSN (insn);
2325 return insn && GET_CODE (insn) != BARRIER;
2326}
2327\f
2328/* Emit code to alter this function's formal parms for a tail-recursive call.
2329 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2330 FORMALS is the chain of decls of formals.
2331 Return 1 if this can be done;
2332 otherwise return 0 and do not emit any code. */
2333
2334static int
2335tail_recursion_args (actuals, formals)
2336 tree actuals, formals;
2337{
2338 register tree a = actuals, f = formals;
2339 register int i;
2340 register rtx *argvec;
2341
2342 /* Check that number and types of actuals are compatible
2343 with the formals. This is not always true in valid C code.
2344 Also check that no formal needs to be addressable
2345 and that all formals are scalars. */
2346
2347 /* Also count the args. */
2348
2349 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2350 {
2351 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2352 return 0;
2353 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2354 return 0;
2355 }
2356 if (a != 0 || f != 0)
2357 return 0;
2358
2359 /* Compute all the actuals. */
2360
2361 argvec = (rtx *) alloca (i * sizeof (rtx));
2362
2363 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2364 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2365
2366 /* Find which actual values refer to current values of previous formals.
2367 Copy each of them now, before any formal is changed. */
2368
2369 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2370 {
2371 int copy = 0;
2372 register int j;
2373 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2374 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2375 { copy = 1; break; }
2376 if (copy)
2377 argvec[i] = copy_to_reg (argvec[i]);
2378 }
2379
2380 /* Store the values of the actuals into the formals. */
2381
2382 for (f = formals, a = actuals, i = 0; f;
2383 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2384 {
2385 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2386 emit_move_insn (DECL_RTL (f), argvec[i]);
2387 else
2388 convert_move (DECL_RTL (f), argvec[i],
2389 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2390 }
2391
2392 free_temp_slots ();
2393 return 1;
2394}
2395\f
2396/* Generate the RTL code for entering a binding contour.
2397 The variables are declared one by one, by calls to `expand_decl'.
2398
2399 EXIT_FLAG is nonzero if this construct should be visible to
2400 `exit_something'. */
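/* Caller sketch (assumed front-end code): a brace-enclosed block is
   expanded roughly as

	expand_start_bindings (0);
	... expand_decl / expand_decl_init for each local ...
	... expand the statements of the block ...
	expand_end_bindings (decls, 1, 0);

   where DECLS is the chain of VAR_DECLs declared in the block.  */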
2401
2402void
2403expand_start_bindings (exit_flag)
2404 int exit_flag;
2405{
2406 struct nesting *thisblock = ALLOC_NESTING ();
2407
2408 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2409
2410 /* Make an entry on block_stack for the block we are entering. */
2411
2412 thisblock->next = block_stack;
2413 thisblock->all = nesting_stack;
2414 thisblock->depth = ++nesting_depth;
2415 thisblock->data.block.stack_level = 0;
2416 thisblock->data.block.cleanups = 0;
2417 thisblock->data.block.function_call_count = 0;
2418#if 0
2419 if (block_stack)
2420 {
2421 if (block_stack->data.block.cleanups == NULL_TREE
2422 && (block_stack->data.block.outer_cleanups == NULL_TREE
2423 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2424 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2425 else
2426 thisblock->data.block.outer_cleanups
2427 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2428 block_stack->data.block.outer_cleanups);
2429 }
2430 else
2431 thisblock->data.block.outer_cleanups = 0;
2432#endif
2433#if 1
2434 if (block_stack
2435 && !(block_stack->data.block.cleanups == NULL_TREE
2436 && block_stack->data.block.outer_cleanups == NULL_TREE))
2437 thisblock->data.block.outer_cleanups
2438 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2439 block_stack->data.block.outer_cleanups);
2440 else
2441 thisblock->data.block.outer_cleanups = 0;
2442#endif
2443 thisblock->data.block.label_chain = 0;
2444 thisblock->data.block.innermost_stack_block = stack_block_stack;
2445 thisblock->data.block.first_insn = note;
2446 thisblock->data.block.block_start_count = ++block_start_count;
2447 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2448 block_stack = thisblock;
2449 nesting_stack = thisblock;
2450
2451 /* Make a new level for allocating stack slots. */
2452 push_temp_slots ();
2453}
2454
2455/* Generate RTL code to terminate a binding contour.
2456 VARS is the chain of VAR_DECL nodes
2457 for the variables bound in this contour.
2458 MARK_ENDS is nonzero if we should put a note at the beginning
2459 and end of this binding contour.
2460
2461 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2462 (That is true automatically if the contour has a saved stack level.) */
2463
2464void
2465expand_end_bindings (vars, mark_ends, dont_jump_in)
2466 tree vars;
2467 int mark_ends;
2468 int dont_jump_in;
2469{
2470 register struct nesting *thisblock = block_stack;
2471 register tree decl;
2472
2473 if (warn_unused)
2474 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2475 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2476 warning_with_decl (decl, "unused variable `%s'");
2477
2478 /* Mark the beginning and end of the scope if requested. */
2479
2480 if (mark_ends)
2481 emit_note (0, NOTE_INSN_BLOCK_END);
2482 else
2483 /* Get rid of the beginning-mark if we don't make an end-mark. */
2484 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2485
2486 if (thisblock->exit_label)
2487 {
2488 do_pending_stack_adjust ();
2489 emit_label (thisblock->exit_label);
2490 }
2491
2492 /* If necessary, make a handler for nonlocal gotos taking
2493 place in the function calls in this block. */
2494 if (function_call_count != thisblock->data.block.function_call_count
2495 && nonlocal_labels
2496 /* Make handler for outermost block
2497 if there were any nonlocal gotos to this function. */
2498 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2499 /* Make handler for inner block if it has something
2500 special to do when you jump out of it. */
2501 : (thisblock->data.block.cleanups != 0
2502 || thisblock->data.block.stack_level != 0)))
2503 {
2504 tree link;
2505 rtx afterward = gen_label_rtx ();
2506 rtx handler_label = gen_label_rtx ();
2507 rtx save_receiver = gen_reg_rtx (Pmode);
2508
2509 /* Don't let jump_optimize delete the handler. */
2510 LABEL_PRESERVE_P (handler_label) = 1;
2511
2512 /* Record the handler address in the stack slot for that purpose,
2513 during this block, saving and restoring the outer value. */
2514 if (thisblock->next != 0)
2515 {
2516 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2517 emit_insn_before (gen_move_insn (save_receiver,
2518 nonlocal_goto_handler_slot),
2519 thisblock->data.block.first_insn);
2520 }
2521 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2522 gen_rtx (LABEL_REF, Pmode,
2523 handler_label)),
2524 thisblock->data.block.first_insn);
2525
2526 /* Jump around the handler; it runs only when specially invoked. */
2527 emit_jump (afterward);
2528 emit_label (handler_label);
2529
2530#ifdef HAVE_nonlocal_goto
2531 if (! HAVE_nonlocal_goto)
2532#endif
2533 /* First adjust our frame pointer to its actual value. It was
2534 previously set to the start of the virtual area corresponding to
2535 the stacked variables when we branched here and now needs to be
2536 adjusted to the actual hardware fp value.
2537
2538 Assignments to virtual registers are converted by
2539 instantiate_virtual_regs into the corresponding assignment
2540 to the underlying register (fp in this case) that makes
2541 the original assignment true.
2542 So the following insn will actually be
2543 decrementing fp by STARTING_FRAME_OFFSET. */
2544 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2545
2546#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2547 if (fixed_regs[ARG_POINTER_REGNUM])
2548 {
2549 /* Now restore our arg pointer from the address at which it was saved
2550 in our stack frame.
2551 If there hasn't been space allocated for it yet, make some now. */
2552 if (arg_pointer_save_area == 0)
2553 arg_pointer_save_area
2554 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2555 emit_move_insn (virtual_incoming_args_rtx,
2556 /* We need a pseudo here,
2557 or else instantiate_virtual_regs_1 complains. */
2558 copy_to_reg (arg_pointer_save_area));
2559 }
2560#endif
2561
2562 /* The handler expects the desired label address in the static chain
2563 register. It tests the address and does an appropriate jump
2564 to whatever label is desired. */
2565 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2566 /* Skip any labels we shouldn't be able to jump to from here. */
2567 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2568 {
2569 rtx not_this = gen_label_rtx ();
2570 rtx this = gen_label_rtx ();
2571 do_jump_if_equal (static_chain_rtx,
2572 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2573 this, 0);
2574 emit_jump (not_this);
2575 emit_label (this);
2576 expand_goto (TREE_VALUE (link));
2577 emit_label (not_this);
2578 }
2579 /* If label is not recognized, abort. */
2580 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2581 VOIDmode, 0);
2582 emit_label (afterward);
2583 }
2584
2585 /* Don't allow jumping into a block that has cleanups or a stack level. */
2586 if (dont_jump_in
2587 || thisblock->data.block.stack_level != 0
2588 || thisblock->data.block.cleanups != 0)
2589 {
2590 struct label_chain *chain;
2591
2592 /* Any labels in this block are no longer valid to go to.
2593 Mark them to cause an error message. */
2594 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2595 {
2596 DECL_TOO_LATE (chain->label) = 1;
2597 /* If any goto without a fixup came to this label,
2598 that must be an error, because gotos without fixups
2599 come from outside all saved stack-levels and all cleanups. */
2600 if (TREE_ADDRESSABLE (chain->label))
2601 error_with_decl (chain->label,
2602 "label `%s' used before containing binding contour");
2603 }
2604 }
2605
2606 /* Restore stack level in effect before the block
2607 (only if variable-size objects allocated). */
2608 /* Perform any cleanups associated with the block. */
2609
2610 if (thisblock->data.block.stack_level != 0
2611 || thisblock->data.block.cleanups != 0)
2612 {
2613 /* Don't let cleanups affect ({...}) constructs. */
2614 int old_expr_stmts_for_value = expr_stmts_for_value;
2615 rtx old_last_expr_value = last_expr_value;
2616 tree old_last_expr_type = last_expr_type;
2617 expr_stmts_for_value = 0;
2618
2619 /* Do the cleanups. */
2620 expand_cleanups (thisblock->data.block.cleanups, 0);
2621 do_pending_stack_adjust ();
2622
2623 expr_stmts_for_value = old_expr_stmts_for_value;
2624 last_expr_value = old_last_expr_value;
2625 last_expr_type = old_last_expr_type;
2626
2627 /* Restore the stack level. */
2628
2629 if (thisblock->data.block.stack_level != 0)
2630 {
2631 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2632 thisblock->data.block.stack_level, 0);
2633 if (nonlocal_goto_handler_slot != 0)
2634 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2635 }
2636
2637 /* Any gotos out of this block must also do these things.
2638 Also report any gotos with fixups that came to labels in this
2639 level. */
2640 fixup_gotos (thisblock,
2641 thisblock->data.block.stack_level,
2642 thisblock->data.block.cleanups,
2643 thisblock->data.block.first_insn,
2644 dont_jump_in);
2645 }
2646
2647 /* If doing stupid register allocation, make sure lives of all
2648 register variables declared here extend thru end of scope. */
2649
2650 if (obey_regdecls)
2651 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2652 {
2653 rtx rtl = DECL_RTL (decl);
2654 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2655 use_variable (rtl);
2656 }
2657
2658 /* Restore block_stack level for containing block. */
2659
2660 stack_block_stack = thisblock->data.block.innermost_stack_block;
2661 POPSTACK (block_stack);
2662
2663 /* Pop the stack slot nesting and free any slots at this level. */
2664 pop_temp_slots ();
2665}
2666\f
2667/* Generate RTL for the automatic variable declaration DECL.
2668 (Other kinds of declarations are simply ignored if seen here.)
2669 CLEANUP is an expression to be executed at exit from this binding contour;
2670 for example, in C++, it might call the destructor for this variable.
2671
2672 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2673 either before or after calling `expand_decl' but before compiling
2674 any subsequent expressions. This is because CLEANUP may be expanded
2675 more than once, on different branches of execution.
2676 For the same reason, CLEANUP may not contain a CALL_EXPR
2677 except as its topmost node--else `preexpand_calls' would get confused.
2678
2679 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2680 that is not associated with any particular variable.
2681
2682 There is no special support here for C++ constructors.
2683 They should be handled by the proper code in DECL_INITIAL. */
2684
2685void
2686expand_decl (decl)
2687 register tree decl;
2688{
2689 struct nesting *thisblock = block_stack;
2690 tree type = TREE_TYPE (decl);
2691
2692 /* Only automatic variables need any expansion done.
2693 Static and external variables, and external functions,
2694 will be handled by `assemble_variable' (called from finish_decl).
2695 TYPE_DECL and CONST_DECL require nothing.
2696 PARM_DECLs are handled in `assign_parms'. */
2697
2698 if (TREE_CODE (decl) != VAR_DECL)
2699 return;
2700 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2701 return;
2702
2703 /* Create the RTL representation for the variable. */
2704
2705 if (type == error_mark_node)
2706 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2707 else if (DECL_SIZE (decl) == 0)
2708 /* Variable with incomplete type. */
2709 {
2710 if (DECL_INITIAL (decl) == 0)
2711 /* Error message was already done; now avoid a crash. */
2712 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2713 else
2714 /* An initializer is going to decide the size of this array.
2715 Until we know the size, represent its address with a reg. */
2716 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2717 }
2718 else if (DECL_MODE (decl) != BLKmode
2719 /* If -ffloat-store, don't put explicit float vars
2720 into regs. */
2721 && !(flag_float_store
2722 && TREE_CODE (type) == REAL_TYPE)
2723 && ! TREE_THIS_VOLATILE (decl)
2724 && ! TREE_ADDRESSABLE (decl)
2725 && (TREE_REGDECL (decl) || ! obey_regdecls))
2726 {
2727 /* Automatic variable that can go in a register. */
2728 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2729 if (TREE_CODE (type) == POINTER_TYPE)
2730 mark_reg_pointer (DECL_RTL (decl));
2731 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2732 }
2733 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2734 {
2735 /* Variable of fixed size that goes on the stack. */
2736 rtx oldaddr = 0;
2737 rtx addr;
2738
2739 /* If we previously made RTL for this decl, it must be an array
2740 whose size was determined by the initializer.
2741 The old address was a register; set that register now
2742 to the proper address. */
2743 if (DECL_RTL (decl) != 0)
2744 {
2745 if (GET_CODE (DECL_RTL (decl)) != MEM
2746 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2747 abort ();
2748 oldaddr = XEXP (DECL_RTL (decl), 0);
2749 }
2750
2751 DECL_RTL (decl)
2752 = assign_stack_temp (DECL_MODE (decl),
2753 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2754 + BITS_PER_UNIT - 1)
2755 / BITS_PER_UNIT),
2756 1);
2757
2758 /* Set alignment we actually gave this decl. */
2759 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2760 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2761
2762 if (oldaddr)
2763 {
2764 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2765 if (addr != oldaddr)
2766 emit_move_insn (oldaddr, addr);
2767 }
2768
2769 /* If this is a memory ref that contains aggregate components,
2770 mark it as such for cse and loop optimize. */
2771 MEM_IN_STRUCT_P (DECL_RTL (decl))
2772 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2773 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2774 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2775#if 0
2776 /* If this is in memory because of -ffloat-store,
2777 set the volatile bit, to prevent optimizations from
2778 undoing the effects. */
2779 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2780 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2781#endif
2782 }
2783 else
2784 /* Dynamic-size object: must push space on the stack. */
2785 {
2786 rtx address, size;
2787
2788 /* Record the stack pointer on entry to block, if we have
2789 not already done so. */
2790 if (thisblock->data.block.stack_level == 0)
2791 {
2792 do_pending_stack_adjust ();
2793 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2794 &thisblock->data.block.stack_level,
2795 thisblock->data.block.first_insn);
2796 stack_block_stack = thisblock;
2797 }
2798
2799 /* Compute the variable's size, in bytes. */
2800 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2801 DECL_SIZE (decl),
2802 size_int (BITS_PER_UNIT)),
2803 0, VOIDmode, 0);
2804 free_temp_slots ();
2805
2806 /* This is equivalent to calling alloca. */
2807 current_function_calls_alloca = 1;
2808
2809 /* Allocate space on the stack for the variable. */
2810 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2811
2812 if (nonlocal_goto_handler_slot != 0)
2813 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2814
2815 /* Reference the variable indirect through that rtx. */
2816 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2817
2818 /* If this is a memory ref that contains aggregate components,
2819 mark it as such for cse and loop optimize. */
2820 MEM_IN_STRUCT_P (DECL_RTL (decl))
2821 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2822 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2823 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2824
2825 /* Indicate the alignment we actually gave this variable. */
2826#ifdef STACK_BOUNDARY
2827 DECL_ALIGN (decl) = STACK_BOUNDARY;
2828#else
2829 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2830#endif
2831 }
2832
2833 if (TREE_THIS_VOLATILE (decl))
2834 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2835 if (TREE_READONLY (decl))
2836 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2837
2838 /* If doing stupid register allocation, make sure life of any
2839 register variable starts here, at the start of its scope. */
2840
2841 if (obey_regdecls)
2842 use_variable (DECL_RTL (decl));
2843}
2844\f
2845/* Emit code to perform the initialization of a declaration DECL. */
2846
2847void
2848expand_decl_init (decl)
2849 tree decl;
2850{
2851 int was_used = TREE_USED (decl);
2852
2853 if (TREE_STATIC (decl))
2854 return;
2855
2856 /* Compute and store the initial value now. */
2857
2858 if (DECL_INITIAL (decl) == error_mark_node)
2859 {
2860 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2861 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2862 || code == POINTER_TYPE)
2863 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2864 0, 0);
2865 emit_queue ();
2866 }
2867 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2868 {
2869 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2870 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2871 emit_queue ();
2872 }
2873
2874 /* Don't let the initialization count as "using" the variable. */
2875 TREE_USED (decl) = was_used;
2876
2877 /* Free any temporaries we made while initializing the decl. */
2878 free_temp_slots ();
2879}
2880
2881/* CLEANUP is an expression to be executed at exit from this binding contour;
2882 for example, in C++, it might call the destructor for this variable.
2883
2884 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2885 either before or after calling `expand_decl' but before compiling
2886 any subsequent expressions. This is because CLEANUP may be expanded
2887 more than once, on different branches of execution.
2888 For the same reason, CLEANUP may not contain a CALL_EXPR
2889 except as its topmost node--else `preexpand_calls' would get confused.
2890
2891 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2892 that is not associated with any particular variable. */
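/* A C++-flavored sketch (the cleanup tree shown is an assumption
   about the front end): for a local `T x;' whose type has a
   destructor, the front end might do

	expand_decl (x);
	expand_decl_init (x);
	expand_decl_cleanup (x, <call to `x.~T ()', as topmost node>);

   and the destructor then runs on every exit from the contour.  */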
2893
2894int
2895expand_decl_cleanup (decl, cleanup)
2896 tree decl, cleanup;
2897{
2898 struct nesting *thisblock = block_stack;
2899
2900 /* Error if we are not in any block. */
2901 if (thisblock == 0)
2902 return 0;
2903
2904 /* Record the cleanup if there is one. */
2905
2906 if (cleanup != 0)
2907 {
2908 thisblock->data.block.cleanups
2909 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2910 /* If this block has a cleanup, it belongs in stack_block_stack. */
2911 stack_block_stack = thisblock;
2912 }
2913 return 1;
2914}
2915\f
2916/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2917 DECL_ELTS is the list of elements that belong to DECL's type.
2918 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
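/* E.g. (sketch) for the C++ anonymous union

	union { int i; double d; };

   DECL is the compiler-generated variable for the union itself and
   DECL_ELTS lists `i' and `d'; below, each element simply reuses
   DECL's storage, reinterpreted in the element's own mode.  */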
2919
2920void
2921expand_anon_union_decl (decl, cleanup, decl_elts)
2922 tree decl, cleanup, decl_elts;
2923{
2924 struct nesting *thisblock = block_stack;
2925 rtx x;
2926
2927 expand_decl (decl);
     expand_decl_cleanup (decl, cleanup);
2928 x = DECL_RTL (decl);
2929
2930 while (decl_elts)
2931 {
2932 tree decl_elt = TREE_VALUE (decl_elts);
2933 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2934 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2935
2936 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2937 instead create a new MEM rtx with the proper mode. */
2938 if (GET_CODE (x) == MEM)
2939 {
2940 if (mode == GET_MODE (x))
2941 DECL_RTL (decl_elt) = x;
2942 else
2943 {
2944 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2945 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2946 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2947 }
2948 }
2949 else if (GET_CODE (x) == REG)
2950 {
2951 if (mode == GET_MODE (x))
2952 DECL_RTL (decl_elt) = x;
2953 else
2954 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2955 }
2956 else
2957 abort ();
2958
2959 /* Record the cleanup if there is one. */
2960
2961 if (cleanup != 0)
2962 thisblock->data.block.cleanups
2963 = temp_tree_cons (decl_elt, cleanup_elt,
2964 thisblock->data.block.cleanups);
2965
2966 decl_elts = TREE_CHAIN (decl_elts);
2967 }
2968}
2969\f
2970/* Expand a list of cleanups LIST.
2971 Elements may be expressions or may be nested lists.
2972
2973 If DONT_DO is nonnull, then any list-element
2974 whose TREE_PURPOSE matches DONT_DO is omitted.
2975 This is sometimes used to avoid a cleanup associated with
2976 a value that is being returned out of the scope. */
2977
2978static void
2979expand_cleanups (list, dont_do)
2980 tree list;
2981 tree dont_do;
2982{
2983 tree tail;
2984 for (tail = list; tail; tail = TREE_CHAIN (tail))
2985 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2986 {
2987 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2988 expand_cleanups (TREE_VALUE (tail), dont_do);
2989 else
2990 {
2991 /* Cleanups may be run multiple times. For example,
2992 when exiting a binding contour, we expand the
2993 cleanups associated with that contour. When a goto
2994 within that binding contour has a target outside that
2995 contour, it will expand all cleanups from its scope to
2996 the target. Though the cleanups are expanded multiple
2997 times, the control paths are non-overlapping so the
2998 cleanups will not be executed twice. */
2999 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3000 free_temp_slots ();
3001 }
3002 }
3003}
3004
3005/* Expand a list of cleanups for a goto fixup.
3006 The expansion is put into the insn chain after the insn *BEFORE_JUMP
3007 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
3008
3009static void
3010fixup_cleanups (list, before_jump)
3011 tree list;
3012 rtx *before_jump;
3013{
3014 rtx beyond_jump = get_last_insn ();
3015 rtx new_before_jump;
3016
3017 expand_cleanups (list, 0);
3018 /* Pop any pushes done in the cleanups,
3019 in case function is about to return. */
3020 do_pending_stack_adjust ();
3021
3022 new_before_jump = get_last_insn ();
3023
3024 if (beyond_jump != new_before_jump)
3025 {
3026 /* If cleanups expand to nothing, don't reorder. */
3027 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
3028 *before_jump = new_before_jump;
3029 }
3030}
3031
3032/* Move all cleanups from the current block_stack
3033 to the containing block_stack, where they are assumed to
3034 have been created. If anything can cause a temporary to
3035 be created, but not expanded for more than one level of
3036 block_stacks, then this code will have to change. */
3037
3038void
3039move_cleanups_up ()
3040{
3041 struct nesting *block = block_stack;
3042 struct nesting *outer = block->next;
3043
3044 outer->data.block.cleanups
3045 = chainon (block->data.block.cleanups,
3046 outer->data.block.cleanups);
3047 block->data.block.cleanups = 0;
3048}
3049
3050tree
3051last_cleanup_this_contour ()
3052{
3053 if (block_stack == 0)
3054 return 0;
3055
3056 return block_stack->data.block.cleanups;
3057}
3058
3059/* Return 1 if there are any pending cleanups at this point.
3060 If THIS_CONTOUR is nonzero, check the current contour as well.
3061 Otherwise, look only at the contours that enclose this one. */
3062
3063int
3064any_pending_cleanups (this_contour)
3065 int this_contour;
3066{
3067 struct nesting *block;
3068
3069 if (block_stack == 0)
3070 return 0;
3071
3072 if (this_contour && block_stack->data.block.cleanups != NULL)
3073 return 1;
3074 if (block_stack->data.block.cleanups == 0
3075 && (block_stack->data.block.outer_cleanups == 0
3076#if 0
3077 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3078#endif
3079 ))
3080 return 0;
3081
3082 for (block = block_stack->next; block; block = block->next)
3083 if (block->data.block.cleanups != 0)
3084 return 1;
3085
3086 return 0;
3087}
3088\f
3089/* Enter a case (Pascal) or switch (C) statement.
3090 Push a block onto case_stack and nesting_stack
3091 to accumulate the case-labels that are seen
3092 and to record the labels generated for the statement.
3093
3094 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3095 Otherwise, this construct is transparent for `exit_something'.
3096
3097 EXPR is the index-expression to be dispatched on.
3098 TYPE is its nominal type. We could simply convert EXPR to this type,
3099 but instead we take short cuts. */
3100
3101void
3102expand_start_case (exit_flag, expr, type, printname)
3103 int exit_flag;
3104 tree expr;
3105 tree type;
3106 char *printname;
3107{
3108 register struct nesting *thiscase = ALLOC_NESTING ();
3109
3110 /* Make an entry on case_stack for the case we are entering. */
3111
3112 thiscase->next = case_stack;
3113 thiscase->all = nesting_stack;
3114 thiscase->depth = ++nesting_depth;
3115 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3116 thiscase->data.case_stmt.case_list = 0;
3117 thiscase->data.case_stmt.index_expr = expr;
3118 thiscase->data.case_stmt.nominal_type = type;
3119 thiscase->data.case_stmt.default_label = 0;
3120 thiscase->data.case_stmt.num_ranges = 0;
3121 thiscase->data.case_stmt.printname = printname;
3122 thiscase->data.case_stmt.seenlabel = 0;
3123 case_stack = thiscase;
3124 nesting_stack = thiscase;
3125
3126 do_pending_stack_adjust ();
3127
3128 /* Make sure case_stmt.start points to something that won't
3129 need any transformation before expand_end_case. */
3130 if (GET_CODE (get_last_insn ()) != NOTE)
3131 emit_note (0, NOTE_INSN_DELETED);
3132
3133 thiscase->data.case_stmt.start = get_last_insn ();
3134}
3135
3136/* Start a "dummy case statement" within which case labels are invalid
3137 and are not connected to any larger real case statement.
3138 This can be used if you don't want to let a case statement jump
3139 into the middle of certain kinds of constructs. */
3140
3141void
3142expand_start_case_dummy ()
3143{
3144 register struct nesting *thiscase = ALLOC_NESTING ();
3145
3146 /* Make an entry on case_stack for the dummy. */
3147
3148 thiscase->next = case_stack;
3149 thiscase->all = nesting_stack;
3150 thiscase->depth = ++nesting_depth;
3151 thiscase->exit_label = 0;
3152 thiscase->data.case_stmt.case_list = 0;
3153 thiscase->data.case_stmt.start = 0;
3154 thiscase->data.case_stmt.nominal_type = 0;
3155 thiscase->data.case_stmt.default_label = 0;
3156 thiscase->data.case_stmt.num_ranges = 0;
3157 case_stack = thiscase;
3158 nesting_stack = thiscase;
3159}
3160
3161/* End a dummy case statement. */
3162
3163void
3164expand_end_case_dummy ()
3165{
3166 POPSTACK (case_stack);
3167}
3168
3169/* Return the data type of the index-expression
3170 of the innermost case statement, or null if none. */
3171
3172tree
3173case_index_expr_type ()
3174{
3175 if (case_stack)
3176 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3177 return 0;
3178}
3179\f
3180/* Accumulate one case or default label inside a case or switch statement.
3181 VALUE is the value of the case (a null pointer, for a default label).
3182
3183 If not currently inside a case or switch statement, return 1 and do
3184 nothing. The caller will print a language-specific error message.
3185 If VALUE is a duplicate or overlaps, return 2 and do nothing
3186 except store the (first) duplicate node in *DUPLICATE.
3187 If VALUE is out of range, return 3 and do nothing.
3188 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3189 Return 0 on success.
3190
3191 Extended to handle range statements. */
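/* Caller sketch (assumed front-end code, using the usual build_int_2
   constructor): for `case 3:' with label LAB inside a switch,

	tree duplicate;
	int err = pushcase (build_int_2 (3, 0), LAB, &duplicate);

   and a nonzero ERR selects one of the diagnostics listed above.  */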
3192
3193int
3194pushcase (value, label, duplicate)
3195 register tree value;
3196 register tree label;
3197 tree *duplicate;
3198{
3199 register struct case_node **l;
3200 register struct case_node *n;
3201 tree index_type;
3202 tree nominal_type;
3203
3204 /* Fail if not inside a real case statement. */
3205 if (! (case_stack && case_stack->data.case_stmt.start))
3206 return 1;
3207
3208 if (stack_block_stack
3209 && stack_block_stack->depth > case_stack->depth)
3210 return 5;
3211
3212 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3213 nominal_type = case_stack->data.case_stmt.nominal_type;
3214
3215 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3216 if (index_type == error_mark_node)
3217 return 0;
3218
3219 /* Convert VALUE to the type in which the comparisons are nominally done. */
3220 if (value != 0)
3221 value = convert (nominal_type, value);
3222
3223 /* If this is the first label, warn if any insns have been emitted. */
3224 if (case_stack->data.case_stmt.seenlabel == 0)
3225 {
3226 rtx insn;
3227 for (insn = case_stack->data.case_stmt.start;
3228 insn;
3229 insn = NEXT_INSN (insn))
3230 {
3231 if (GET_CODE (insn) == CODE_LABEL)
3232 break;
3233 if (GET_CODE (insn) != NOTE
3234 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3235 {
3236 warning ("unreachable code at beginning of %s",
3237 case_stack->data.case_stmt.printname);
3238 break;
3239 }
3240 }
3241 }
3242 case_stack->data.case_stmt.seenlabel = 1;
3243
3244 /* Fail if this value is out of range for the actual type of the index
3245 (which may be narrower than NOMINAL_TYPE). */
3246 if (value != 0 && ! int_fits_type_p (value, index_type))
3247 return 3;
3248
3249 /* Fail if this is a duplicate or overlaps another entry. */
3250 if (value == 0)
3251 {
3252 if (case_stack->data.case_stmt.default_label != 0)
3253 {
3254 *duplicate = case_stack->data.case_stmt.default_label;
3255 return 2;
3256 }
3257 case_stack->data.case_stmt.default_label = label;
3258 }
3259 else
3260 {
3261 /* Find the elt in the chain before which to insert the new value,
3262 to keep the chain sorted in increasing order.
3263 But report an error if this element is a duplicate. */
3264 for (l = &case_stack->data.case_stmt.case_list;
3265 /* Keep going past elements distinctly less than VALUE. */
3266 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3267 l = &(*l)->right)
3268 ;
3269 if (*l)
3270 {
3271 /* Element we will insert before must be distinctly greater;
3272 overlap means error. */
3273 if (! tree_int_cst_lt (value, (*l)->low))
3274 {
3275 *duplicate = (*l)->code_label;
3276 return 2;
3277 }
3278 }
3279
3280 /* Add this label to the chain, and succeed.
3281 Copy VALUE so it is on temporary rather than momentary
3282 obstack and will thus survive till the end of the case statement. */
3283 n = (struct case_node *) oballoc (sizeof (struct case_node));
3284 n->left = 0;
3285 n->right = *l;
3286 n->high = n->low = copy_node (value);
3287 n->code_label = label;
3288 *l = n;
3289 }
3290
3291 expand_label (label);
3292 return 0;
3293}
3294
3295/* Like pushcase but this case applies to all values
3296 between VALUE1 and VALUE2 (inclusive).
3297 The return value is the same as that of pushcase
3298 but there is one additional error code:
3299 4 means the specified range was empty. */
3300
3301int
3302pushcase_range (value1, value2, label, duplicate)
3303 register tree value1, value2;
3304 register tree label;
3305 tree *duplicate;
3306{
3307 register struct case_node **l;
3308 register struct case_node *n;
3309 tree index_type;
3310 tree nominal_type;
3311
3312 /* Fail if not inside a real case statement. */
3313 if (! (case_stack && case_stack->data.case_stmt.start))
3314 return 1;
3315
3316 if (stack_block_stack
3317 && stack_block_stack->depth > case_stack->depth)
3318 return 5;
3319
3320 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3321 nominal_type = case_stack->data.case_stmt.nominal_type;
3322
3323 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3324 if (index_type == error_mark_node)
3325 return 0;
3326
3327 /* If this is the first label, warn if any insns have been emitted. */
3328 if (case_stack->data.case_stmt.seenlabel == 0)
3329 {
3330 rtx insn;
3331 for (insn = case_stack->data.case_stmt.start;
3332 insn;
3333 insn = NEXT_INSN (insn))
3334 {
3335 if (GET_CODE (insn) == CODE_LABEL)
3336 break;
3337 if (GET_CODE (insn) != NOTE
3338 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3339 {
3340 warning ("unreachable code at beginning of %s",
3341 case_stack->data.case_stmt.printname);
3342 break;
3343 }
3344 }
3345 }
3346 case_stack->data.case_stmt.seenlabel = 1;
3347
3348 /* Convert VALUEs to type in which the comparisons are nominally done. */
3349 if (value1 == 0) /* Negative infinity. */
3350 value1 = TYPE_MIN_VALUE (index_type);
3351 value1 = convert (nominal_type, value1);
3352
3353 if (value2 == 0) /* Positive infinity. */
3354 value2 = TYPE_MAX_VALUE (index_type);
3355 value2 = convert (nominal_type, value2);
3356
3357 /* Fail if these values are out of range. */
3358 if (! int_fits_type_p (value1, index_type))
3359 return 3;
3360
3361 if (! int_fits_type_p (value2, index_type))
3362 return 3;
3363
3364 /* Fail if the range is empty. */
3365 if (tree_int_cst_lt (value2, value1))
3366 return 4;
3367
3368 /* If the bounds are equal, turn this into the one-value case. */
3369 if (tree_int_cst_equal (value1, value2))
3370 return pushcase (value1, label, duplicate);
3371
3372 /* Find the elt in the chain before which to insert the new value,
3373 to keep the chain sorted in increasing order.
3374 But report an error if this element is a duplicate. */
3375 for (l = &case_stack->data.case_stmt.case_list;
3376 /* Keep going past elements distinctly less than this range. */
3377 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3378 l = &(*l)->right)
3379 ;
3380 if (*l)
3381 {
3382 /* Element we will insert before must be distinctly greater;
3383 overlap means error. */
3384 if (! tree_int_cst_lt (value2, (*l)->low))
3385 {
3386 *duplicate = (*l)->code_label;
3387 return 2;
3388 }
3389 }
3390
3391 /* Add this label to the chain, and succeed.
3392 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3393 obstack and will thus survive till the end of the case statement. */
3394
3395 n = (struct case_node *) oballoc (sizeof (struct case_node));
3396 n->left = 0;
3397 n->right = *l;
3398 n->low = copy_node (value1);
3399 n->high = copy_node (value2);
3400 n->code_label = label;
3401 *l = n;
3402
3403 expand_label (label);
3404
3405 case_stack->data.case_stmt.num_ranges++;
3406
3407 return 0;
3408}
3409\f
3410/* Called when the index of a switch statement is an enumerated type
3411 and there is no default label.
3412
3413 Checks that all enumeration literals are covered by the case
3414 expressions of a switch. Also, warn if there are any extra
3415 switch cases that are *not* elements of the enumerated type.
3416
3417 If all enumeration literals were covered by the case expressions,
3418 turn one of the expressions into the default expression since it should
3419 not be possible to fall through such a switch. */
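/* For example (a sketch):

	enum color { RED, GREEN, BLUE };

	switch (c) { case RED: ...  case GREEN: ... }

   draws a warning (under warn_switch, i.e. -Wswitch) that `BLUE' is
   not handled, whereas a switch listing all three literals has its
   last case label promoted to the default label.  */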
3420
3421void
3422check_for_full_enumeration_handling (type)
3423 tree type;
3424{
3425 register struct case_node *n;
3426 register struct case_node **l;
3427 register tree chain;
3428 int all_values = 1;
3429
3430 /* The time complexity of this loop is currently O(N * M), with
3431 N being the number of enumerals in the enumerated type, and
3432 M being the number of case expressions in the switch. */
3433
3434 for (chain = TYPE_VALUES (type);
3435 chain;
3436 chain = TREE_CHAIN (chain))
3437 {
3438 /* Find a match between enumeral and case expression, if possible.
3439 Quit looking when we've gone too far (since case expressions
3440 are kept sorted in ascending order). Warn about enumerals not
3441 handled in the switch statement case expression list. */
3442
3443 for (n = case_stack->data.case_stmt.case_list;
3444 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3445 n = n->right)
3446 ;
3447
 3448	  if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
 3449	    {
3450 if (warn_switch)
 3451	      warning ("enumeration value `%s' not handled in switch",
 3452		       IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3453 all_values = 0;
3454 }
3455 }
3456
3457 /* Now we go the other way around; we warn if there are case
3458 expressions that don't correspond to enumerals. This can
3459 occur since C and C++ don't enforce type-checking of
3460 assignments to enumeration variables. */
3461
3462 if (warn_switch)
3463 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3464 {
3465 for (chain = TYPE_VALUES (type);
3466 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3467 chain = TREE_CHAIN (chain))
3468 ;
3469
3470 if (!chain)
3471 warning ("case value `%d' not in enumerated type `%s'",
3472 TREE_INT_CST_LOW (n->low),
3473 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3474 == IDENTIFIER_NODE)
3475 ? TYPE_NAME (type)
3476 : DECL_NAME (TYPE_NAME (type))));
3477 if (!tree_int_cst_equal (n->low, n->high))
3478 {
3479 for (chain = TYPE_VALUES (type);
3480 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
3481 chain = TREE_CHAIN (chain))
3482 ;
3483
3484 if (!chain)
3485 warning ("case value `%d' not in enumerated type `%s'",
3486 TREE_INT_CST_LOW (n->high),
3487 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3488 == IDENTIFIER_NODE)
3489 ? TYPE_NAME (type)
3490 : DECL_NAME (TYPE_NAME (type))));
3491 }
3492 }
3493
3494 /* If all values were found as case labels, make one of them the default
3495 label. Thus, this switch will never fall through. We arbitrarily pick
3496 the last one to make the default since this is likely the most
3497 efficient choice. */
3498
3499 if (all_values)
3500 {
3501 for (l = &case_stack->data.case_stmt.case_list;
3502 (*l)->right != 0;
3503 l = &(*l)->right)
3504 ;
3505
3506 case_stack->data.case_stmt.default_label = (*l)->code_label;
3507 *l = 0;
3508 }
3509}
3510\f
3511/* Terminate a case (Pascal) or switch (C) statement
3512 in which CASE_INDEX is the expression to be tested.
3513 Generate the code to test it and jump to the right place. */
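/* As a rough illustration of the strategy choice made below: a dense
   switch such as

	switch (i) { case 1: ...  case 2: ...  case 3: ... }

   is a candidate for a casesi or tablejump dispatch table, while a
   sparse switch (span of values more than ten times the number of case
   values, per the heuristic below) or one with only a handful of cases
   is expanded as a tree of conditional branches instead.  */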
3514
3515void
3516expand_end_case (orig_index)
3517 tree orig_index;
3518{
3519 tree minval, maxval, range;
3520 rtx default_label = 0;
3521 register struct case_node *n;
3522 int count;
3523 rtx index;
3524 rtx table_label = gen_label_rtx ();
3525 int ncases;
3526 rtx *labelvec;
3527 register int i;
3528 rtx before_case;
3529 register struct nesting *thiscase = case_stack;
3530 tree index_expr = thiscase->data.case_stmt.index_expr;
3531 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3532
3533 do_pending_stack_adjust ();
3534
3535 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3536 if (TREE_TYPE (index_expr) != error_mark_node)
3537 {
3538 /* If switch expression was an enumerated type, check that all
3539 enumeration literals are covered by the cases.
3540 No sense trying this if there's a default case, however. */
3541
3542 if (!thiscase->data.case_stmt.default_label
3543 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3544 && TREE_CODE (index_expr) != INTEGER_CST)
3545 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3546
3547 /* If this is the first label, warn if any insns have been emitted. */
3548 if (thiscase->data.case_stmt.seenlabel == 0)
3549 {
3550 rtx insn;
3551 for (insn = get_last_insn ();
3552 insn != case_stack->data.case_stmt.start;
3553 insn = PREV_INSN (insn))
3554 if (GET_CODE (insn) != NOTE
 3555	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3556 {
3557 warning ("unreachable code at beginning of %s",
3558 case_stack->data.case_stmt.printname);
3559 break;
3560 }
3561 }
3562
3563 /* If we don't have a default-label, create one here,
3564 after the body of the switch. */
3565 if (thiscase->data.case_stmt.default_label == 0)
3566 {
3567 thiscase->data.case_stmt.default_label
3568 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3569 expand_label (thiscase->data.case_stmt.default_label);
3570 }
3571 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3572
3573 before_case = get_last_insn ();
3574
3575 /* Simplify the case-list before we count it. */
3576 group_case_nodes (thiscase->data.case_stmt.case_list);
3577
3578 /* Get upper and lower bounds of case values.
3579 Also convert all the case values to the index expr's data type. */
3580
3581 count = 0;
3582 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3583 {
3584 /* Check low and high label values are integers. */
3585 if (TREE_CODE (n->low) != INTEGER_CST)
3586 abort ();
3587 if (TREE_CODE (n->high) != INTEGER_CST)
3588 abort ();
3589
3590 n->low = convert (TREE_TYPE (index_expr), n->low);
3591 n->high = convert (TREE_TYPE (index_expr), n->high);
3592
3593 /* Count the elements and track the largest and smallest
3594 of them (treating them as signed even if they are not). */
3595 if (count++ == 0)
3596 {
3597 minval = n->low;
3598 maxval = n->high;
3599 }
3600 else
3601 {
3602 if (INT_CST_LT (n->low, minval))
3603 minval = n->low;
3604 if (INT_CST_LT (maxval, n->high))
3605 maxval = n->high;
3606 }
3607 /* A range counts double, since it requires two compares. */
3608 if (! tree_int_cst_equal (n->low, n->high))
3609 count++;
3610 }
3611
3612 /* Compute span of values. */
3613 if (count != 0)
3614 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3615 maxval, minval));
3616
3617 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3618 {
3619 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3620 emit_queue ();
3621 emit_jump (default_label);
3622 }
3623 /* If range of values is much bigger than number of values,
3624 make a sequence of conditional branches instead of a dispatch.
3625 If the switch-index is a constant, do it this way
3626 because we can optimize it. */
3627 else if (TREE_INT_CST_HIGH (range) != 0
3628#ifdef HAVE_casesi
3629 || (HAVE_casesi ? count < 4 : count < 5)
3630#else
3631 /* If machine does not have a case insn that compares the
3632 bounds, this means extra overhead for dispatch tables
3633 which raises the threshold for using them. */
3634 || count < 5
3635#endif
3636 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3637 || TREE_CODE (index_expr) == INTEGER_CST
 3638	   /* These will reduce to a constant.  */
 3639	   || (TREE_CODE (index_expr) == CALL_EXPR
 3640	       && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
 3641	       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
 3642	       && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
 3643	   || (TREE_CODE (index_expr) == COMPOUND_EXPR
 3644	       && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3645 {
3646 index = expand_expr (index_expr, 0, VOIDmode, 0);
3647
3648 /* If the index is a short or char that we do not have
3649 an insn to handle comparisons directly, convert it to
3650 a full integer now, rather than letting each comparison
3651 generate the conversion. */
3652
3653 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
 3654	  && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3655 == CODE_FOR_nothing))
3656 {
3657 enum machine_mode wider_mode;
3658 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3659 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3660 if (cmp_optab->handlers[(int) wider_mode].insn_code
3661 != CODE_FOR_nothing)
3662 {
3663 index = convert_to_mode (wider_mode, index, unsignedp);
3664 break;
3665 }
3666 }
3667
3668 emit_queue ();
3669 do_pending_stack_adjust ();
3670
3671 index = protect_from_queue (index, 0);
3672 if (GET_CODE (index) == MEM)
3673 index = copy_to_reg (index);
3674 if (GET_CODE (index) == CONST_INT
3675 || TREE_CODE (index_expr) == INTEGER_CST)
3676 {
3677 /* Make a tree node with the proper constant value
3678 if we don't already have one. */
3679 if (TREE_CODE (index_expr) != INTEGER_CST)
3680 {
3681 index_expr
3682 = build_int_2 (INTVAL (index),
3683 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3684 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3685 }
3686
 3687	      /* For constant index expressions we need only
 3688		 issue an unconditional branch to the appropriate
 3689		 target code.  The job of removing any unreachable
 3690		 code is left to the optimization phase if the
 3691		 "-O" option is specified.  */
3692 for (n = thiscase->data.case_stmt.case_list;
3693 n;
3694 n = n->right)
3695 {
3696 if (! tree_int_cst_lt (index_expr, n->low)
3697 && ! tree_int_cst_lt (n->high, index_expr))
3698 break;
3699 }
3700 if (n)
3701 emit_jump (label_rtx (n->code_label));
3702 else
3703 emit_jump (default_label);
3704 }
3705 else
3706 {
3707 /* If the index expression is not constant we generate
3708 a binary decision tree to select the appropriate
3709 target code. This is done as follows:
3710
3711 The list of cases is rearranged into a binary tree,
3712 nearly optimal assuming equal probability for each case.
3713
3714 The tree is transformed into RTL, eliminating
3715 redundant test conditions at the same time.
3716
3717 If program flow could reach the end of the
3718 decision tree an unconditional jump to the
3719 default code is emitted. */
3720
3721 use_cost_table
3722 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3723 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3724 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3725 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3726 default_label, TREE_TYPE (index_expr));
3727 emit_jump_if_reachable (default_label);
3728 }
3729 }
3730 else
3731 {
3732 int win = 0;
3733#ifdef HAVE_casesi
3734 if (HAVE_casesi)
3735 {
 3736	  enum machine_mode index_mode = SImode;
 3737	  int index_bits = GET_MODE_BITSIZE (index_mode);
 3738
 3739	  /* Convert the index to SImode.  */
 3740	  if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
 3741	      > GET_MODE_BITSIZE (index_mode))
 3742	    {
 3743	      enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
 3744	      rtx rangertx = expand_expr (range, 0, VOIDmode, 0);
 3745
 3746	      /* We must handle the endpoints in the original mode.  */
3747 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3748 index_expr, minval);
3749 minval = integer_zero_node;
3750 index = expand_expr (index_expr, 0, VOIDmode, 0);
3751 emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
3752 emit_jump_insn (gen_bltu (default_label));
3753 /* Now we can safely truncate. */
3754 index = convert_to_mode (index_mode, index, 0);
3755 }
3756 else
3757 {
3758 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3759 index_expr = convert (type_for_size (index_bits, 0),
3760 index_expr);
3761 index = expand_expr (index_expr, 0, VOIDmode, 0);
 3762	    }
3763 emit_queue ();
3764 index = protect_from_queue (index, 0);
3765 do_pending_stack_adjust ();
3766
3767 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3768 expand_expr (range, 0, VOIDmode, 0),
3769 table_label, default_label));
3770 win = 1;
3771 }
3772#endif
3773#ifdef HAVE_tablejump
3774 if (! win && HAVE_tablejump)
3775 {
3776 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3777 fold (build (MINUS_EXPR,
3778 TREE_TYPE (index_expr),
3779 index_expr, minval)));
3780 index = expand_expr (index_expr, 0, VOIDmode, 0);
3781 emit_queue ();
 3782	  index = protect_from_queue (index, 0);
3783 do_pending_stack_adjust ();
3784
3785 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3786 expand_expr (range, 0, VOIDmode, 0),
3787 table_label, default_label);
3788 win = 1;
3789 }
3790#endif
3791 if (! win)
3792 abort ();
3793
3794 /* Get table of labels to jump to, in order of case index. */
3795
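	  /* E.g., with `case 1:' going to L1, `case 3: case 4:' going
	     to L3, minval 1 and range 3, the vector built below is
	     { L1, default, L3, L3 }.  */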
3796 ncases = TREE_INT_CST_LOW (range) + 1;
3797 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3798 bzero (labelvec, ncases * sizeof (rtx));
3799
3800 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3801 {
3802 register int i
3803 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3804
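	      /* Enter this node's label for every index value in its
		 range [low, high].  */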
3805 while (1)
3806 {
3807 labelvec[i]
3808 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3809 if (i + TREE_INT_CST_LOW (minval)
3810 == TREE_INT_CST_LOW (n->high))
3811 break;
3812 i++;
3813 }
3814 }
3815
3816 /* Fill in the gaps with the default. */
3817 for (i = 0; i < ncases; i++)
3818 if (labelvec[i] == 0)
3819 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3820
3821 /* Output the table */
3822 emit_label (table_label);
3823
 3824	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
 3825	     were an expression, instead of an #ifdef/#ifndef.  */
3826 if (
3827#ifdef CASE_VECTOR_PC_RELATIVE
3828 1 ||
3829#endif
3830 flag_pic)
3831 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3832 gen_rtx (LABEL_REF, Pmode, table_label),
3833 gen_rtvec_v (ncases, labelvec)));
3834 else
3835 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3836 gen_rtvec_v (ncases, labelvec)));
3837
3838 /* If the case insn drops through the table,
3839 after the table we must jump to the default-label.
3840 Otherwise record no drop-through after the table. */
3841#ifdef CASE_DROPS_THROUGH
3842 emit_jump (default_label);
3843#else
3844 emit_barrier ();
3845#endif
3846 }
3847
3848 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3849 reorder_insns (before_case, get_last_insn (),
3850 thiscase->data.case_stmt.start);
3851 }
3852 if (thiscase->exit_label)
3853 emit_label (thiscase->exit_label);
3854
3855 POPSTACK (case_stack);
3856
3857 free_temp_slots ();
3858}
3859
3860/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3861
3862static void
3863do_jump_if_equal (op1, op2, label, unsignedp)
3864 rtx op1, op2, label;
3865 int unsignedp;
3866{
3867 if (GET_CODE (op1) == CONST_INT
3868 && GET_CODE (op2) == CONST_INT)
3869 {
3870 if (INTVAL (op1) == INTVAL (op2))
3871 emit_jump (label);
3872 }
3873 else
3874 {
3875 enum machine_mode mode = GET_MODE (op1);
3876 if (mode == VOIDmode)
3877 mode = GET_MODE (op2);
3878 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3879 emit_jump_insn (gen_beq (label));
3880 }
3881}
3882\f
3883/* Not all case values are encountered equally. This function
3884 uses a heuristic to weight case labels, in cases where that
3885 looks like a reasonable thing to do.
3886
3887 Right now, all we try to guess is text, and we establish the
3888 following weights:
3889
3890 chars above space: 16
3891 digits: 16
3892 default: 12
3893 space, punct: 8
3894 tab: 4
3895 newline: 2
3896 other "\" chars: 1
3897 remaining chars: 0
3898
 3899	 If we find any cases in the switch that are not either -1 or in the range
 3900	 of valid ASCII characters, or are control characters other than those
 3901	 commonly used with "\", don't treat this switch as scanning text.
3902
3903 Return 1 if these nodes are suitable for cost estimation, otherwise
3904 return 0. */
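/* A typical beneficiary is a tokenizer-style switch over characters:
   the weights above then steer balance_case_nodes (below) toward
   placing the likelier text characters nearer the root of the
   decision tree.  */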
3905
3906static int
3907estimate_case_costs (node)
3908 case_node_ptr node;
3909{
3910 tree min_ascii = build_int_2 (-1, -1);
3911 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3912 case_node_ptr n;
3913 int i;
3914
3915 /* If we haven't already made the cost table, make it now. Note that the
3916 lower bound of the table is -1, not zero. */
3917
3918 if (cost_table == NULL)
3919 {
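      /* The `+ 1' below biases the pointer so that cost_table[-1] is
	 valid storage; the bzero likewise starts at cost_table - 1.  */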
3920 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3921 bzero (cost_table - 1, 129 * sizeof (short));
3922
3923 for (i = 0; i < 128; i++)
3924 {
3925 if (isalnum (i))
3926 cost_table[i] = 16;
3927 else if (ispunct (i))
3928 cost_table[i] = 8;
3929 else if (iscntrl (i))
3930 cost_table[i] = -1;
3931 }
3932
3933 cost_table[' '] = 8;
3934 cost_table['\t'] = 4;
3935 cost_table['\0'] = 4;
3936 cost_table['\n'] = 2;
3937 cost_table['\f'] = 1;
3938 cost_table['\v'] = 1;
3939 cost_table['\b'] = 1;
3940 }
3941
3942 /* See if all the case expressions look like text. It is text if the
3943 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3944 as signed arithmetic since we don't want to ever access cost_table with a
3945 value less than -1. Also check that none of the constants in a range
3946 are strange control characters. */
3947
3948 for (n = node; n; n = n->right)
3949 {
3950 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3951 return 0;
3952
3953 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3954 if (cost_table[i] < 0)
3955 return 0;
3956 }
3957
3958 /* All interesting values are within the range of interesting
3959 ASCII characters. */
3960 return 1;
3961}
3962
3963/* Scan an ordered list of case nodes
3964 combining those with consecutive values or ranges.
3965
3966 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3967
3968static void
3969group_case_nodes (head)
3970 case_node_ptr head;
3971{
3972 case_node_ptr node = head;
3973
3974 while (node)
3975 {
3976 rtx lb = next_real_insn (label_rtx (node->code_label));
3977 case_node_ptr np = node;
3978
3979 /* Try to group the successors of NODE with NODE. */
3980 while (((np = np->right) != 0)
3981 /* Do they jump to the same place? */
3982 && next_real_insn (label_rtx (np->code_label)) == lb
3983 /* Are their ranges consecutive? */
3984 && tree_int_cst_equal (np->low,
3985 fold (build (PLUS_EXPR,
3986 TREE_TYPE (node->high),
3987 node->high,
3988 integer_one_node)))
3989 /* An overflow is not consecutive. */
3990 && tree_int_cst_lt (node->high,
3991 fold (build (PLUS_EXPR,
3992 TREE_TYPE (node->high),
3993 node->high,
3994 integer_one_node))))
3995 {
3996 node->high = np->high;
3997 }
3998 /* NP is the first node after NODE which can't be grouped with it.
3999 Delete the nodes in between, and move on to that node. */
4000 node->right = np;
4001 node = np;
4002 }
4003}
4004
4005/* Take an ordered list of case nodes
4006 and transform them into a near optimal binary tree,
 4007	 on the assumption that any target code selection value is as
4008 likely as any other.
4009
4010 The transformation is performed by splitting the ordered
4011 list into two equal sections plus a pivot. The parts are
4012 then attached to the pivot as left and right branches. Each
 4013	 branch is then transformed recursively.  */
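/* E.g., the ordered chain 1 -> 3 -> 5 -> 7 -> 9 (no cost table) is
   split at 5, giving left part {1, 3}, pivot 5 and right part {7, 9};
   each part is then balanced recursively.  */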
4014
4015static void
4016balance_case_nodes (head, parent)
4017 case_node_ptr *head;
4018 case_node_ptr parent;
4019{
4020 register case_node_ptr np;
4021
4022 np = *head;
4023 if (np)
4024 {
4025 int cost = 0;
4026 int i = 0;
4027 int ranges = 0;
4028 register case_node_ptr *npp;
4029 case_node_ptr left;
4030
4031 /* Count the number of entries on branch. Also count the ranges. */
4032
4033 while (np)
4034 {
4035 if (!tree_int_cst_equal (np->low, np->high))
4036 {
4037 ranges++;
4038 if (use_cost_table)
4039 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4040 }
4041
4042 if (use_cost_table)
4043 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4044
4045 i++;
4046 np = np->right;
4047 }
4048
4049 if (i > 2)
4050 {
4051 /* Split this list if it is long enough for that to help. */
4052 npp = head;
4053 left = *npp;
4054 if (use_cost_table)
4055 {
 4056	      /* Find the place in the list that bisects the list's total cost.
 4057		 Here I gets half the total cost.  */
4058 int n_moved = 0;
4059 i = (cost + 1) / 2;
4060 while (1)
4061 {
4062 /* Skip nodes while their cost does not reach that amount. */
4063 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4064 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4065 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4066 if (i <= 0)
4067 break;
4068 npp = &(*npp)->right;
4069 n_moved += 1;
4070 }
4071 if (n_moved == 0)
4072 {
4073 /* Leave this branch lopsided, but optimize left-hand
4074 side and fill in `parent' fields for right-hand side. */
4075 np = *head;
4076 np->parent = parent;
4077 balance_case_nodes (&np->left, np);
4078 for (; np->right; np = np->right)
4079 np->right->parent = np;
4080 return;
4081 }
4082 }
4083 /* If there are just three nodes, split at the middle one. */
4084 else if (i == 3)
4085 npp = &(*npp)->right;
4086 else
4087 {
4088 /* Find the place in the list that bisects the list's total cost,
4089 where ranges count as 2.
4090 Here I gets half the total cost. */
4091 i = (i + ranges + 1) / 2;
4092 while (1)
4093 {
4094 /* Skip nodes while their cost does not reach that amount. */
4095 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4096 i--;
4097 i--;
4098 if (i <= 0)
4099 break;
4100 npp = &(*npp)->right;
4101 }
4102 }
4103 *head = np = *npp;
4104 *npp = 0;
4105 np->parent = parent;
4106 np->left = left;
4107
4108 /* Optimize each of the two split parts. */
4109 balance_case_nodes (&np->left, np);
4110 balance_case_nodes (&np->right, np);
4111 }
4112 else
4113 {
4114 /* Else leave this branch as one level,
4115 but fill in `parent' fields. */
4116 np = *head;
4117 np->parent = parent;
4118 for (; np->right; np = np->right)
4119 np->right->parent = np;
4120 }
4121 }
4122}
4123\f
4124/* Search the parent sections of the case node tree
4125 to see if a test for the lower bound of NODE would be redundant.
4126 INDEX_TYPE is the type of the index expression.
4127
4128 The instructions to generate the case decision tree are
4129 output in the same order as nodes are processed so it is
4130 known that if a parent node checks the range of the current
4131 node minus one that the current node is bounded at its lower
4132 span. Thus the test would be redundant. */
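/* For instance, if this node covers 5..8 and some parent already
   compared the index against 4 (its high bound) before dispatching
   here, every value below 5 has been ruled out and the lower-bound
   test is skipped.  */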
4133
4134static int
4135node_has_low_bound (node, index_type)
4136 case_node_ptr node;
4137 tree index_type;
4138{
4139 tree low_minus_one;
4140 case_node_ptr pnode;
4141
4142 /* If the lower bound of this node is the lowest value in the index type,
4143 we need not test it. */
4144
4145 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4146 return 1;
4147
4148 /* If this node has a left branch, the value at the left must be less
4149 than that at this node, so it cannot be bounded at the bottom and
4150 we need not bother testing any further. */
4151
4152 if (node->left)
4153 return 0;
4154
4155 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4156 node->low, integer_one_node));
4157
4158 /* If the subtraction above overflowed, we can't verify anything.
4159 Otherwise, look for a parent that tests our value - 1. */
4160
4161 if (! tree_int_cst_lt (low_minus_one, node->low))
4162 return 0;
4163
4164 for (pnode = node->parent; pnode; pnode = pnode->parent)
4165 if (tree_int_cst_equal (low_minus_one, pnode->high))
4166 return 1;
4167
4168 return 0;
4169}
4170
4171/* Search the parent sections of the case node tree
4172 to see if a test for the upper bound of NODE would be redundant.
4173 INDEX_TYPE is the type of the index expression.
4174
4175 The instructions to generate the case decision tree are
4176 output in the same order as nodes are processed so it is
4177 known that if a parent node checks the range of the current
4178 node plus one that the current node is bounded at its upper
4179 span. Thus the test would be redundant. */
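/* Symmetrically, if this node covers 5..8 and some parent already
   compared the index against 9 (its low bound), every value above 8
   has been ruled out and the upper-bound test is skipped.  */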
4180
4181static int
4182node_has_high_bound (node, index_type)
4183 case_node_ptr node;
4184 tree index_type;
4185{
4186 tree high_plus_one;
4187 case_node_ptr pnode;
4188
4189 /* If the upper bound of this node is the highest value in the type
4190 of the index expression, we need not test against it. */
4191
4192 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4193 return 1;
4194
4195 /* If this node has a right branch, the value at the right must be greater
4196 than that at this node, so it cannot be bounded at the top and
4197 we need not bother testing any further. */
4198
4199 if (node->right)
4200 return 0;
4201
4202 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4203 node->high, integer_one_node));
4204
4205 /* If the addition above overflowed, we can't verify anything.
4206 Otherwise, look for a parent that tests our value + 1. */
4207
4208 if (! tree_int_cst_lt (node->high, high_plus_one))
4209 return 0;
4210
4211 for (pnode = node->parent; pnode; pnode = pnode->parent)
4212 if (tree_int_cst_equal (high_plus_one, pnode->low))
4213 return 1;
4214
4215 return 0;
4216}
4217
4218/* Search the parent sections of the
4219 case node tree to see if both tests for the upper and lower
4220 bounds of NODE would be redundant. */
4221
4222static int
4223node_is_bounded (node, index_type)
4224 case_node_ptr node;
4225 tree index_type;
4226{
4227 return (node_has_low_bound (node, index_type)
4228 && node_has_high_bound (node, index_type));
4229}
4230
4231/* Emit an unconditional jump to LABEL unless it would be dead code. */
4232
4233static void
4234emit_jump_if_reachable (label)
4235 rtx label;
4236{
4237 if (GET_CODE (get_last_insn ()) != BARRIER)
4238 emit_jump (label);
4239}
4240\f
4241/* Emit step-by-step code to select a case for the value of INDEX.
4242 The thus generated decision tree follows the form of the
4243 case-node binary tree NODE, whose nodes represent test conditions.
4244 INDEX_TYPE is the type of the index of the switch.
4245
4246 Care is taken to prune redundant tests from the decision tree
4247 by detecting any boundary conditions already checked by
4248 emitted rtx. (See node_has_high_bound, node_has_low_bound
4249 and node_is_bounded, above.)
4250
4251 Where the test conditions can be shown to be redundant we emit
4252 an unconditional jump to the target code. As a further
4253 optimization, the subordinates of a tree node are examined to
4254 check for bounded nodes. In this case conditional and/or
4255 unconditional jumps as a result of the boundary check for the
 4256	   current node are arranged to target the subordinates' associated
 4257	   code for out-of-bound conditions on the current node.
4258
 4259	 We can assume that when control reaches the code generated here,
4260 the index value has already been compared with the parents
4261 of this node, and determined to be on the same side of each parent
4262 as this node is. Thus, if this node tests for the value 51,
4263 and a parent tested for 52, we don't need to consider
4264 the possibility of a value greater than 51. If another parent
4265 tests for the value 50, then this node need not test anything. */
4266
4267static void
4268emit_case_nodes (index, node, default_label, index_type)
4269 rtx index;
4270 case_node_ptr node;
4271 rtx default_label;
4272 tree index_type;
4273{
4274 /* If INDEX has an unsigned type, we must make unsigned branches. */
4275 int unsignedp = TREE_UNSIGNED (index_type);
4276 typedef rtx rtx_function ();
4277 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4278 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4279 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4280 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4281 enum machine_mode mode = GET_MODE (index);
4282
4283 /* See if our parents have already tested everything for us.
4284 If they have, emit an unconditional jump for this node. */
4285 if (node_is_bounded (node, index_type))
4286 emit_jump (label_rtx (node->code_label));
4287
4288 else if (tree_int_cst_equal (node->low, node->high))
4289 {
4290 /* Node is single valued. First see if the index expression matches
4291 this node and then check our children, if any. */
4292
4293 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4294 label_rtx (node->code_label), unsignedp);
4295
4296 if (node->right != 0 && node->left != 0)
4297 {
4298 /* This node has children on both sides.
4299 Dispatch to one side or the other
4300 by comparing the index value with this node's value.
4301 If one subtree is bounded, check that one first,
4302 so we can avoid real branches in the tree. */
4303
4304 if (node_is_bounded (node->right, index_type))
4305 {
4306 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4307 GT, 0, mode, unsignedp, 0);
4308
4309 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4310 emit_case_nodes (index, node->left, default_label, index_type);
4311 }
4312
4313 else if (node_is_bounded (node->left, index_type))
4314 {
4315 emit_cmp_insn (index, expand_expr (node->high, 0,
4316 VOIDmode, 0),
4317 LT, 0, mode, unsignedp, 0);
4318 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4319 emit_case_nodes (index, node->right, default_label, index_type);
4320 }
4321
4322 else
4323 {
4324 /* Neither node is bounded. First distinguish the two sides;
4325 then emit the code for one side at a time. */
4326
4327 tree test_label
4328 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4329
4330 /* See if the value is on the right. */
4331 emit_cmp_insn (index, expand_expr (node->high, 0,
4332 VOIDmode, 0),
4333 GT, 0, mode, unsignedp, 0);
4334 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4335
4336 /* Value must be on the left.
4337 Handle the left-hand subtree. */
4338 emit_case_nodes (index, node->left, default_label, index_type);
4339 /* If left-hand subtree does nothing,
4340 go to default. */
4341 emit_jump_if_reachable (default_label);
4342
4343 /* Code branches here for the right-hand subtree. */
4344 expand_label (test_label);
4345 emit_case_nodes (index, node->right, default_label, index_type);
4346 }
4347 }
4348
4349 else if (node->right != 0 && node->left == 0)
4350 {
 4351	  /* Here we have a right child but no left so we issue a conditional
4352 branch to default and process the right child.
4353
 4354	     Omit the conditional branch to default if doing so would avoid only one
 4355	     right child; it costs too much space to save so little time.  */
4356
 4357	  if (node->right->right || node->right->left
4358 || !tree_int_cst_equal (node->right->low, node->right->high))
4359 {
4360 if (!node_has_low_bound (node, index_type))
4361 {
4362 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4363 LT, 0, mode, unsignedp, 0);
4364 emit_jump_insn ((*gen_blt_pat) (default_label));
4365 }
4366
4367 emit_case_nodes (index, node->right, default_label, index_type);
4368 }
4369 else
4370 /* We cannot process node->right normally
4371 since we haven't ruled out the numbers less than
4372 this node's value. So handle node->right explicitly. */
4373 do_jump_if_equal (index,
4374 expand_expr (node->right->low, 0, VOIDmode, 0),
4375 label_rtx (node->right->code_label), unsignedp);
4376 }
4377
4378 else if (node->right == 0 && node->left != 0)
4379 {
4380 /* Just one subtree, on the left. */
4381
4382#if 0 /* The following code and comment were formerly part
4383 of the condition here, but they didn't work
4384 and I don't understand what the idea was. -- rms. */
4385 /* If our "most probable entry" is less probable
4386 than the default label, emit a jump to
4387 the default label using condition codes
4388 already lying around. With no right branch,
4389 a branch-greater-than will get us to the default
4390 label correctly. */
4391 if (use_cost_table
4392 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4393 ;
4394#endif /* 0 */
4395 if (node->left->left || node->left->right
4396 || !tree_int_cst_equal (node->left->low, node->left->high))
4397 {
4398 if (!node_has_high_bound (node, index_type))
4399 {
4400 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4401 GT, 0, mode, unsignedp, 0);
4402 emit_jump_insn ((*gen_bgt_pat) (default_label));
4403 }
4404
4405 emit_case_nodes (index, node->left, default_label, index_type);
4406 }
4407 else
 4408	    /* We cannot process node->left normally
 4409	       since we haven't ruled out the numbers greater than
 4410	       this node's value.  So handle node->left explicitly.  */
4411 do_jump_if_equal (index,
4412 expand_expr (node->left->low, 0, VOIDmode, 0),
4413 label_rtx (node->left->code_label), unsignedp);
4414 }
4415 }
4416 else
4417 {
4418 /* Node is a range. These cases are very similar to those for a single
4419 value, except that we do not start by testing whether this node
4420 is the one to branch to. */
4421
4422 if (node->right != 0 && node->left != 0)
4423 {
4424 /* Node has subtrees on both sides.
4425 If the right-hand subtree is bounded,
4426 test for it first, since we can go straight there.
4427 Otherwise, we need to make a branch in the control structure,
4428 then handle the two subtrees. */
4429 tree test_label = 0;
4430
4431 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4432 GT, 0, mode, unsignedp, 0);
4433
4434 if (node_is_bounded (node->right, index_type))
4435 /* Right hand node is fully bounded so we can eliminate any
4436 testing and branch directly to the target code. */
4437 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4438 else
4439 {
4440 /* Right hand node requires testing.
4441 Branch to a label where we will handle it later. */
4442
4443 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4444 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4445 }
4446
4447 /* Value belongs to this node or to the left-hand subtree. */
4448
4449 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4450 GE, 0, mode, unsignedp, 0);
4451 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4452
4453 /* Handle the left-hand subtree. */
4454 emit_case_nodes (index, node->left, default_label, index_type);
4455
4456 /* If right node had to be handled later, do that now. */
4457
4458 if (test_label)
4459 {
4460 /* If the left-hand subtree fell through,
4461 don't let it fall into the right-hand subtree. */
4462 emit_jump_if_reachable (default_label);
4463
4464 expand_label (test_label);
4465 emit_case_nodes (index, node->right, default_label, index_type);
4466 }
4467 }
4468
4469 else if (node->right != 0 && node->left == 0)
4470 {
4471 /* Deal with values to the left of this node,
4472 if they are possible. */
4473 if (!node_has_low_bound (node, index_type))
4474 {
4475 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4476 LT, 0, mode, unsignedp, 0);
4477 emit_jump_insn ((*gen_blt_pat) (default_label));
4478 }
4479
4480 /* Value belongs to this node or to the right-hand subtree. */
4481
4482 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4483 LE, 0, mode, unsignedp, 0);
4484 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4485
4486 emit_case_nodes (index, node->right, default_label, index_type);
4487 }
4488
4489 else if (node->right == 0 && node->left != 0)
4490 {
4491 /* Deal with values to the right of this node,
4492 if they are possible. */
4493 if (!node_has_high_bound (node, index_type))
4494 {
4495 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4496 GT, 0, mode, unsignedp, 0);
4497 emit_jump_insn ((*gen_bgt_pat) (default_label));
4498 }
4499
4500 /* Value belongs to this node or to the left-hand subtree. */
4501
4502 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4503 GE, 0, mode, unsignedp, 0);
4504 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4505
4506 emit_case_nodes (index, node->left, default_label, index_type);
4507 }
4508
4509 else
4510 {
4511 /* Node has no children so we check low and high bounds to remove
4512 redundant tests. Only one of the bounds can exist,
4513 since otherwise this node is bounded--a case tested already. */
4514
4515 if (!node_has_high_bound (node, index_type))
4516 {
4517 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4518 GT, 0, mode, unsignedp, 0);
4519 emit_jump_insn ((*gen_bgt_pat) (default_label));
4520 }
4521
4522 if (!node_has_low_bound (node, index_type))
4523 {
4524 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4525 LT, 0, mode, unsignedp, 0);
4526 emit_jump_insn ((*gen_blt_pat) (default_label));
4527 }
4528
4529 emit_jump (label_rtx (node->code_label));
4530 }
4531 }
4532}
4533\f
4534/* These routines are used by the loop unrolling code. They copy BLOCK trees
4535 so that the debugging info will be correct for the unrolled loop. */
4536
4537/* Indexed by loop number, contains pointer to the first block in the loop,
4538 or zero if none. Only valid if doing loop unrolling and outputting debugger
4539 info. */
4540
4541tree *loop_number_first_block;
4542
4543/* Indexed by loop number, contains pointer to the last block in the loop,
4544 only valid if loop_number_first_block is nonzero. */
4545
4546tree *loop_number_last_block;
4547
4548/* Indexed by loop number, contains nesting level of first block in the
4549 loop, if any. Only valid if doing loop unrolling and outputting debugger
4550 info. */
4551
4552int *loop_number_block_level;
4553
4554/* Scan the function looking for loops, and walk the BLOCK tree at the
4555 same time. Record the first and last BLOCK tree corresponding to each
4556 loop. This function is similar to find_and_verify_loops in loop.c. */
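/* Schematically, the insn stream being scanned looks like

	NOTE_INSN_LOOP_BEG
	  NOTE_INSN_BLOCK_BEG	<- first BLOCK recorded for this loop
	  ...
	  NOTE_INSN_BLOCK_END
	NOTE_INSN_LOOP_END

   while the BLOCK tree is walked in lockstep through BLOCK_SUBBLOCKS
   and BLOCK_CHAIN.  */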
4557
4558void
4559find_loop_tree_blocks (f)
4560 rtx f;
4561{
4562 rtx insn;
4563 int current_loop = -1;
4564 int next_loop = -1;
4565 int loop;
4566 int block_level, tree_level;
4567 tree tree_block, parent_tree_block;
4568
4569 tree_block = DECL_INITIAL (current_function_decl);
4570 parent_tree_block = 0;
4571 block_level = 0;
4572 tree_level = -1;
4573
4574 /* Find boundaries of loops, and save the first and last BLOCK tree
4575 corresponding to each loop. */
4576
4577 for (insn = f; insn; insn = NEXT_INSN (insn))
4578 {
4579 if (GET_CODE (insn) == NOTE)
4580 switch (NOTE_LINE_NUMBER (insn))
4581 {
4582 case NOTE_INSN_LOOP_BEG:
4583 loop_number_block_level[++next_loop] = block_level;
4584 loop_number_first_block[next_loop] = 0;
4585 current_loop = next_loop;
4586 break;
4587
4588 case NOTE_INSN_LOOP_END:
4589 if (current_loop == -1)
4590 abort ();
4591
4592 current_loop = loop_outer_loop[current_loop];
4593 break;
4594
4595 case NOTE_INSN_BLOCK_BEG:
4596 if (tree_level < block_level)
4597 {
4598 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4599 we must now visit the subtree of the current block. */
4600 parent_tree_block = tree_block;
4601 tree_block = BLOCK_SUBBLOCKS (tree_block);
4602 tree_level++;
4603 }
4604 else if (tree_level > block_level)
4605 abort ();
4606
4607 /* Save this block tree here for all nested loops for which
4608 this is the topmost block. */
4609 for (loop = current_loop;
4610 loop != -1 && block_level == loop_number_block_level[loop];
4611 loop = loop_outer_loop[loop])
4612 {
4613 if (loop_number_first_block[loop] == 0)
4614 loop_number_first_block[loop] = tree_block;
4615 loop_number_last_block[loop] = tree_block;
4616 }
4617
4618 block_level++;
4619 break;
4620
4621 case NOTE_INSN_BLOCK_END:
4622 block_level--;
4623 if (tree_level > block_level)
4624 {
4625 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4626 we must now visit the parent of the current tree. */
4627 if (tree_block != 0 || parent_tree_block == 0)
4628 abort ();
4629 tree_block = parent_tree_block;
4630 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4631 tree_level--;
4632 }
4633 tree_block = BLOCK_CHAIN (tree_block);
4634 break;
4635 }
4636 }
4637}
4638
4639/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4640 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4641
4642 Note that we only copy the topmost level of tree nodes; they will share
4643 pointers to the same subblocks. */
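/* E.g., with COPIES == 3 and the loop body spanning blocks B1 -> B2,
   two additional copies of B1 and B2 are chained in after B2, all
   sharing the same BLOCK_SUBBLOCKS.  */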
4644
4645void
4646unroll_block_trees (loop_number, copies)
4647 int loop_number;
4648 int copies;
4649{
4650 int i;
4651
4652 /* First check whether there are any blocks that need to be copied. */
4653 if (loop_number_first_block[loop_number])
4654 {
4655 tree first_block = loop_number_first_block[loop_number];
4656 tree last_block = loop_number_last_block[loop_number];
4657 tree last_block_created = 0;
4658
4659 for (i = 0; i < copies - 1; i++)
4660 {
4661 tree block = first_block;
4662 tree insert_after = last_block;
4663 tree copied_block;
4664
4665 /* Copy every block between first_block and last_block inclusive,
4666 inserting the new blocks after last_block. */
4667 do
4668 {
4669 tree new_block = make_node (BLOCK);
4670 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4671 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4672 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4673 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4674 TREE_USED (new_block) = TREE_USED (block);
4675
4676 /* Insert the new block after the insertion point, and move
4677 the insertion point to the new block. This ensures that
4678 the copies are inserted in the right order. */
4679 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4680 BLOCK_CHAIN (insert_after) = new_block;
4681 insert_after = new_block;
4682
4683 copied_block = block;
4684 block = BLOCK_CHAIN (block);
4685 }
4686 while (copied_block != last_block);
4687
4688 /* Remember the last block created, so that we can update the
4689 info in the tables. */
4690 if (last_block_created == 0)
4691 last_block_created = insert_after;
4692 }
4693
4694 /* For all nested loops for which LAST_BLOCK was originally the last
4695 block, update the tables to indicate that LAST_BLOCK_CREATED is
4696 now the last block in the loop. */
4697 for (i = loop_number; last_block == loop_number_last_block[i];
4698 i = loop_outer_loop[i])
4699 loop_number_last_block[i] = last_block_created;
4700 }
4701}