/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

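/* Illustrative sketch (not part of this file): a front end expanding
   `if (c) s;' brackets the then-clause with the pair described above.
   `parse_and_expand_stmt' is a hypothetical parser hook; the expand_*
   calls are the real functions defined below.  */
#if 0
      expand_start_cond (c, 0);		/* emit test of C; jump past clause if false */
      parse_and_expand_stmt ();		/* parser emits RTL for the then-clause */
      expand_end_cond ();		/* define the label the false-jump targets */
#endif
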
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if the function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs,
   so we can mark them all live at the end of the function, if not optimizing.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose addresses are offsets from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0 /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

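/* Illustrative helper (not part of this file): before the tree is built,
   the case list is a plain chain through the `right' fields, so it can be
   walked like any singly linked list, e.g. to count the labels.  */
#if 0
static int
count_case_labels (list)
     case_node_ptr list;
{
  register int n = 0;
  for (; list; list = list->right)
    n++;
  return n;
}
#endif
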
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
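
/* Illustrative sketch (not part of this file) of the pattern the
   expand_start_... and expand_end_... pairs below follow: allocate and
   push a nesting object on entry, pop the whole construct on exit.  */
#if 0
  struct nesting *thisloop = ALLOC_NESTING ();
  thisloop->next = loop_stack;		/* chain on the per-type stack */
  thisloop->all = nesting_stack;	/* and on the combined stack */
  thisloop->depth = ++nesting_depth;
  loop_stack = thisloop;
  nesting_stack = thisloop;
  /* ... expand the construct's body ... */
  POPSTACK (loop_stack);		/* frees this entry and any inner ones */
#endif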
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

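/* Example (user-level GNU C, illustrative only) of a goto that needs a
   fixup: the label is not yet defined when the goto is seen, and the
   contour being left has a stack level to restore.  N, FAIL and USE are
   hypothetical names.  */
#if 0
  {
    {
      char buf[n];	/* variable size: this contour resets the stack level */
      if (fail)
	goto done;	/* label unknown here: expand_goto records a fixup */
      use (buf);
    }			/* exiting this contour finalizes the fixup */
  done: ;
  }
#endif
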
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
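
/* User-level GNU C (illustrative only) that reaches expand_computed_goto:
   `&&' takes a label's address and `goto *' jumps to a computed address.  */
#if 0
void
dispatch (i)
     int i;
{
  static void *table[] = { &&op0, &&op1 };
  goto *table[i];		/* expanded by expand_computed_goto */
 op0: return;
 op1: return;
}
#endif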
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

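/* User-level GNU C (illustrative only) that exercises the nonlocal path:
   the containing function declares the label with `__label__', and the
   nested function jumps to it; expand_goto below handles the jump.  */
#if 0
void
outer ()
{
  __label__ done;
  void inner () { goto done; }	/* nonlocal goto out of INNER */
  inner ();
 done: ;
}
#endif
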
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, 0);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, 0);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

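/* Sketch (illustrative only, not part of this file) of the call a front
   end might make for `asm ("addl %1,%0" : "=r" (x) : "r" (y));'.
   X_EXPR and Y_EXPR stand for the trees of X and Y; the sketch assumes
   the tree.c build_string (length, string) constructor and the global
   input_filename/lineno of the front ends of this era.  */
#if 0
  tree outputs = tree_cons (build_string (2, "=r"), x_expr, NULL_TREE);
  tree inputs = tree_cons (build_string (1, "r"), y_expr, NULL_TREE);
  expand_asm_operands (build_string (10, "addl %1,%0"),
		       outputs, inputs, NULL_TREE, 0, input_filename, lineno);
#endif
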
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      0, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1266
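/* User-level examples (illustrative only) of what the checks above do:

	x + 1;			=> warned: statement with no effect
	(void) x;		=> no warning: explicit cast to void
	({ x + 1; })		=> no warning while expr_stmts_for_value is set
   */
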
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
1372
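/* Sketch (illustrative only) of how a front end uses the pair above and
   below to expand a `({ ... })' grouping; `parse_and_expand_body' is a
   hypothetical parser hook.  */
#if 0
  tree t = expand_start_stmt_expr ();	/* begin collecting insns */
  parse_and_expand_body ();		/* each expr-stmt records its value */
  t = expand_end_stmt_expr (t);		/* T now has the type and RTL value */
#endif
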
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate an RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }  */

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.raised;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}
1681
1682/* Record that exception EX is caught by this exception handler.
1683 Return nonzero if in exception handling construct, otherwise return 0. */
1684int
1685expand_catch (ex)
1686 tree ex;
1687{
1688 tree *raises_ptr;
1689
1690 if (except_stack == 0)
1691 return 0;
1692 raises_ptr = &except_stack->data.except_stmt.handled;
1693 if (*raises_ptr != void_type_node
1694 && ex != NULL_TREE
1695 && ! value_member (ex, *raises_ptr))
1696 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1697 return 1;
1698}
1699
1700/* Record that this exception handler catches all exceptions.
1701 Return nonzero if in exception handling construct, otherwise return 0. */
1702
1703int
1704expand_catch_default ()
1705{
1706 if (except_stack == 0)
1707 return 0;
1708 except_stack->data.except_stmt.handled = void_type_node;
1709 return 1;
1710}
1711
1712int
1713expand_end_catch ()
1714{
1715 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
1716 return 0;
1717 expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
1718 return 1;
1719}
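/* A usage sketch (editorial, not compiled): one plausible way a
   language front end could drive the entry points above for a
   construct such as `try { ... } catch (E) { ... }'.  The call that
   opens the contour is hypothetical here; only the functions shown
   below are defined in this file.  */
#if 0
  /* ...open the except contour (hypothetical entry point)... */
  /* ...expand the body; raised exceptions accumulate on except_stack... */
  if (! expand_catch (ex))		/* record that EX is handled */
    error ("catch outside exception handling construct");
  /* ...expand the handler... */
  expand_end_catch ();			/* jump past remaining handlers */
  unhandled = expand_end_except ();	/* close contour; get leftovers */
#endif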
1720\f
1721/* Generate RTL for the start of an if-then. COND is the expression
1722 whose truth should be tested.
1723
1724 If EXITFLAG is nonzero, this conditional is visible to
1725 `exit_something'. */
1726
1727void
1728expand_start_cond (cond, exitflag)
1729 tree cond;
1730 int exitflag;
1731{
1732 struct nesting *thiscond = ALLOC_NESTING ();
1733
1734 /* Make an entry on cond_stack for the cond we are entering. */
1735
1736 thiscond->next = cond_stack;
1737 thiscond->all = nesting_stack;
1738 thiscond->depth = ++nesting_depth;
1739 thiscond->data.cond.next_label = gen_label_rtx ();
1740 /* Before we encounter an `else', we don't need a separate exit label
1741 unless there are supposed to be exit statements
1742 to exit this conditional. */
1743 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1744 thiscond->data.cond.endif_label = thiscond->exit_label;
1745 cond_stack = thiscond;
1746 nesting_stack = thiscond;
1747
1748 do_jump (cond, thiscond->data.cond.next_label, NULL);
1749}
1750
1751/* Generate RTL between the then-clause and the elseif-clause
1752 of an if-then-elseif-.... */
1753
1754void
1755expand_start_elseif (cond)
1756 tree cond;
1757{
1758 if (cond_stack->data.cond.endif_label == 0)
1759 cond_stack->data.cond.endif_label = gen_label_rtx ();
1760 emit_jump (cond_stack->data.cond.endif_label);
1761 emit_label (cond_stack->data.cond.next_label);
1762 cond_stack->data.cond.next_label = gen_label_rtx ();
1763 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1764}
1765
1766/* Generate RTL between the then-clause and the else-clause
1767 of an if-then-else. */
1768
1769void
1770expand_start_else ()
1771{
1772 if (cond_stack->data.cond.endif_label == 0)
1773 cond_stack->data.cond.endif_label = gen_label_rtx ();
1774 emit_jump (cond_stack->data.cond.endif_label);
1775 emit_label (cond_stack->data.cond.next_label);
1776 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1777}
1778
1779/* Generate RTL for the end of an if-then.
1780 Pop the record for it off of cond_stack. */
1781
1782void
1783expand_end_cond ()
1784{
1785 struct nesting *thiscond = cond_stack;
1786
1787 do_pending_stack_adjust ();
1788 if (thiscond->data.cond.next_label)
1789 emit_label (thiscond->data.cond.next_label);
1790 if (thiscond->data.cond.endif_label)
1791 emit_label (thiscond->data.cond.endif_label);
1792
1793 POPSTACK (cond_stack);
1794 last_expr_type = 0;
1795}
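/* A usage sketch (editorial, not compiled): the call protocol for
   `if (a) s1; else if (b) s2; else s3;' using the functions above.
   Every path converges on the single expand_end_cond call.  */
#if 0
  expand_start_cond (a, 0);	/* jump to next_label if A is false */
  /* ...expand S1... */
  expand_start_elseif (b);	/* close then-clause, test B */
  /* ...expand S2... */
  expand_start_else ();		/* close elseif-clause */
  /* ...expand S3... */
  expand_end_cond ();		/* emit pending labels, pop cond_stack */
#endif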
1796\f
1797/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1798 loop should be exited by `exit_something'. This is a loop for which
1799 `expand_continue' will jump to the top of the loop.
1800
1801 Make an entry on loop_stack to record the labels associated with
1802 this loop. */
1803
1804struct nesting *
1805expand_start_loop (exit_flag)
1806 int exit_flag;
1807{
1808 register struct nesting *thisloop = ALLOC_NESTING ();
1809
1810 /* Make an entry on loop_stack for the loop we are entering. */
1811
1812 thisloop->next = loop_stack;
1813 thisloop->all = nesting_stack;
1814 thisloop->depth = ++nesting_depth;
1815 thisloop->data.loop.start_label = gen_label_rtx ();
1816 thisloop->data.loop.end_label = gen_label_rtx ();
1817 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1818 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1819 loop_stack = thisloop;
1820 nesting_stack = thisloop;
1821
1822 do_pending_stack_adjust ();
1823 emit_queue ();
1824 emit_note (0, NOTE_INSN_LOOP_BEG);
1825 emit_label (thisloop->data.loop.start_label);
1826
1827 return thisloop;
1828}
1829
1830/* Like expand_start_loop but for a loop where the continuation point
1831 (for expand_continue_loop) will be specified explicitly. */
1832
1833struct nesting *
1834expand_start_loop_continue_elsewhere (exit_flag)
1835 int exit_flag;
1836{
1837 struct nesting *thisloop = expand_start_loop (exit_flag);
1838 loop_stack->data.loop.continue_label = gen_label_rtx ();
1839 return thisloop;
1840}
1841
1842/* Specify the continuation point for a loop started with
1843 expand_start_loop_continue_elsewhere.
1844 Use this at the point in the code to which a continue statement
1845 should jump. */
1846
1847void
1848expand_loop_continue_here ()
1849{
1850 do_pending_stack_adjust ();
1851 emit_note (0, NOTE_INSN_LOOP_CONT);
1852 emit_label (loop_stack->data.loop.continue_label);
1853}
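/* A usage sketch (editorial, not compiled): expanding
   `for (init; cond; incr) body' so that `continue' jumps to INCR
   rather than to the top of the loop.  */
#if 0
  /* ...expand INIT... */
  whichloop = expand_start_loop_continue_elsewhere (1);
  expand_exit_loop_if_false (whichloop, cond);	/* test at the top */
  /* ...expand BODY; a `continue' calls expand_continue_loop... */
  expand_loop_continue_here ();			/* continue lands here */
  /* ...expand INCR... */
  expand_end_loop ();		/* jump back to top, emit end label */
#endif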
1854
1855/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1856 Pop the block off of loop_stack. */
1857
1858void
1859expand_end_loop ()
1860{
1861 register rtx insn = get_last_insn ();
1862 register rtx start_label = loop_stack->data.loop.start_label;
1863 rtx last_test_insn = 0;
1864 int num_insns = 0;
1865
1866 /* Mark the continue-point at the top of the loop if none elsewhere. */
1867 if (start_label == loop_stack->data.loop.continue_label)
1868 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1869
1870 do_pending_stack_adjust ();
1871
1872 /* If optimizing, perhaps reorder the loop. If the loop
1873 starts with a conditional exit, roll that to the end
1874 where it will optimize together with the jump back.
1875
1876 We look for the last conditional branch to the exit that we encounter
1877 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1878 branch to the exit first, use it.
1879
1880 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1881 because moving them is not valid. */
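  /* Editorial illustration of the rewrite below: a loop laid out as
	start:  if (! cond) goto end;  body;  goto start;  end:
     becomes
	goto start;  newstart:  body;
	start:  if (! cond) goto end;  goto newstart;  end:
     so the exit test and the backward jump become adjacent and can
     be fused by jump optimization.  */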
1882
1883 if (optimize
1884 &&
1885 ! (GET_CODE (insn) == JUMP_INSN
1886 && GET_CODE (PATTERN (insn)) == SET
1887 && SET_DEST (PATTERN (insn)) == pc_rtx
1888 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1889 {
1890 /* Scan insns from the top of the loop looking for a qualified
1891 conditional exit. */
1892 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1893 insn = NEXT_INSN (insn))
1894 {
1895 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1896 break;
1897
1898 if (GET_CODE (insn) == NOTE
1899 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1900 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1901 break;
1902
1903 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1904 num_insns++;
1905
1906 if (last_test_insn && num_insns > 30)
1907 break;
1908
1909 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1910 && SET_DEST (PATTERN (insn)) == pc_rtx
1911 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1912 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1913 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1914 == loop_stack->data.loop.end_label))
1915 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1916 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1917 == loop_stack->data.loop.end_label))))
1918 last_test_insn = insn;
1919
1920 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1921 && GET_CODE (PATTERN (insn)) == SET
1922 && SET_DEST (PATTERN (insn)) == pc_rtx
1923 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1924 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1925 == loop_stack->data.loop.end_label))
1926 /* Include BARRIER. */
1927 last_test_insn = NEXT_INSN (insn);
1928 }
1929
1930 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1931 {
1932 /* We found one. Move everything from there up
1933 to the end of the loop, and add a jump into the loop
1934 to jump to there. */
1935 register rtx newstart_label = gen_label_rtx ();
1936 register rtx start_move = start_label;
1937
1938	      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1939		 then we want to move this note also. */
1940 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1941 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1942 == NOTE_INSN_LOOP_CONT))
1943 start_move = PREV_INSN (start_move);
1944
1945 emit_label_after (newstart_label, PREV_INSN (start_move));
1946 reorder_insns (start_move, last_test_insn, get_last_insn ());
1947 emit_jump_insn_after (gen_jump (start_label),
1948 PREV_INSN (newstart_label));
1949 emit_barrier_after (PREV_INSN (newstart_label));
1950 start_label = newstart_label;
1951 }
1952 }
1953
1954 emit_jump (start_label);
1955 emit_note (0, NOTE_INSN_LOOP_END);
1956 emit_label (loop_stack->data.loop.end_label);
1957
1958 POPSTACK (loop_stack);
1959
1960 last_expr_type = 0;
1961}
1962
1963/* Generate a jump to the current loop's continue-point.
1964 This is usually the top of the loop, but may be specified
1965 explicitly elsewhere. If not currently inside a loop,
1966 return 0 and do nothing; caller will print an error message. */
1967
1968int
1969expand_continue_loop (whichloop)
1970 struct nesting *whichloop;
1971{
1972 last_expr_type = 0;
1973 if (whichloop == 0)
1974 whichloop = loop_stack;
1975 if (whichloop == 0)
1976 return 0;
1977 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1978 return 1;
1979}
1980
1981/* Generate a jump to exit the current loop. If not currently inside a loop,
1982 return 0 and do nothing; caller will print an error message. */
1983
1984int
1985expand_exit_loop (whichloop)
1986 struct nesting *whichloop;
1987{
1988 last_expr_type = 0;
1989 if (whichloop == 0)
1990 whichloop = loop_stack;
1991 if (whichloop == 0)
1992 return 0;
1993 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1994 return 1;
1995}
1996
1997/* Generate a conditional jump to exit the current loop if COND
1998 evaluates to zero. If not currently inside a loop,
1999 return 0 and do nothing; caller will print an error message. */
2000
2001int
2002expand_exit_loop_if_false (whichloop, cond)
2003 struct nesting *whichloop;
2004 tree cond;
2005{
2006 last_expr_type = 0;
2007 if (whichloop == 0)
2008 whichloop = loop_stack;
2009 if (whichloop == 0)
2010 return 0;
2011 do_jump (cond, whichloop->data.loop.end_label, NULL);
2012 return 1;
2013}
2014
2015/* Return non-zero if we should preserve sub-expressions as separate
2016 pseudos. We never do so if we aren't optimizing. We always do so
2017 if -fexpensive-optimizations.
2018
2019 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2020 the loop may still be a small one. */
2021
2022int
2023preserve_subexpressions_p ()
2024{
2025 rtx insn;
2026
2027 if (flag_expensive_optimizations)
2028 return 1;
2029
2030 if (optimize == 0 || loop_stack == 0)
2031 return 0;
2032
2033 insn = get_last_insn_anywhere ();
2034
2035 return (insn
2036 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2037 < n_non_fixed_regs * 3));
2038
2039}
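/* Editorial note on the heuristic above: with, say, 30 non-fixed
   hard registers, an insn qualifies only while its UID is within 90
   of the loop start label's UID, i.e. roughly during the first 90
   insns emitted since the loop began.  */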
2040
2041/* Generate a jump to exit the current loop, conditional, binding contour
2042 or case statement. Not all such constructs are visible to this function,
2043 only those started with EXIT_FLAG nonzero. Individual languages use
2044 the EXIT_FLAG parameter to control which kinds of constructs you can
2045 exit this way.
2046
2047 If not currently inside anything that can be exited,
2048 return 0 and do nothing; caller will print an error message. */
2049
2050int
2051expand_exit_something ()
2052{
2053 struct nesting *n;
2054 last_expr_type = 0;
2055 for (n = nesting_stack; n; n = n->all)
2056 if (n->exit_label != 0)
2057 {
2058 expand_goto_internal (0, n->exit_label, 0);
2059 return 1;
2060 }
2061
2062 return 0;
2063}
2064\f
2065/* Generate RTL to return from the current function, with no value.
2066 (That is, we do not do anything about returning any value.) */
2067
2068void
2069expand_null_return ()
2070{
2071 struct nesting *block = block_stack;
2072 rtx last_insn = 0;
2073
2074 /* Does any pending block have cleanups? */
2075
2076 while (block && block->data.block.cleanups == 0)
2077 block = block->next;
2078
2079 /* If yes, use a goto to return, since that runs cleanups. */
2080
2081 expand_null_return_1 (last_insn, block != 0);
2082}
2083
2084/* Generate RTL to return from the current function, with value VAL. */
2085
2086void
2087expand_value_return (val)
2088 rtx val;
2089{
2090 struct nesting *block = block_stack;
2091 rtx last_insn = get_last_insn ();
2092 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2093
2094 /* Copy the value to the return location
2095 unless it's already there. */
2096
2097 if (return_reg != val)
2098 emit_move_insn (return_reg, val);
2099 if (GET_CODE (return_reg) == REG
2100 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2101 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2102
2103 /* Does any pending block have cleanups? */
2104
2105 while (block && block->data.block.cleanups == 0)
2106 block = block->next;
2107
2108 /* If yes, use a goto to return, since that runs cleanups.
2109 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2110
2111 expand_null_return_1 (last_insn, block != 0);
2112}
2113
2114/* Output a return with no value. If LAST_INSN is nonzero,
2115 pretend that the return takes place after LAST_INSN.
2116 If USE_GOTO is nonzero then don't use a return instruction;
2117 go to the return label instead. This causes any cleanups
2118 of pending blocks to be executed normally. */
2119
2120static void
2121expand_null_return_1 (last_insn, use_goto)
2122 rtx last_insn;
2123 int use_goto;
2124{
2125 rtx end_label = cleanup_label ? cleanup_label : return_label;
2126
2127 clear_pending_stack_adjust ();
2128 do_pending_stack_adjust ();
2129 last_expr_type = 0;
2130
2131 /* PCC-struct return always uses an epilogue. */
2132 if (current_function_returns_pcc_struct || use_goto)
2133 {
2134 if (end_label == 0)
2135 end_label = return_label = gen_label_rtx ();
2136 expand_goto_internal (0, end_label, last_insn);
2137 return;
2138 }
2139
2140 /* Otherwise output a simple return-insn if one is available,
2141 unless it won't do the job. */
2142#ifdef HAVE_return
2143 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2144 {
2145 emit_jump_insn (gen_return ());
2146 emit_barrier ();
2147 return;
2148 }
2149#endif
2150
2151 /* Otherwise jump to the epilogue. */
2152 expand_goto_internal (0, end_label, last_insn);
2153}
2154\f
2155/* Generate RTL to evaluate the expression RETVAL and return it
2156 from the current function. */
2157
2158void
2159expand_return (retval)
2160 tree retval;
2161{
2162 /* If there are any cleanups to be performed, then they will
2163 be inserted following LAST_INSN. It is desirable
2164 that the last_insn, for such purposes, should be the
2165 last insn before computing the return value. Otherwise, cleanups
2166 which call functions can clobber the return value. */
2167 /* ??? rms: I think that is erroneous, because in C++ it would
2168 run destructors on variables that might be used in the subsequent
2169 computation of the return value. */
2170 rtx last_insn = 0;
2171 register rtx val = 0;
2172 register rtx op0;
2173 tree retval_rhs;
2174 int cleanups;
2175 struct nesting *block;
2176
2177 /* If function wants no value, give it none. */
2178 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2179 {
2180 expand_expr (retval, 0, VOIDmode, 0);
2181 expand_null_return ();
2182 return;
2183 }
2184
2185 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2186 cleanups = any_pending_cleanups (1);
2187
2188 if (TREE_CODE (retval) == RESULT_DECL)
2189 retval_rhs = retval;
2190 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2191 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2192 retval_rhs = TREE_OPERAND (retval, 1);
2193 else if (TREE_TYPE (retval) == void_type_node)
2194 /* Recognize tail-recursive call to void function. */
2195 retval_rhs = retval;
2196 else
2197 retval_rhs = NULL_TREE;
2198
2199 /* Only use `last_insn' if there are cleanups which must be run. */
2200 if (cleanups || cleanup_label != 0)
2201 last_insn = get_last_insn ();
2202
2203 /* Distribute return down conditional expr if either of the sides
2204 may involve tail recursion (see test below). This enhances the number
2205 of tail recursions we see. Don't do this always since it can produce
2206 sub-optimal code in some cases and we distribute assignments into
2207 conditional expressions when it would help. */
2208
2209 if (optimize && retval_rhs != 0
2210 && frame_offset == 0
2211 && TREE_CODE (retval_rhs) == COND_EXPR
2212 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2213 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2214 {
2215 rtx label = gen_label_rtx ();
2216 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2217 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2218 DECL_RESULT (current_function_decl),
2219 TREE_OPERAND (retval_rhs, 1)));
2220 emit_label (label);
2221 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2222 DECL_RESULT (current_function_decl),
2223 TREE_OPERAND (retval_rhs, 2)));
2224 return;
2225 }
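  /* Editorial illustration: the transformation above rewrites
	return cond ? f () : g ();
     as the equivalent of
	if (! cond) goto L;  return f ();  L: return g ();
     so that each arm becomes a plain `return CALL', which the
     tail-recursion code below can recognize.  */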
2226
2227 /* For tail-recursive call to current function,
2228 just jump back to the beginning.
2229 It's unsafe if any auto variable in this function
2230 has its address taken; for simplicity,
2231 require stack frame to be empty. */
2232 if (optimize && retval_rhs != 0
2233 && frame_offset == 0
2234 && TREE_CODE (retval_rhs) == CALL_EXPR
2235 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2236 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2237 /* Finish checking validity, and if valid emit code
2238 to set the argument variables for the new call. */
2239 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2240 DECL_ARGUMENTS (current_function_decl)))
2241 {
2242 if (tail_recursion_label == 0)
2243 {
2244 tail_recursion_label = gen_label_rtx ();
2245 emit_label_after (tail_recursion_label,
2246 tail_recursion_reentry);
2247 }
2248      emit_queue ();
2249 expand_goto_internal (0, tail_recursion_label, last_insn);
2250 emit_barrier ();
2251 return;
2252 }
2253#ifdef HAVE_return
2254 /* This optimization is safe if there are local cleanups
2255 because expand_null_return takes care of them.
2256 ??? I think it should also be safe when there is a cleanup label,
2257 because expand_null_return takes care of them, too.
2258 Any reason why not? */
2259 if (HAVE_return && cleanup_label == 0
2260 && ! current_function_returns_pcc_struct)
2261 {
2262 /* If this is return x == y; then generate
2263 if (x == y) return 1; else return 0;
2264 if we can do it with explicit return insns. */
2265 if (retval_rhs)
2266 switch (TREE_CODE (retval_rhs))
2267 {
2268 case EQ_EXPR:
2269 case NE_EXPR:
2270 case GT_EXPR:
2271 case GE_EXPR:
2272 case LT_EXPR:
2273 case LE_EXPR:
2274 case TRUTH_ANDIF_EXPR:
2275 case TRUTH_ORIF_EXPR:
2276 case TRUTH_AND_EXPR:
2277 case TRUTH_OR_EXPR:
2278 case TRUTH_NOT_EXPR:
2279 op0 = gen_label_rtx ();
2280 jumpifnot (retval_rhs, op0);
2281 expand_value_return (const1_rtx);
2282 emit_label (op0);
2283 expand_value_return (const0_rtx);
2284 return;
2285 }
2286 }
2287#endif /* HAVE_return */
2288
2289 if (cleanups
2290 && retval_rhs != 0
2291 && TREE_TYPE (retval_rhs) != void_type_node
2292 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2293 {
2294 /* Calculate the return value into a pseudo reg. */
2295 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2296 emit_queue ();
2297 /* All temporaries have now been used. */
2298 free_temp_slots ();
2299 /* Return the calculated value, doing cleanups first. */
2300 expand_value_return (val);
2301 }
2302 else
2303 {
2304 /* No cleanups or no hard reg used;
2305 calculate value into hard return reg. */
2306 expand_expr (retval, 0, VOIDmode, 0);
2307 emit_queue ();
2308 free_temp_slots ();
2309 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2310 }
2311}
2312
2313/* Return 1 if the end of the generated RTX is not a barrier.
2314 This means code already compiled can drop through. */
2315
2316int
2317drop_through_at_end_p ()
2318{
2319 rtx insn = get_last_insn ();
2320 while (insn && GET_CODE (insn) == NOTE)
2321 insn = PREV_INSN (insn);
2322 return insn && GET_CODE (insn) != BARRIER;
2323}
2324\f
2325/* Emit code to alter this function's formal parms for a tail-recursive call.
2326 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2327 FORMALS is the chain of decls of formals.
2328 Return 1 if this can be done;
2329 otherwise return 0 and do not emit any code. */
2330
2331static int
2332tail_recursion_args (actuals, formals)
2333 tree actuals, formals;
2334{
2335 register tree a = actuals, f = formals;
2336 register int i;
2337 register rtx *argvec;
2338
2339 /* Check that number and types of actuals are compatible
2340 with the formals. This is not always true in valid C code.
2341 Also check that no formal needs to be addressable
2342 and that all formals are scalars. */
2343
2344 /* Also count the args. */
2345
2346 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2347 {
2348 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2349 return 0;
2350 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2351 return 0;
2352 }
2353 if (a != 0 || f != 0)
2354 return 0;
2355
2356 /* Compute all the actuals. */
2357
2358 argvec = (rtx *) alloca (i * sizeof (rtx));
2359
2360 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2361 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2362
2363 /* Find which actual values refer to current values of previous formals.
2364 Copy each of them now, before any formal is changed. */
2365
2366 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2367 {
2368 int copy = 0;
2369 register int j;
2370 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2371 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2372 { copy = 1; break; }
2373 if (copy)
2374 argvec[i] = copy_to_reg (argvec[i]);
2375 }
2376
2377 /* Store the values of the actuals into the formals. */
2378
2379 for (f = formals, a = actuals, i = 0; f;
2380 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2381 {
2382 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2383 emit_move_insn (DECL_RTL (f), argvec[i]);
2384 else
2385 convert_move (DECL_RTL (f), argvec[i],
2386 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2387 }
2388
2389 free_temp_slots ();
2390 return 1;
2391}
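/* Editorial illustration: the copy pass above is what makes a call
   like
	int f (a, b) int a, b; { ... return f (b, a); ... }
   safe to expand as a jump.  ARGVEC is (b, a); argvec[1] mentions
   the register of formal A, which is stored first, so argvec[1] is
   copied to a fresh pseudo before either formal is overwritten.  */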
2392\f
2393/* Generate the RTL code for entering a binding contour.
2394 The variables are declared one by one, by calls to `expand_decl'.
2395
2396 EXIT_FLAG is nonzero if this construct should be visible to
2397 `exit_something'. */
2398
2399void
2400expand_start_bindings (exit_flag)
2401 int exit_flag;
2402{
2403 struct nesting *thisblock = ALLOC_NESTING ();
2404
2405 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2406
2407 /* Make an entry on block_stack for the block we are entering. */
2408
2409 thisblock->next = block_stack;
2410 thisblock->all = nesting_stack;
2411 thisblock->depth = ++nesting_depth;
2412 thisblock->data.block.stack_level = 0;
2413 thisblock->data.block.cleanups = 0;
2414 thisblock->data.block.function_call_count = 0;
2415#if 0
2416 if (block_stack)
2417 {
2418 if (block_stack->data.block.cleanups == NULL_TREE
2419 && (block_stack->data.block.outer_cleanups == NULL_TREE
2420 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2421 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2422 else
2423 thisblock->data.block.outer_cleanups
2424 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2425 block_stack->data.block.outer_cleanups);
2426 }
2427 else
2428 thisblock->data.block.outer_cleanups = 0;
2429#endif
2430#if 1
2431 if (block_stack
2432 && !(block_stack->data.block.cleanups == NULL_TREE
2433 && block_stack->data.block.outer_cleanups == NULL_TREE))
2434 thisblock->data.block.outer_cleanups
2435 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2436 block_stack->data.block.outer_cleanups);
2437 else
2438 thisblock->data.block.outer_cleanups = 0;
2439#endif
2440 thisblock->data.block.label_chain = 0;
2441 thisblock->data.block.innermost_stack_block = stack_block_stack;
2442 thisblock->data.block.first_insn = note;
2443 thisblock->data.block.block_start_count = ++block_start_count;
2444 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2445 block_stack = thisblock;
2446 nesting_stack = thisblock;
2447
2448 /* Make a new level for allocating stack slots. */
2449 push_temp_slots ();
2450}
2451
2452/* Generate RTL code to terminate a binding contour.
2453 VARS is the chain of VAR_DECL nodes
2454 for the variables bound in this contour.
2455 MARK_ENDS is nonzero if we should put a note at the beginning
2456 and end of this binding contour.
2457
2458 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2459 (That is true automatically if the contour has a saved stack level.) */
2460
2461void
2462expand_end_bindings (vars, mark_ends, dont_jump_in)
2463 tree vars;
2464 int mark_ends;
2465 int dont_jump_in;
2466{
2467 register struct nesting *thisblock = block_stack;
2468 register tree decl;
2469
2470 if (warn_unused)
2471 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2472 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2473 warning_with_decl (decl, "unused variable `%s'");
2474
2475 /* Mark the beginning and end of the scope if requested. */
2476
2477 if (mark_ends)
2478 emit_note (0, NOTE_INSN_BLOCK_END);
2479 else
2480 /* Get rid of the beginning-mark if we don't make an end-mark. */
2481 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2482
2483 if (thisblock->exit_label)
2484 {
2485 do_pending_stack_adjust ();
2486 emit_label (thisblock->exit_label);
2487 }
2488
2489 /* If necessary, make a handler for nonlocal gotos taking
2490 place in the function calls in this block. */
2491 if (function_call_count != thisblock->data.block.function_call_count
2492 && nonlocal_labels
2493 /* Make handler for outermost block
2494 if there were any nonlocal gotos to this function. */
2495 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2496 /* Make handler for inner block if it has something
2497 special to do when you jump out of it. */
2498 : (thisblock->data.block.cleanups != 0
2499 || thisblock->data.block.stack_level != 0)))
2500 {
2501 tree link;
2502 rtx afterward = gen_label_rtx ();
2503 rtx handler_label = gen_label_rtx ();
2504 rtx save_receiver = gen_reg_rtx (Pmode);
2505
2506 /* Don't let jump_optimize delete the handler. */
2507 LABEL_PRESERVE_P (handler_label) = 1;
2508
2509 /* Record the handler address in the stack slot for that purpose,
2510 during this block, saving and restoring the outer value. */
2511 if (thisblock->next != 0)
2512 {
2513 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2514 emit_insn_before (gen_move_insn (save_receiver,
2515 nonlocal_goto_handler_slot),
2516 thisblock->data.block.first_insn);
2517 }
2518 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2519 gen_rtx (LABEL_REF, Pmode,
2520 handler_label)),
2521 thisblock->data.block.first_insn);
2522
2523 /* Jump around the handler; it runs only when specially invoked. */
2524 emit_jump (afterward);
2525 emit_label (handler_label);
2526
2527#ifdef HAVE_nonlocal_goto
2528 if (! HAVE_nonlocal_goto)
2529#endif
2530 /* First adjust our frame pointer to its actual value. It was
2531 previously set to the start of the virtual area corresponding to
2532 the stacked variables when we branched here and now needs to be
2533 adjusted to the actual hardware fp value.
2534
2535	 Assignments to virtual registers are converted by
2536 instantiate_virtual_regs into the corresponding assignment
2537 to the underlying register (fp in this case) that makes
2538 the original assignment true.
2539 So the following insn will actually be
2540 decrementing fp by STARTING_FRAME_OFFSET. */
2541 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2542
2543#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2544 if (fixed_regs[ARG_POINTER_REGNUM])
2545 {
2546#ifdef ELIMINABLE_REGS
2547 /* If the argument pointer can be eliminated in favor of the
2548 frame pointer, we don't need to restore it. We assume here
2549 that if such an elimination is present, it can always be used.
2550 This is the case on all known machines; if we don't make this
2551 assumption, we do unnecessary saving on many machines. */
2552 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2553 int i;
2554
2555 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2556 if (elim_regs[i].from == ARG_POINTER_REGNUM
2557 && elim_regs[i].to == FRAME_POINTER_REGNUM)
2558 break;
2559
2560 if (i == sizeof elim_regs / sizeof elim_regs [0])
2561#endif
2562 {
2563 /* Now restore our arg pointer from the address at which it
2564 was saved in our stack frame.
2565		 If there hasn't been space allocated for it yet, make
2566 some now. */
2567 if (arg_pointer_save_area == 0)
2568 arg_pointer_save_area
2569 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2570 emit_move_insn (virtual_incoming_args_rtx,
2571 /* We need a pseudo here, or else
2572 instantiate_virtual_regs_1 complains. */
2573 copy_to_reg (arg_pointer_save_area));
2574 }
2575 }
2576#endif
2577
2578 /* The handler expects the desired label address in the static chain
2579 register. It tests the address and does an appropriate jump
2580 to whatever label is desired. */
2581 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2582 /* Skip any labels we shouldn't be able to jump to from here. */
2583 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2584 {
2585 rtx not_this = gen_label_rtx ();
2586 rtx this = gen_label_rtx ();
2587 do_jump_if_equal (static_chain_rtx,
2588 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2589 this, 0);
2590 emit_jump (not_this);
2591 emit_label (this);
2592 expand_goto (TREE_VALUE (link));
2593 emit_label (not_this);
2594 }
2595 /* If label is not recognized, abort. */
2596 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2597 VOIDmode, 0);
2598 emit_label (afterward);
2599 }
2600
2601 /* Don't allow jumping into a block that has cleanups or a stack level. */
2602 if (dont_jump_in
2603 || thisblock->data.block.stack_level != 0
2604 || thisblock->data.block.cleanups != 0)
2605 {
2606 struct label_chain *chain;
2607
2608 /* Any labels in this block are no longer valid to go to.
2609 Mark them to cause an error message. */
2610 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2611 {
2612 DECL_TOO_LATE (chain->label) = 1;
2613 /* If any goto without a fixup came to this label,
2614 that must be an error, because gotos without fixups
2615 come from outside all saved stack-levels and all cleanups. */
2616 if (TREE_ADDRESSABLE (chain->label))
2617 error_with_decl (chain->label,
2618 "label `%s' used before containing binding contour");
2619 }
2620 }
2621
2622 /* Restore stack level in effect before the block
2623 (only if variable-size objects allocated). */
2624 /* Perform any cleanups associated with the block. */
2625
2626 if (thisblock->data.block.stack_level != 0
2627 || thisblock->data.block.cleanups != 0)
2628 {
2629 /* Don't let cleanups affect ({...}) constructs. */
2630 int old_expr_stmts_for_value = expr_stmts_for_value;
2631 rtx old_last_expr_value = last_expr_value;
2632 tree old_last_expr_type = last_expr_type;
2633 expr_stmts_for_value = 0;
2634
2635 /* Do the cleanups. */
2636 expand_cleanups (thisblock->data.block.cleanups, 0);
2637 do_pending_stack_adjust ();
2638
2639 expr_stmts_for_value = old_expr_stmts_for_value;
2640 last_expr_value = old_last_expr_value;
2641 last_expr_type = old_last_expr_type;
2642
2643 /* Restore the stack level. */
2644
2645 if (thisblock->data.block.stack_level != 0)
2646 {
2647 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2648 thisblock->data.block.stack_level, 0);
2649 if (nonlocal_goto_handler_slot != 0)
2650 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2651 }
2652
2653 /* Any gotos out of this block must also do these things.
2654 Also report any gotos with fixups that came to labels in this
2655 level. */
2656 fixup_gotos (thisblock,
2657 thisblock->data.block.stack_level,
2658 thisblock->data.block.cleanups,
2659 thisblock->data.block.first_insn,
2660 dont_jump_in);
2661 }
2662
2663 /* If doing stupid register allocation, make sure lives of all
2664 register variables declared here extend thru end of scope. */
2665
2666 if (obey_regdecls)
2667 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2668 {
2669 rtx rtl = DECL_RTL (decl);
2670 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2671 use_variable (rtl);
2672 }
2673
2674 /* Restore block_stack level for containing block. */
2675
2676 stack_block_stack = thisblock->data.block.innermost_stack_block;
2677 POPSTACK (block_stack);
2678
2679 /* Pop the stack slot nesting and free any slots at this level. */
2680 pop_temp_slots ();
2681}
2682\f
2683/* Generate RTL for the automatic variable declaration DECL.
2684 (Other kinds of declarations are simply ignored if seen here.)
2685 CLEANUP is an expression to be executed at exit from this binding contour;
2686 for example, in C++, it might call the destructor for this variable.
2687
2688 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2689 either before or after calling `expand_decl' but before compiling
2690 any subsequent expressions. This is because CLEANUP may be expanded
2691 more than once, on different branches of execution.
2692 For the same reason, CLEANUP may not contain a CALL_EXPR
2693 except as its topmost node--else `preexpand_calls' would get confused.
2694
2695 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2696 that is not associated with any particular variable.
2697
2698 There is no special support here for C++ constructors.
2699 They should be handled by the proper code in DECL_INITIAL. */
2700
2701void
2702expand_decl (decl)
2703 register tree decl;
2704{
2705 struct nesting *thisblock = block_stack;
2706 tree type = TREE_TYPE (decl);
2707
2708 /* Only automatic variables need any expansion done.
2709 Static and external variables, and external functions,
2710 will be handled by `assemble_variable' (called from finish_decl).
2711 TYPE_DECL and CONST_DECL require nothing.
2712 PARM_DECLs are handled in `assign_parms'. */
2713
2714 if (TREE_CODE (decl) != VAR_DECL)
2715 return;
2716 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2717 return;
2718
2719 /* Create the RTL representation for the variable. */
2720
2721 if (type == error_mark_node)
2722 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2723 else if (DECL_SIZE (decl) == 0)
2724 /* Variable with incomplete type. */
2725 {
2726 if (DECL_INITIAL (decl) == 0)
2727 /* Error message was already done; now avoid a crash. */
2728 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2729 else
2730 /* An initializer is going to decide the size of this array.
2731 Until we know the size, represent its address with a reg. */
2732 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2733 }
2734 else if (DECL_MODE (decl) != BLKmode
2735 /* If -ffloat-store, don't put explicit float vars
2736 into regs. */
2737 && !(flag_float_store
2738 && TREE_CODE (type) == REAL_TYPE)
2739 && ! TREE_THIS_VOLATILE (decl)
2740 && ! TREE_ADDRESSABLE (decl)
2741 && (TREE_REGDECL (decl) || ! obey_regdecls))
2742 {
2743 /* Automatic variable that can go in a register. */
2744 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2745 if (TREE_CODE (type) == POINTER_TYPE)
2746 mark_reg_pointer (DECL_RTL (decl));
2747 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2748 }
2749 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2750 {
2751 /* Variable of fixed size that goes on the stack. */
2752 rtx oldaddr = 0;
2753 rtx addr;
2754
2755 /* If we previously made RTL for this decl, it must be an array
2756 whose size was determined by the initializer.
2757 The old address was a register; set that register now
2758 to the proper address. */
2759 if (DECL_RTL (decl) != 0)
2760 {
2761 if (GET_CODE (DECL_RTL (decl)) != MEM
2762 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2763 abort ();
2764 oldaddr = XEXP (DECL_RTL (decl), 0);
2765 }
2766
2767 DECL_RTL (decl)
2768 = assign_stack_temp (DECL_MODE (decl),
2769 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2770 + BITS_PER_UNIT - 1)
2771 / BITS_PER_UNIT),
2772 1);
2773
2774 /* Set alignment we actually gave this decl. */
2775 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2776 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2777
2778 if (oldaddr)
2779 {
2780 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2781 if (addr != oldaddr)
2782 emit_move_insn (oldaddr, addr);
2783 }
2784
2785 /* If this is a memory ref that contains aggregate components,
2786 mark it as such for cse and loop optimize. */
2787 MEM_IN_STRUCT_P (DECL_RTL (decl))
2788 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2789 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2790 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2791#if 0
2792 /* If this is in memory because of -ffloat-store,
2793 set the volatile bit, to prevent optimizations from
2794 undoing the effects. */
2795 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2796 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2797#endif
2798 }
2799 else
2800 /* Dynamic-size object: must push space on the stack. */
2801 {
2802 rtx address, size;
2803
2804      /* Record the stack pointer on entry to block, if we have
2805	 not already done so. */
2806 if (thisblock->data.block.stack_level == 0)
2807 {
2808 do_pending_stack_adjust ();
2809 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2810 &thisblock->data.block.stack_level,
2811 thisblock->data.block.first_insn);
2812 stack_block_stack = thisblock;
2813 }
2814
2815 /* Compute the variable's size, in bytes. */
2816 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2817 DECL_SIZE (decl),
2818 size_int (BITS_PER_UNIT)),
2819 0, VOIDmode, 0);
2820 free_temp_slots ();
2821
2822 /* This is equivalent to calling alloca. */
2823 current_function_calls_alloca = 1;
2824
2825      /* Allocate space on the stack for the variable. */
2826      address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2827
2828 if (nonlocal_goto_handler_slot != 0)
2829 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2830
2831 /* Reference the variable indirect through that rtx. */
2832 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2833
2834 /* If this is a memory ref that contains aggregate components,
2835 mark it as such for cse and loop optimize. */
2836 MEM_IN_STRUCT_P (DECL_RTL (decl))
2837 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2838 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2839 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2840
2841 /* Indicate the alignment we actually gave this variable. */
2842#ifdef STACK_BOUNDARY
2843 DECL_ALIGN (decl) = STACK_BOUNDARY;
2844#else
2845 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2846#endif
2847 }
2848
2849 if (TREE_THIS_VOLATILE (decl))
2850 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2851 if (TREE_READONLY (decl))
2852 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2853
2854 /* If doing stupid register allocation, make sure life of any
2855 register variable starts here, at the start of its scope. */
2856
2857 if (obey_regdecls)
2858 use_variable (DECL_RTL (decl));
2859}
2860\f
2861/* Emit code to perform the initialization of a declaration DECL. */
2862
2863void
2864expand_decl_init (decl)
2865 tree decl;
2866{
2867 int was_used = TREE_USED (decl);
2868
2869 if (TREE_STATIC (decl))
2870 return;
2871
2872 /* Compute and store the initial value now. */
2873
2874 if (DECL_INITIAL (decl) == error_mark_node)
2875 {
2876 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2877 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2878 || code == POINTER_TYPE)
2879 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2880 0, 0);
2881 emit_queue ();
2882 }
2883 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2884 {
2885 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2886 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2887 emit_queue ();
2888 }
2889
2890 /* Don't let the initialization count as "using" the variable. */
2891 TREE_USED (decl) = was_used;
2892
2893 /* Free any temporaries we made while initializing the decl. */
2894 free_temp_slots ();
2895}
2896
2897/* CLEANUP is an expression to be executed at exit from this binding contour;
2898 for example, in C++, it might call the destructor for this variable.
2899
2900 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2901 either before or after calling `expand_decl' but before compiling
2902 any subsequent expressions. This is because CLEANUP may be expanded
2903 more than once, on different branches of execution.
2904 For the same reason, CLEANUP may not contain a CALL_EXPR
2905 except as its topmost node--else `preexpand_calls' would get confused.
2906
2907 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2908 that is not associated with any particular variable. */
2909
2910int
2911expand_decl_cleanup (decl, cleanup)
2912 tree decl, cleanup;
2913{
2914 struct nesting *thisblock = block_stack;
2915
2916 /* Error if we are not in any block. */
2917 if (thisblock == 0)
2918 return 0;
2919
2920 /* Record the cleanup if there is one. */
2921
2922 if (cleanup != 0)
2923 {
2924 thisblock->data.block.cleanups
2925 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2926 /* If this block has a cleanup, it belongs in stack_block_stack. */
2927 stack_block_stack = thisblock;
2928 }
2929 return 1;
2930}
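/* A usage sketch (editorial, not compiled): the full protocol for a
   brace-level such as `{ T x = init; ... }' where T has a cleanup
   (e.g. a C++ destructor).  DECL, CLEANUP and VARS stand for trees
   built by the front end.  */
#if 0
  expand_start_bindings (0);		  /* open the contour */
  expand_decl (decl);			  /* make RTL: stack slot or pseudo */
  expand_decl_init (decl);		  /* store DECL_INITIAL's value */
  expand_decl_cleanup (decl, cleanup);	  /* run CLEANUP on every exit */
  /* ...expand the statements of the block... */
  expand_end_bindings (vars, 1, 0);	  /* run cleanups, pop the contour */
#endif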
2931\f
2932/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2933 DECL_ELTS is the list of elements that belong to DECL's type.
2934 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2935
2936void
2937expand_anon_union_decl (decl, cleanup, decl_elts)
2938 tree decl, cleanup, decl_elts;
2939{
2940 struct nesting *thisblock = block_stack;
2941 rtx x;
2942
2943  expand_decl (decl);
      expand_decl_cleanup (decl, cleanup);
2944 x = DECL_RTL (decl);
2945
2946 while (decl_elts)
2947 {
2948 tree decl_elt = TREE_VALUE (decl_elts);
2949 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2950 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2951
2952 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2953 instead create a new MEM rtx with the proper mode. */
2954 if (GET_CODE (x) == MEM)
2955 {
2956 if (mode == GET_MODE (x))
2957 DECL_RTL (decl_elt) = x;
2958 else
2959 {
2960 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2961 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2962 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2963 }
2964 }
2965 else if (GET_CODE (x) == REG)
2966 {
2967 if (mode == GET_MODE (x))
2968 DECL_RTL (decl_elt) = x;
2969 else
2970 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2971 }
2972 else
2973 abort ();
2974
2975 /* Record the cleanup if there is one. */
2976
2977 if (cleanup != 0)
2978 thisblock->data.block.cleanups
2979 = temp_tree_cons (decl_elt, cleanup_elt,
2980 thisblock->data.block.cleanups);
2981
2982 decl_elts = TREE_CHAIN (decl_elts);
2983 }
2984}
2985\f
2986/* Expand a list of cleanups LIST.
2987 Elements may be expressions or may be nested lists.
2988
2989 If DONT_DO is nonnull, then any list-element
2990 whose TREE_PURPOSE matches DONT_DO is omitted.
2991 This is sometimes used to avoid a cleanup associated with
2992 a value that is being returned out of the scope. */
2993
2994static void
2995expand_cleanups (list, dont_do)
2996 tree list;
2997 tree dont_do;
2998{
2999 tree tail;
3000 for (tail = list; tail; tail = TREE_CHAIN (tail))
3001 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3002 {
3003 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3004 expand_cleanups (TREE_VALUE (tail), dont_do);
3005 else
3006 {
3007 /* Cleanups may be run multiple times. For example,
3008 when exiting a binding contour, we expand the
3009 cleanups associated with that contour. When a goto
3010 within that binding contour has a target outside that
3011 contour, it will expand all cleanups from its scope to
3012 the target. Though the cleanups are expanded multiple
3013 times, the control paths are non-overlapping so the
3014 cleanups will not be executed twice. */
3015 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3016 free_temp_slots ();
3017 }
3018 }
3019}
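/* Editorial note on the LIST structure above: each element is a
   (TREE_PURPOSE . TREE_VALUE) pair whose TREE_VALUE is either a
   cleanup expression or a nested TREE_LIST of the same shape.  A
   caller returning DECL's value out of its scope passes DECL as
   DONT_DO, so DECL's own cleanup, and only that one, is skipped.  */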
3020
3021/* Expand a list of cleanups for a goto fixup.
3022 The expansion is put into the insn chain after the insn *BEFORE_JUMP
3023 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
3024
3025static void
3026fixup_cleanups (list, before_jump)
3027 tree list;
3028 rtx *before_jump;
3029{
3030 rtx beyond_jump = get_last_insn ();
3031 rtx new_before_jump;
3032
3033 expand_cleanups (list, 0);
3034 /* Pop any pushes done in the cleanups,
3035 in case function is about to return. */
3036 do_pending_stack_adjust ();
3037
3038 new_before_jump = get_last_insn ();
3039
3040 if (beyond_jump != new_before_jump)
3041 {
3042 /* If cleanups expand to nothing, don't reorder. */
3043 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
3044 *before_jump = new_before_jump;
3045 }
3046}
3047
3048/* Move all cleanups from the current block_stack
3049 to the containing block_stack, where they are assumed to
3050 have been created. If anything can cause a temporary to
3051 be created, but not expanded for more than one level of
3052 block_stacks, then this code will have to change. */
3053
3054void
3055move_cleanups_up ()
3056{
3057 struct nesting *block = block_stack;
3058 struct nesting *outer = block->next;
3059
3060 outer->data.block.cleanups
3061 = chainon (block->data.block.cleanups,
3062 outer->data.block.cleanups);
3063 block->data.block.cleanups = 0;
3064}
3065
3066tree
3067last_cleanup_this_contour ()
3068{
3069 if (block_stack == 0)
3070 return 0;
3071
3072 return block_stack->data.block.cleanups;
3073}
3074
3075/* Return 1 if there are any pending cleanups at this point.
3076 If THIS_CONTOUR is nonzero, check the current contour as well.
3077 Otherwise, look only at the contours that enclose this one. */
3078
3079int
3080any_pending_cleanups (this_contour)
3081 int this_contour;
3082{
3083 struct nesting *block;
3084
3085 if (block_stack == 0)
3086 return 0;
3087
3088 if (this_contour && block_stack->data.block.cleanups != NULL)
3089 return 1;
3090 if (block_stack->data.block.cleanups == 0
3091 && (block_stack->data.block.outer_cleanups == 0
3092#if 0
3093 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3094#endif
3095 ))
3096 return 0;
3097
3098 for (block = block_stack->next; block; block = block->next)
3099 if (block->data.block.cleanups != 0)
3100 return 1;
3101
3102 return 0;
3103}
3104\f
3105/* Enter a case (Pascal) or switch (C) statement.
3106 Push a block onto case_stack and nesting_stack
3107 to accumulate the case-labels that are seen
3108 and to record the labels generated for the statement.
3109
3110 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3111 Otherwise, this construct is transparent for `exit_something'.
3112
3113 EXPR is the index-expression to be dispatched on.
3114 TYPE is its nominal type. We could simply convert EXPR to this type,
3115 but instead we take short cuts. */
3116
3117void
3118expand_start_case (exit_flag, expr, type, printname)
3119 int exit_flag;
3120 tree expr;
3121 tree type;
3122 char *printname;
3123{
3124 register struct nesting *thiscase = ALLOC_NESTING ();
3125
3126 /* Make an entry on case_stack for the case we are entering. */
3127
3128 thiscase->next = case_stack;
3129 thiscase->all = nesting_stack;
3130 thiscase->depth = ++nesting_depth;
3131 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3132 thiscase->data.case_stmt.case_list = 0;
3133 thiscase->data.case_stmt.index_expr = expr;
3134 thiscase->data.case_stmt.nominal_type = type;
3135 thiscase->data.case_stmt.default_label = 0;
3136 thiscase->data.case_stmt.num_ranges = 0;
3137 thiscase->data.case_stmt.printname = printname;
3138 thiscase->data.case_stmt.seenlabel = 0;
3139 case_stack = thiscase;
3140 nesting_stack = thiscase;
3141
3142 do_pending_stack_adjust ();
3143
3144 /* Make sure case_stmt.start points to something that won't
3145 need any transformation before expand_end_case. */
3146 if (GET_CODE (get_last_insn ()) != NOTE)
3147 emit_note (0, NOTE_INSN_DELETED);
3148
3149 thiscase->data.case_stmt.start = get_last_insn ();
3150}
3151
3152/* Start a "dummy case statement" within which case labels are invalid
3153 and are not connected to any larger real case statement.
3154 This can be used if you don't want to let a case statement jump
3155 into the middle of certain kinds of constructs. */
3156
3157void
3158expand_start_case_dummy ()
3159{
3160 register struct nesting *thiscase = ALLOC_NESTING ();
3161
3162 /* Make an entry on case_stack for the dummy. */
3163
3164 thiscase->next = case_stack;
3165 thiscase->all = nesting_stack;
3166 thiscase->depth = ++nesting_depth;
3167 thiscase->exit_label = 0;
3168 thiscase->data.case_stmt.case_list = 0;
3169 thiscase->data.case_stmt.start = 0;
3170 thiscase->data.case_stmt.nominal_type = 0;
3171 thiscase->data.case_stmt.default_label = 0;
3172 thiscase->data.case_stmt.num_ranges = 0;
3173 case_stack = thiscase;
3174 nesting_stack = thiscase;
3175}
3176
3177/* End a dummy case statement. */
3178
3179void
3180expand_end_case_dummy ()
3181{
3182 POPSTACK (case_stack);
3183}
3184
3185/* Return the data type of the index-expression
3186 of the innermost case statement, or null if none. */
3187
3188tree
3189case_index_expr_type ()
3190{
3191 if (case_stack)
3192 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3193 return 0;
3194}
3195\f
3196/* Accumulate one case or default label inside a case or switch statement.
3197 VALUE is the value of the case (a null pointer, for a default label).
3198
3199 If not currently inside a case or switch statement, return 1 and do
3200 nothing. The caller will print a language-specific error message.
3201 If VALUE is a duplicate or overlaps, return 2 and do nothing
3202 except store the (first) duplicate node in *DUPLICATE.
3203 If VALUE is out of range, return 3 and do nothing.
3204   If we are jumping into the scope of a cleanup or var-sized array, return 5.
3205 Return 0 on success.
3206
3207 Extended to handle range statements. */
3208
3209int
3210pushcase (value, label, duplicate)
3211 register tree value;
3212 register tree label;
3213 tree *duplicate;
3214{
3215 register struct case_node **l;
3216 register struct case_node *n;
3217 tree index_type;
3218 tree nominal_type;
3219
3220 /* Fail if not inside a real case statement. */
3221 if (! (case_stack && case_stack->data.case_stmt.start))
3222 return 1;
3223
3224 if (stack_block_stack
3225 && stack_block_stack->depth > case_stack->depth)
3226 return 5;
3227
3228 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3229 nominal_type = case_stack->data.case_stmt.nominal_type;
3230
3231 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3232 if (index_type == error_mark_node)
3233 return 0;
3234
3235 /* Convert VALUE to the type in which the comparisons are nominally done. */
3236 if (value != 0)
3237 value = convert (nominal_type, value);
3238
3239 /* If this is the first label, warn if any insns have been emitted. */
3240 if (case_stack->data.case_stmt.seenlabel == 0)
3241 {
3242 rtx insn;
3243 for (insn = case_stack->data.case_stmt.start;
3244 insn;
3245 insn = NEXT_INSN (insn))
3246 {
3247 if (GET_CODE (insn) == CODE_LABEL)
3248 break;
3249 if (GET_CODE (insn) != NOTE
3250 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3251 {
3252 warning ("unreachable code at beginning of %s",
3253 case_stack->data.case_stmt.printname);
3254 break;
3255 }
3256 }
3257 }
3258 case_stack->data.case_stmt.seenlabel = 1;
3259
3260 /* Fail if this value is out of range for the actual type of the index
3261 (which may be narrower than NOMINAL_TYPE). */
3262 if (value != 0 && ! int_fits_type_p (value, index_type))
3263 return 3;
3264
3265 /* Fail if this is a duplicate or overlaps another entry. */
3266 if (value == 0)
3267 {
3268 if (case_stack->data.case_stmt.default_label != 0)
3269 {
3270 *duplicate = case_stack->data.case_stmt.default_label;
3271 return 2;
3272 }
3273 case_stack->data.case_stmt.default_label = label;
3274 }
3275 else
3276 {
3277 /* Find the elt in the chain before which to insert the new value,
3278 to keep the chain sorted in increasing order.
3279 But report an error if this element is a duplicate. */
3280 for (l = &case_stack->data.case_stmt.case_list;
3281 /* Keep going past elements distinctly less than VALUE. */
3282 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3283 l = &(*l)->right)
3284 ;
3285 if (*l)
3286 {
3287 /* Element we will insert before must be distinctly greater;
3288 overlap means error. */
3289 if (! tree_int_cst_lt (value, (*l)->low))
3290 {
3291 *duplicate = (*l)->code_label;
3292 return 2;
3293 }
3294 }
3295
3296 /* Add this label to the chain, and succeed.
3297 Copy VALUE so it is on temporary rather than momentary
3298 obstack and will thus survive till the end of the case statement. */
3299 n = (struct case_node *) oballoc (sizeof (struct case_node));
3300 n->left = 0;
3301 n->right = *l;
3302 n->high = n->low = copy_node (value);
3303 n->code_label = label;
3304 *l = n;
3305 }
3306
3307 expand_label (label);
3308 return 0;
3309}
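/* A usage sketch (editorial, not compiled): how a front end might
   drive the case machinery above, mapping the documented return
   codes to diagnostics.  `expand_end_case' is defined later in this
   file.  */
#if 0
  expand_start_case (1, index_expr, type, "switch statement");
  /* for each `case VALUE:' seen while expanding the body: */
  switch (pushcase (value, label_decl, &duplicate))
    {
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;
    case 3: error ("case value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
  /* ...expand the rest of the body... */
  expand_end_case (index_expr);
#endif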
3310
3311/* Like pushcase but this case applies to all values
3312 between VALUE1 and VALUE2 (inclusive).
3313 The return value is the same as that of pushcase
3314 but there is one additional error code:
3315 4 means the specified range was empty. */
3316
3317int
3318pushcase_range (value1, value2, label, duplicate)
3319 register tree value1, value2;
3320 register tree label;
3321 tree *duplicate;
3322{
3323 register struct case_node **l;
3324 register struct case_node *n;
3325 tree index_type;
3326 tree nominal_type;
3327
3328 /* Fail if not inside a real case statement. */
3329 if (! (case_stack && case_stack->data.case_stmt.start))
3330 return 1;
3331
3332 if (stack_block_stack
3333 && stack_block_stack->depth > case_stack->depth)
3334 return 5;
3335
3336 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3337 nominal_type = case_stack->data.case_stmt.nominal_type;
3338
3339 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3340 if (index_type == error_mark_node)
3341 return 0;
3342
3343 /* If this is the first label, warn if any insns have been emitted. */
3344 if (case_stack->data.case_stmt.seenlabel == 0)
3345 {
3346 rtx insn;
3347 for (insn = case_stack->data.case_stmt.start;
3348 insn;
3349 insn = NEXT_INSN (insn))
3350 {
3351 if (GET_CODE (insn) == CODE_LABEL)
3352 break;
3353 if (GET_CODE (insn) != NOTE
3354 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3355 {
3356 warning ("unreachable code at beginning of %s",
3357 case_stack->data.case_stmt.printname);
3358 break;
3359 }
3360 }
3361 }
3362 case_stack->data.case_stmt.seenlabel = 1;
3363
3364 /* Convert VALUEs to type in which the comparisons are nominally done. */
3365 if (value1 == 0) /* Negative infinity. */
3366 value1 = TYPE_MIN_VALUE(index_type);
3367 value1 = convert (nominal_type, value1);
3368
3369 if (value2 == 0) /* Positive infinity. */
3370 value2 = TYPE_MAX_VALUE(index_type);
3371 value2 = convert (nominal_type, value2);
3372
3373 /* Fail if these values are out of range. */
3374 if (! int_fits_type_p (value1, index_type))
3375 return 3;
3376
3377 if (! int_fits_type_p (value2, index_type))
3378 return 3;
3379
3380 /* Fail if the range is empty. */
3381 if (tree_int_cst_lt (value2, value1))
3382 return 4;
3383
3384 /* If the bounds are equal, turn this into the one-value case. */
3385 if (tree_int_cst_equal (value1, value2))
3386 return pushcase (value1, label, duplicate);
3387
3388 /* Find the elt in the chain before which to insert the new value,
3389 to keep the chain sorted in increasing order.
3390 But report an error if this element is a duplicate. */
3391 for (l = &case_stack->data.case_stmt.case_list;
3392 /* Keep going past elements distinctly less than this range. */
3393 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3394 l = &(*l)->right)
3395 ;
3396 if (*l)
3397 {
3398 /* Element we will insert before must be distinctly greater;
3399 overlap means error. */
3400 if (! tree_int_cst_lt (value2, (*l)->low))
3401 {
3402 *duplicate = (*l)->code_label;
3403 return 2;
3404 }
3405 }
3406
3407 /* Add this label to the chain, and succeed.
3408 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3409 obstack and will thus survive till the end of the case statement. */
3410
3411 n = (struct case_node *) oballoc (sizeof (struct case_node));
3412 n->left = 0;
3413 n->right = *l;
3414 n->low = copy_node (value1);
3415 n->high = copy_node (value2);
3416 n->code_label = label;
3417 *l = n;
3418
3419 expand_label (label);
3420
3421 case_stack->data.case_stmt.num_ranges++;
3422
3423 return 0;
3424}
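
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; all names here are invented): the sorted-list insertion and
   overlap check that pushcase_range performs, modeled with plain longs
   instead of tree constants.  It returns the same codes documented
   above: 0 on success, 2 on an overlapping range, 4 on an empty range.  */

#include <stdio.h>
#include <stdlib.h>

struct range_node { long low, high; struct range_node *right; };

static int
sketch_push_range (struct range_node **list, long low, long high)
{
  struct range_node **l, *n;

  /* Fail if the range is empty.  */
  if (high < low)
    return 4;

  /* Skip elements distinctly less than this range.  */
  for (l = list; *l != 0 && (*l)->high < low; l = &(*l)->right)
    ;

  /* The element we would insert before must be distinctly greater;
     overlap means error.  */
  if (*l != 0 && (*l)->low <= high)
    return 2;

  n = (struct range_node *) malloc (sizeof *n);
  n->low = low;
  n->high = high;
  n->right = *l;
  *l = n;
  return 0;
}

int
main (void)
{
  struct range_node *cases = 0;

  printf ("%d\n", sketch_push_range (&cases, 1, 5));	/* 0 */
  printf ("%d\n", sketch_push_range (&cases, 10, 12));	/* 0 */
  printf ("%d\n", sketch_push_range (&cases, 4, 8));	/* 2: overlaps 1..5 */
  printf ("%d\n", sketch_push_range (&cases, 9, 7));	/* 4: empty */
  return 0;
}
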
3425\f
3426/* Called when the index of a switch statement is an enumerated type
3427 and there is no default label.
3428
3429 Checks that all enumeration literals are covered by the case
3430 expressions of a switch. Also, warn if there are any extra
3431 switch cases that are *not* elements of the enumerated type.
3432
3433 If all enumeration literals were covered by the case expressions,
3434 turn one of the expressions into the default expression since it should
3435 not be possible to fall through such a switch. */
3436
3437void
3438check_for_full_enumeration_handling (type)
3439 tree type;
3440{
3441 register struct case_node *n;
3442 register struct case_node **l;
3443 register tree chain;
3444 int all_values = 1;
3445
3446 /* The time complexity of this loop is currently O(N * M), with
3447 N being the number of enumerals in the enumerated type, and
3448 M being the number of case expressions in the switch. */
3449
3450 for (chain = TYPE_VALUES (type);
3451 chain;
3452 chain = TREE_CHAIN (chain))
3453 {
3454 /* Find a match between enumeral and case expression, if possible.
3455 Quit looking when we've gone too far (since case expressions
3456 are kept sorted in ascending order). Warn about enumerals not
3457 handled in the switch statement case expression list. */
3458
3459 for (n = case_stack->data.case_stmt.case_list;
3460 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3461 n = n->right)
3462 ;
3463
1ddde1cd 3464 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
3465 {
3466 if (warn_switch)
1ddde1cd 3467 warning ("enumeration value `%s' not handled in switch",
3468 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3469 all_values = 0;
3470 }
3471 }
3472
3473 /* Now we go the other way around; we warn if there are case
3474 expressions that don't correspond to enumerals. This can
3475 occur since C and C++ don't enforce type-checking of
3476 assignments to enumeration variables. */
3477
3478 if (warn_switch)
3479 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3480 {
3481 for (chain = TYPE_VALUES (type);
3482 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3483 chain = TREE_CHAIN (chain))
3484 ;
3485
3486 if (!chain)
3487 warning ("case value `%d' not in enumerated type `%s'",
3488 TREE_INT_CST_LOW (n->low),
3489 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3490 == IDENTIFIER_NODE)
3491 ? TYPE_NAME (type)
3492 : DECL_NAME (TYPE_NAME (type))));
3493 if (!tree_int_cst_equal (n->low, n->high))
3494 {
3495 for (chain = TYPE_VALUES (type);
3496 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
3497 chain = TREE_CHAIN (chain))
3498 ;
3499
3500 if (!chain)
3501 warning ("case value `%d' not in enumerated type `%s'",
3502 TREE_INT_CST_LOW (n->high),
3503 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3504 == IDENTIFIER_NODE)
3505 ? TYPE_NAME (type)
3506 : DECL_NAME (TYPE_NAME (type))));
3507 }
3508 }
3509
3510 /* If all values were found as case labels, make one of them the default
3511 label. Thus, this switch will never fall through. We arbitrarily pick
3512 the last one to make the default since this is likely the most
3513 efficient choice. */
3514
3515 if (all_values)
3516 {
3517 for (l = &case_stack->data.case_stmt.case_list;
3518 (*l)->right != 0;
3519 l = &(*l)->right)
3520 ;
3521
3522 case_stack->data.case_stmt.default_label = (*l)->code_label;
3523 *l = 0;
3524 }
3525}
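
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): the two-way O(N * M) check above, modeled
   with int arrays -- warn about enumeration values with no case, and
   about cases naming no enumeration value.  The case array is assumed
   sorted ascending, as the real case list is.  */

#include <stdio.h>

static void
sketch_check_enum_coverage (const int *enums, int n_enums,
			    const int *cases, int n_cases)
{
  int i, j;

  for (i = 0; i < n_enums; i++)
    {
      /* Quit looking when we've gone too far, since cases are sorted.  */
      for (j = 0; j < n_cases && cases[j] < enums[i]; j++)
	;
      if (j == n_cases || cases[j] != enums[i])
	printf ("enumeration value %d not handled in switch\n", enums[i]);
    }

  for (j = 0; j < n_cases; j++)
    {
      for (i = 0; i < n_enums && enums[i] != cases[j]; i++)
	;
      if (i == n_enums)
	printf ("case value %d not in enumerated type\n", cases[j]);
    }
}

int
main (void)
{
  int enums[] = { 0, 1, 2, 3 };
  int cases[] = { 0, 2, 7 };

  sketch_check_enum_coverage (enums, 4, cases, 3);
  return 0;
}
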
3526\f
3527/* Terminate a case (Pascal) or switch (C) statement
3528 in which CASE_INDEX is the expression to be tested.
3529 Generate the code to test it and jump to the right place. */
3530
3531void
3532expand_end_case (orig_index)
3533 tree orig_index;
3534{
3535 tree minval, maxval, range;
3536 rtx default_label = 0;
3537 register struct case_node *n;
3538 int count;
3539 rtx index;
3540 rtx table_label = gen_label_rtx ();
3541 int ncases;
3542 rtx *labelvec;
3543 register int i;
3544 rtx before_case;
3545 register struct nesting *thiscase = case_stack;
3546 tree index_expr = thiscase->data.case_stmt.index_expr;
3547 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3548
3549 do_pending_stack_adjust ();
3550
3551 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3552 if (TREE_TYPE (index_expr) != error_mark_node)
3553 {
3554 /* If switch expression was an enumerated type, check that all
3555 enumeration literals are covered by the cases.
3556 No sense trying this if there's a default case, however. */
3557
3558 if (!thiscase->data.case_stmt.default_label
3559 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3560 && TREE_CODE (index_expr) != INTEGER_CST)
3561 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3562
3563 /* If this is the first label, warn if any insns have been emitted. */
3564 if (thiscase->data.case_stmt.seenlabel == 0)
3565 {
3566 rtx insn;
3567 for (insn = get_last_insn ();
3568 insn != case_stack->data.case_stmt.start;
3569 insn = PREV_INSN (insn))
3570 if (GET_CODE (insn) != NOTE
3571 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3572 {
3573 warning ("unreachable code at beginning of %s",
3574 case_stack->data.case_stmt.printname);
3575 break;
3576 }
3577 }
3578
3579 /* If we don't have a default-label, create one here,
3580 after the body of the switch. */
3581 if (thiscase->data.case_stmt.default_label == 0)
3582 {
3583 thiscase->data.case_stmt.default_label
3584 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3585 expand_label (thiscase->data.case_stmt.default_label);
3586 }
3587 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3588
3589 before_case = get_last_insn ();
3590
3591 /* Simplify the case-list before we count it. */
3592 group_case_nodes (thiscase->data.case_stmt.case_list);
3593
3594 /* Get upper and lower bounds of case values.
3595 Also convert all the case values to the index expr's data type. */
3596
3597 count = 0;
3598 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3599 {
3600 /* Check low and high label values are integers. */
3601 if (TREE_CODE (n->low) != INTEGER_CST)
3602 abort ();
3603 if (TREE_CODE (n->high) != INTEGER_CST)
3604 abort ();
3605
3606 n->low = convert (TREE_TYPE (index_expr), n->low);
3607 n->high = convert (TREE_TYPE (index_expr), n->high);
3608
3609 /* Count the elements and track the largest and smallest
3610 of them (treating them as signed even if they are not). */
3611 if (count++ == 0)
3612 {
3613 minval = n->low;
3614 maxval = n->high;
3615 }
3616 else
3617 {
3618 if (INT_CST_LT (n->low, minval))
3619 minval = n->low;
3620 if (INT_CST_LT (maxval, n->high))
3621 maxval = n->high;
3622 }
3623 /* A range counts double, since it requires two compares. */
3624 if (! tree_int_cst_equal (n->low, n->high))
3625 count++;
3626 }
3627
3628 /* Compute span of values. */
3629 if (count != 0)
3630 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3631 maxval, minval));
3632
3633 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3634 {
3635 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3636 emit_queue ();
3637 emit_jump (default_label);
3638 }
3639 /* If range of values is much bigger than number of values,
3640 make a sequence of conditional branches instead of a dispatch.
3641 If the switch-index is a constant, do it this way
3642 because we can optimize it. */
3643
3644#ifndef CASE_VALUES_THRESHOLD
28d81abb 3645#ifdef HAVE_casesi
4f73c5dd 3646#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
28d81abb 3647#else
3648 /* If machine does not have a case insn that compares the
3649 bounds, this means extra overhead for dispatch tables
3650 which raises the threshold for using them. */
3651#define CASE_VALUES_THRESHOLD 5
3652#endif /* HAVE_casesi */
3653#endif /* CASE_VALUES_THRESHOLD */
3654
3655 else if (TREE_INT_CST_HIGH (range) != 0
3656 || count < CASE_VALUES_THRESHOLD
3657 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3658 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 3659 /* These will reduce to a constant. */
28d81abb 3660 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 3661 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 3662 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3663 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3664 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3665 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3666 {
3667 index = expand_expr (index_expr, 0, VOIDmode, 0);
3668
3669 /* If the index is a short or char for which we do not have
3670 an insn to handle comparisons directly, convert it to
3671 a full integer now, rather than letting each comparison
3672 generate the conversion. */
3673
3674 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3675 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
3676 == CODE_FOR_nothing))
3677 {
3678 enum machine_mode wider_mode;
3679 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3680 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3681 if (cmp_optab->handlers[(int) wider_mode].insn_code
3682 != CODE_FOR_nothing)
3683 {
3684 index = convert_to_mode (wider_mode, index, unsignedp);
3685 break;
3686 }
3687 }
3688
3689 emit_queue ();
3690 do_pending_stack_adjust ();
3691
3692 index = protect_from_queue (index, 0);
3693 if (GET_CODE (index) == MEM)
3694 index = copy_to_reg (index);
3695 if (GET_CODE (index) == CONST_INT
3696 || TREE_CODE (index_expr) == INTEGER_CST)
3697 {
3698 /* Make a tree node with the proper constant value
3699 if we don't already have one. */
3700 if (TREE_CODE (index_expr) != INTEGER_CST)
3701 {
3702 index_expr
3703 = build_int_2 (INTVAL (index),
3704 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3705 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3706 }
3707
3708 /* For constant index expressions we need only
3709 issue an unconditional branch to the appropriate
3710 target code. The job of removing any unreachable
3711 code is left to the optimization phase if the
3712 "-O" option is specified. */
3713 for (n = thiscase->data.case_stmt.case_list;
3714 n;
3715 n = n->right)
3716 {
3717 if (! tree_int_cst_lt (index_expr, n->low)
3718 && ! tree_int_cst_lt (n->high, index_expr))
3719 break;
3720 }
3721 if (n)
3722 emit_jump (label_rtx (n->code_label));
3723 else
3724 emit_jump (default_label);
3725 }
3726 else
3727 {
3728 /* If the index expression is not constant we generate
3729 a binary decision tree to select the appropriate
3730 target code. This is done as follows:
3731
3732 The list of cases is rearranged into a binary tree,
3733 nearly optimal assuming equal probability for each case.
3734
3735 The tree is transformed into RTL, eliminating
3736 redundant test conditions at the same time.
3737
3738 If program flow could reach the end of the
3739 decision tree an unconditional jump to the
3740 default code is emitted. */
3741
3742 use_cost_table
3743 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3744 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3745 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3746 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3747 default_label, TREE_TYPE (index_expr));
3748 emit_jump_if_reachable (default_label);
3749 }
3750 }
3751 else
3752 {
3753 int win = 0;
3754#ifdef HAVE_casesi
3755 if (HAVE_casesi)
3756 {
c4fcf531 3757 enum machine_mode index_mode = SImode;
5130a5cc 3758 int index_bits = GET_MODE_BITSIZE (index_mode);
c4fcf531 3759
28d81abb 3760 /* Convert the index to SImode. */
3761 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3762 > GET_MODE_BITSIZE (index_mode))
28d81abb 3763 {
3764 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3765 rtx rangertx = expand_expr (range, 0, VOIDmode, 0);
3766
3767 /* We must handle the endpoints in the original mode. */
3768 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3769 index_expr, minval);
3770 minval = integer_zero_node;
3771 index = expand_expr (index_expr, 0, VOIDmode, 0);
3772 emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
3773 emit_jump_insn (gen_bltu (default_label));
3774 /* Now we can safely truncate. */
3775 index = convert_to_mode (index_mode, index, 0);
3776 }
3777 else
3778 {
3779 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3780 index_expr = convert (type_for_size (index_bits, 0),
3781 index_expr);
3782 index = expand_expr (index_expr, 0, VOIDmode, 0);
28d81abb 3783 }
3784 emit_queue ();
3785 index = protect_from_queue (index, 0);
3786 do_pending_stack_adjust ();
3787
3788 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3789 expand_expr (range, 0, VOIDmode, 0),
3790 table_label, default_label));
3791 win = 1;
3792 }
3793#endif
3794#ifdef HAVE_tablejump
3795 if (! win && HAVE_tablejump)
3796 {
3797 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3798 fold (build (MINUS_EXPR,
3799 TREE_TYPE (index_expr),
3800 index_expr, minval)));
3801 index = expand_expr (index_expr, 0, VOIDmode, 0);
3802 emit_queue ();
af2682ef 3803 index = protect_from_queue (index, 0);
3804 do_pending_stack_adjust ();
3805
3806 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3807 expand_expr (range, 0, VOIDmode, 0),
3808 table_label, default_label);
3809 win = 1;
3810 }
3811#endif
3812 if (! win)
3813 abort ();
3814
3815 /* Get table of labels to jump to, in order of case index. */
3816
3817 ncases = TREE_INT_CST_LOW (range) + 1;
3818 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3819 bzero (labelvec, ncases * sizeof (rtx));
3820
3821 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3822 {
3823 register int i
3824 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3825
3826 while (1)
3827 {
3828 labelvec[i]
3829 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3830 if (i + TREE_INT_CST_LOW (minval)
3831 == TREE_INT_CST_LOW (n->high))
3832 break;
3833 i++;
3834 }
3835 }
3836
3837 /* Fill in the gaps with the default. */
3838 for (i = 0; i < ncases; i++)
3839 if (labelvec[i] == 0)
3840 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3841
3842 /* Output the table */
3843 emit_label (table_label);
3844
3845 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3846 were an expression, instead of an #ifdef/#ifndef. */
3847 if (
3848#ifdef CASE_VECTOR_PC_RELATIVE
3849 1 ||
3850#endif
3851 flag_pic)
3852 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3853 gen_rtx (LABEL_REF, Pmode, table_label),
3854 gen_rtvec_v (ncases, labelvec)));
3855 else
3856 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3857 gen_rtvec_v (ncases, labelvec)));
3858
3859 /* If the case insn drops through the table,
3860 after the table we must jump to the default-label.
3861 Otherwise record no drop-through after the table. */
3862#ifdef CASE_DROPS_THROUGH
3863 emit_jump (default_label);
3864#else
3865 emit_barrier ();
3866#endif
3867 }
3868
3869 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3870 reorder_insns (before_case, get_last_insn (),
3871 thiscase->data.case_stmt.start);
3872 }
3873 if (thiscase->exit_label)
3874 emit_label (thiscase->exit_label);
3875
3876 POPSTACK (case_stack);
3877
3878 free_temp_slots ();
3879}
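
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): the strategy choice made above, reduced to
   its arithmetic.  A switch with COUNT case values spanning RANGE gets
   a dispatch table only when COUNT reaches the threshold and the table
   would be at most ten times larger than the number of cases.  The
   threshold 4 stands in for CASE_VALUES_THRESHOLD on a machine with
   casesi; the constant-index and wide-range special cases are left out.  */

#include <stdio.h>

static const char *
sketch_pick_dispatch (unsigned count, unsigned long range)
{
  if (count == 0)
    return "jump straight to default";
  if (count < 4 || range > 10UL * count)
    return "binary decision tree";
  return "jump table";
}

int
main (void)
{
  printf ("%s\n", sketch_pick_dispatch (3, 10));	/* decision tree */
  printf ("%s\n", sketch_pick_dispatch (8, 40));	/* jump table */
  printf ("%s\n", sketch_pick_dispatch (8, 1000));	/* decision tree */
  return 0;
}
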
3880
3881/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3882
3883static void
3884do_jump_if_equal (op1, op2, label, unsignedp)
3885 rtx op1, op2, label;
3886 int unsignedp;
3887{
3888 if (GET_CODE (op1) == CONST_INT
3889 && GET_CODE (op2) == CONST_INT)
3890 {
3891 if (INTVAL (op1) == INTVAL (op2))
3892 emit_jump (label);
3893 }
3894 else
3895 {
3896 enum machine_mode mode = GET_MODE (op1);
3897 if (mode == VOIDmode)
3898 mode = GET_MODE (op2);
3899 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3900 emit_jump_insn (gen_beq (label));
3901 }
3902}
3903\f
3904/* Not all case values are encountered equally. This function
3905 uses a heuristic to weight case labels, in cases where that
3906 looks like a reasonable thing to do.
3907
3908 Right now, all we try to guess is text, and we establish the
3909 following weights:
3910
3911 chars above space: 16
3912 digits: 16
3913 default: 12
3914 space, punct: 8
3915 tab: 4
3916 newline: 2
3917 other "\" chars: 1
3918 remaining chars: 0
3919
3920 If we find any cases in the switch that are not either -1 or in the range
3921 of valid ASCII characters, or are control characters other than those
3922 commonly used with "\", don't treat this switch as scanning text.
3923
3924 Return 1 if these nodes are suitable for cost estimation, otherwise
3925 return 0. */
3926
3927static int
3928estimate_case_costs (node)
3929 case_node_ptr node;
3930{
3931 tree min_ascii = build_int_2 (-1, -1);
3932 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3933 case_node_ptr n;
3934 int i;
3935
3936 /* If we haven't already made the cost table, make it now. Note that the
3937 lower bound of the table is -1, not zero. */
3938
3939 if (cost_table == NULL)
3940 {
3941 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3942 bzero (cost_table - 1, 129 * sizeof (short));
3943
3944 for (i = 0; i < 128; i++)
3945 {
3946 if (isalnum (i))
3947 cost_table[i] = 16;
3948 else if (ispunct (i))
3949 cost_table[i] = 8;
3950 else if (iscntrl (i))
3951 cost_table[i] = -1;
3952 }
3953
3954 cost_table[' '] = 8;
3955 cost_table['\t'] = 4;
3956 cost_table['\0'] = 4;
3957 cost_table['\n'] = 2;
3958 cost_table['\f'] = 1;
3959 cost_table['\v'] = 1;
3960 cost_table['\b'] = 1;
3961 }
3962
3963 /* See if all the case expressions look like text. It is text if the
3964 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3965 as signed arithmetic since we don't want to ever access cost_table with a
3966 value less than -1. Also check that none of the constants in a range
3967 are strange control characters. */
3968
3969 for (n = node; n; n = n->right)
3970 {
3971 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3972 return 0;
3973
3974 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3975 if (cost_table[i] < 0)
3976 return 0;
3977 }
3978
3979 /* All interesting values are within the range of interesting
3980 ASCII characters. */
3981 return 1;
3982}
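
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): building the character cost table above and
   probing it.  The table's lower bound is -1, not zero, so an EOF-like
   case of -1 can be weighted like other text.  */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>

static short *
sketch_make_cost_table (void)
{
  short *table = ((short *) malloc (129 * sizeof (short))) + 1;
  int i;

  memset (table - 1, 0, 129 * sizeof (short));
  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
	table[i] = 16;
      else if (ispunct (i))
	table[i] = 8;
      else if (iscntrl (i))
	table[i] = -1;	/* Strange control char: disqualifies the switch.  */
    }
  table[' '] = 8;
  table['\t'] = 4;
  table['\0'] = 4;
  table['\n'] = 2;
  table['\f'] = table['\v'] = table['\b'] = 1;
  return table;
}

int
main (void)
{
  short *t = sketch_make_cost_table ();

  printf ("a=%d space=%d newline=%d ctrl-A=%d\n",
	  t['a'], t[' '], t['\n'], t[1]);
  free (t - 1);
  return 0;
}
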
3983
3984/* Scan an ordered list of case nodes
3985 combining those with consecutive values or ranges.
3986
3987 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
3988
3989static void
3990group_case_nodes (head)
3991 case_node_ptr head;
3992{
3993 case_node_ptr node = head;
3994
3995 while (node)
3996 {
3997 rtx lb = next_real_insn (label_rtx (node->code_label));
3998 case_node_ptr np = node;
3999
4000 /* Try to group the successors of NODE with NODE. */
4001 while (((np = np->right) != 0)
4002 /* Do they jump to the same place? */
4003 && next_real_insn (label_rtx (np->code_label)) == lb
4004 /* Are their ranges consecutive? */
4005 && tree_int_cst_equal (np->low,
4006 fold (build (PLUS_EXPR,
4007 TREE_TYPE (node->high),
4008 node->high,
4009 integer_one_node)))
4010 /* An overflow is not consecutive. */
4011 && tree_int_cst_lt (node->high,
4012 fold (build (PLUS_EXPR,
4013 TREE_TYPE (node->high),
4014 node->high,
4015 integer_one_node))))
4016 {
4017 node->high = np->high;
4018 }
4019 /* NP is the first node after NODE which can't be grouped with it.
4020 Delete the nodes in between, and move on to that node. */
4021 node->right = np;
4022 node = np;
4023 }
4024}
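
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): the grouping pass above, run over a sorted
   array of (value, label) pairs instead of a node chain.  Entries that
   are consecutive and jump to the same place collapse into one
   low..high range, so 1: 2: 3: all targeting L1 become 1..3: L1.  The
   overflow check of the original is omitted here.  */

#include <stdio.h>

struct sk_case { long low, high; int label; };

static int
sketch_group_cases (struct sk_case *v, int n)
{
  int in, out;

  for (in = out = 0; in < n; in++)
    if (out > 0
	&& v[out - 1].label == v[in].label
	&& v[out - 1].high + 1 == v[in].low)
      v[out - 1].high = v[in].high;	/* Extend the previous range.  */
    else
      v[out++] = v[in];
  return out;				/* New, possibly smaller, length.  */
}

int
main (void)
{
  struct sk_case v[] =
    { { 1, 1, 1 }, { 2, 2, 1 }, { 3, 3, 1 }, { 5, 5, 2 } };
  int i, n = sketch_group_cases (v, 4);

  for (i = 0; i < n; i++)
    printf ("%ld..%ld -> L%d\n", v[i].low, v[i].high, v[i].label);
  return 0;
}
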
4025
4026/* Take an ordered list of case nodes
4027 and transform them into a near optimal binary tree,
6dc42e49 4028 on the assumption that any target code selection value is as
4029 likely as any other.
4030
4031 The transformation is performed by splitting the ordered
4032 list into two equal sections plus a pivot. The parts are
4033 then attached to the pivot as left and right branches. Each
4034 branch is then transformed recursively. */
4035
4036static void
4037balance_case_nodes (head, parent)
4038 case_node_ptr *head;
4039 case_node_ptr parent;
4040{
4041 register case_node_ptr np;
4042
4043 np = *head;
4044 if (np)
4045 {
4046 int cost = 0;
4047 int i = 0;
4048 int ranges = 0;
4049 register case_node_ptr *npp;
4050 case_node_ptr left;
4051
4052 /* Count the number of entries on branch. Also count the ranges. */
4053
4054 while (np)
4055 {
4056 if (!tree_int_cst_equal (np->low, np->high))
4057 {
4058 ranges++;
4059 if (use_cost_table)
4060 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4061 }
4062
4063 if (use_cost_table)
4064 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4065
4066 i++;
4067 np = np->right;
4068 }
4069
4070 if (i > 2)
4071 {
4072 /* Split this list if it is long enough for that to help. */
4073 npp = head;
4074 left = *npp;
4075 if (use_cost_table)
4076 {
4077 /* Find the place in the list that bisects the list's total cost;
4078 here I gets half the total cost. */
4079 int n_moved = 0;
4080 i = (cost + 1) / 2;
4081 while (1)
4082 {
4083 /* Skip nodes while their cost does not reach that amount. */
4084 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4085 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4086 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4087 if (i <= 0)
4088 break;
4089 npp = &(*npp)->right;
4090 n_moved += 1;
4091 }
4092 if (n_moved == 0)
4093 {
4094 /* Leave this branch lopsided, but optimize left-hand
4095 side and fill in `parent' fields for right-hand side. */
4096 np = *head;
4097 np->parent = parent;
4098 balance_case_nodes (&np->left, np);
4099 for (; np->right; np = np->right)
4100 np->right->parent = np;
4101 return;
4102 }
4103 }
4104 /* If there are just three nodes, split at the middle one. */
4105 else if (i == 3)
4106 npp = &(*npp)->right;
4107 else
4108 {
4109 /* Find the place in the list that bisects the list's total cost,
4110 where ranges count as 2.
4111 Here I gets half the total cost. */
4112 i = (i + ranges + 1) / 2;
4113 while (1)
4114 {
4115 /* Skip nodes while their cost does not reach that amount. */
4116 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4117 i--;
4118 i--;
4119 if (i <= 0)
4120 break;
4121 npp = &(*npp)->right;
4122 }
4123 }
4124 *head = np = *npp;
4125 *npp = 0;
4126 np->parent = parent;
4127 np->left = left;
4128
4129 /* Optimize each of the two split parts. */
4130 balance_case_nodes (&np->left, np);
4131 balance_case_nodes (&np->right, np);
4132 }
4133 else
4134 {
4135 /* Else leave this branch as one level,
4136 but fill in `parent' fields. */
4137 np = *head;
4138 np->parent = parent;
4139 for (; np->right; np = np->right)
4140 np->right->parent = np;
4141 }
4142 }
4143}
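
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): the core of the balancing step -- split a
   sorted singly linked list at its midpoint, make the middle node the
   root of this subtree, and recurse on the two halves.  The cost-table
   weighting, range counting, and `parent' bookkeeping of the original
   are omitted.  */

#include <stdio.h>

struct sk_node { int value; struct sk_node *left, *right; };

static void
sketch_balance (struct sk_node **head)
{
  struct sk_node **npp, *np, *left;
  int i = 0, k;

  for (np = *head; np; np = np->right)
    i++;
  if (i <= 2)
    return;			/* Leave short chains as they are.  */

  /* Walk to the pivot: the node that bisects the list.  */
  npp = head;
  for (k = i / 2; k > 0; k--)
    npp = &(*npp)->right;

  left = *head;			/* First half, ending before the pivot.  */
  np = *npp;			/* The pivot becomes this subtree's root.  */
  *npp = 0;			/* Detach the first half...  */
  *head = np;
  np->left = left;		/* ...and hang it on the left.  */

  sketch_balance (&np->left);
  sketch_balance (&np->right);
}

static void
sketch_print (struct sk_node *n, int depth)
{
  if (!n)
    return;
  sketch_print (n->left, depth + 1);
  printf ("%*s%d\n", depth * 2, "", n->value);
  sketch_print (n->right, depth + 1);
}

int
main (void)
{
  struct sk_node v[5] = { { 10 }, { 20 }, { 30 }, { 40 }, { 50 } };
  struct sk_node *head = &v[0];
  int i;

  for (i = 0; i < 4; i++)
    v[i].right = &v[i + 1];
  sketch_balance (&head);
  sketch_print (head, 0);
  return 0;
}
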
4144\f
4145/* Search the parent sections of the case node tree
4146 to see if a test for the lower bound of NODE would be redundant.
4147 INDEX_TYPE is the type of the index expression.
4148
4149 The instructions to generate the case decision tree are
4150 output in the same order as nodes are processed, so it is
4151 known that if a parent node checks the range of the current
4152 node minus one, the current node is bounded at its lower
4153 span. Thus the test would be redundant. */
4154
4155static int
4156node_has_low_bound (node, index_type)
4157 case_node_ptr node;
4158 tree index_type;
4159{
4160 tree low_minus_one;
4161 case_node_ptr pnode;
4162
4163 /* If the lower bound of this node is the lowest value in the index type,
4164 we need not test it. */
4165
4166 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4167 return 1;
4168
4169 /* If this node has a left branch, the value at the left must be less
4170 than that at this node, so it cannot be bounded at the bottom and
4171 we need not bother testing any further. */
4172
4173 if (node->left)
4174 return 0;
4175
4176 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4177 node->low, integer_one_node));
4178
4179 /* If the subtraction above overflowed, we can't verify anything.
4180 Otherwise, look for a parent that tests our value - 1. */
4181
4182 if (! tree_int_cst_lt (low_minus_one, node->low))
4183 return 0;
4184
4185 for (pnode = node->parent; pnode; pnode = pnode->parent)
4186 if (tree_int_cst_equal (low_minus_one, pnode->high))
4187 return 1;
4188
4189 return 0;
4190}
4191
4192/* Search the parent sections of the case node tree
4193 to see if a test for the upper bound of NODE would be redundant.
4194 INDEX_TYPE is the type of the index expression.
4195
4196 The instructions to generate the case decision tree are
4197 output in the same order as nodes are processed, so it is
4198 known that if a parent node checks the range of the current
4199 node plus one, the current node is bounded at its upper
4200 span. Thus the test would be redundant. */
4201
4202static int
4203node_has_high_bound (node, index_type)
4204 case_node_ptr node;
4205 tree index_type;
4206{
4207 tree high_plus_one;
4208 case_node_ptr pnode;
4209
4210 /* If the upper bound of this node is the highest value in the type
4211 of the index expression, we need not test against it. */
4212
4213 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4214 return 1;
4215
4216 /* If this node has a right branch, the value at the right must be greater
4217 than that at this node, so it cannot be bounded at the top and
4218 we need not bother testing any further. */
4219
4220 if (node->right)
4221 return 0;
4222
4223 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4224 node->high, integer_one_node));
4225
4226 /* If the addition above overflowed, we can't verify anything.
4227 Otherwise, look for a parent that tests our value + 1. */
4228
4229 if (! tree_int_cst_lt (node->high, high_plus_one))
4230 return 0;
4231
4232 for (pnode = node->parent; pnode; pnode = pnode->parent)
4233 if (tree_int_cst_equal (high_plus_one, pnode->low))
4234 return 1;
4235
4236 return 0;
4237}
4238
4239/* Search the parent sections of the
4240 case node tree to see if both tests for the upper and lower
4241 bounds of NODE would be redundant. */
4242
4243static int
4244node_is_bounded (node, index_type)
4245 case_node_ptr node;
4246 tree index_type;
4247{
4248 return (node_has_low_bound (node, index_type)
4249 && node_has_high_bound (node, index_type));
4250}
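
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): the parent walk used by node_has_low_bound,
   on a tree of plain int ranges.  A node's lower-bound test is
   redundant when some ancestor already tested exactly low - 1 as its
   high value, since control can only reach the node on one side of
   that test.  The overflow guard of the original is omitted.  */

#include <stdio.h>

struct sk_range { long low, high; struct sk_range *parent, *left; };

static int
sketch_has_low_bound (struct sk_range *node, long type_min)
{
  struct sk_range *p;

  if (node->low == type_min)
    return 1;	/* Nothing below the type's minimum to test for.  */
  if (node->left)
    return 0;	/* Smaller values exist in our own subtree.  */
  for (p = node->parent; p; p = p->parent)
    if (p->high == node->low - 1)
      return 1;	/* An ancestor already ruled out everything lower.  */
  return 0;
}

int
main (void)
{
  struct sk_range parent = { 10, 50, 0, 0 };
  struct sk_range child = { 51, 60, &parent, 0 };

  printf ("%d\n", sketch_has_low_bound (&child, 0));	/* 1: 50 == 51 - 1 */
  return 0;
}
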
4251
4252/* Emit an unconditional jump to LABEL unless it would be dead code. */
4253
4254static void
4255emit_jump_if_reachable (label)
4256 rtx label;
4257{
4258 if (GET_CODE (get_last_insn ()) != BARRIER)
4259 emit_jump (label);
4260}
4261\f
4262/* Emit step-by-step code to select a case for the value of INDEX.
4263 The thus generated decision tree follows the form of the
4264 case-node binary tree NODE, whose nodes represent test conditions.
4265 INDEX_TYPE is the type of the index of the switch.
4266
4267 Care is taken to prune redundant tests from the decision tree
4268 by detecting any boundary conditions already checked by
4269 emitted rtx. (See node_has_high_bound, node_has_low_bound
4270 and node_is_bounded, above.)
4271
4272 Where the test conditions can be shown to be redundant we emit
4273 an unconditional jump to the target code. As a further
4274 optimization, the subordinates of a tree node are examined to
4275 check for bounded nodes. In this case conditional and/or
4276 unconditional jumps as a result of the boundary check for the
4277 current node are arranged to target the subordinate's associated
4278 code for out-of-bound conditions on the current node.
4279
f72aed24 4280 We can assume that when control reaches the code generated here,
4281 the index value has already been compared with the parents
4282 of this node, and determined to be on the same side of each parent
4283 as this node is. Thus, if this node tests for the value 51,
4284 and a parent tested for 52, we don't need to consider
4285 the possibility of a value greater than 51. If another parent
4286 tests for the value 50, then this node need not test anything. */
4287
4288static void
4289emit_case_nodes (index, node, default_label, index_type)
4290 rtx index;
4291 case_node_ptr node;
4292 rtx default_label;
4293 tree index_type;
4294{
4295 /* If INDEX has an unsigned type, we must make unsigned branches. */
4296 int unsignedp = TREE_UNSIGNED (index_type);
4297 typedef rtx rtx_function ();
4298 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4299 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4300 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4301 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4302 enum machine_mode mode = GET_MODE (index);
4303
4304 /* See if our parents have already tested everything for us.
4305 If they have, emit an unconditional jump for this node. */
4306 if (node_is_bounded (node, index_type))
4307 emit_jump (label_rtx (node->code_label));
4308
4309 else if (tree_int_cst_equal (node->low, node->high))
4310 {
4311 /* Node is single valued. First see if the index expression matches
4312 this node and then check our children, if any. */
4313
4314 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4315 label_rtx (node->code_label), unsignedp);
4316
4317 if (node->right != 0 && node->left != 0)
4318 {
4319 /* This node has children on both sides.
4320 Dispatch to one side or the other
4321 by comparing the index value with this node's value.
4322 If one subtree is bounded, check that one first,
4323 so we can avoid real branches in the tree. */
4324
4325 if (node_is_bounded (node->right, index_type))
4326 {
4327 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4328 GT, 0, mode, unsignedp, 0);
4329
4330 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4331 emit_case_nodes (index, node->left, default_label, index_type);
4332 }
4333
4334 else if (node_is_bounded (node->left, index_type))
4335 {
4336 emit_cmp_insn (index, expand_expr (node->high, 0,
4337 VOIDmode, 0),
4338 LT, 0, mode, unsignedp, 0);
4339 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4340 emit_case_nodes (index, node->right, default_label, index_type);
4341 }
4342
4343 else
4344 {
4345 /* Neither node is bounded. First distinguish the two sides;
4346 then emit the code for one side at a time. */
4347
4348 tree test_label
4349 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4350
4351 /* See if the value is on the right. */
4352 emit_cmp_insn (index, expand_expr (node->high, 0,
4353 VOIDmode, 0),
4354 GT, 0, mode, unsignedp, 0);
4355 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4356
4357 /* Value must be on the left.
4358 Handle the left-hand subtree. */
4359 emit_case_nodes (index, node->left, default_label, index_type);
4360 /* If left-hand subtree does nothing,
4361 go to default. */
4362 emit_jump_if_reachable (default_label);
4363
4364 /* Code branches here for the right-hand subtree. */
4365 expand_label (test_label);
4366 emit_case_nodes (index, node->right, default_label, index_type);
4367 }
4368 }
4369
4370 else if (node->right != 0 && node->left == 0)
4371 {
4372 /* Here we have a right child but no left, so we issue a conditional
4373 branch to default and process the right child.
4374
4375 Omit the conditional branch to default if it avoids only one
4376 right child; it costs too much space to save so little time. */
4377
de14fd73 4378 if (node->right->right || node->right->left
4379 || !tree_int_cst_equal (node->right->low, node->right->high))
4380 {
4381 if (!node_has_low_bound (node, index_type))
4382 {
4383 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4384 LT, 0, mode, unsignedp, 0);
4385 emit_jump_insn ((*gen_blt_pat) (default_label));
4386 }
4387
4388 emit_case_nodes (index, node->right, default_label, index_type);
4389 }
4390 else
4391 /* We cannot process node->right normally
4392 since we haven't ruled out the numbers less than
4393 this node's value. So handle node->right explicitly. */
4394 do_jump_if_equal (index,
4395 expand_expr (node->right->low, 0, VOIDmode, 0),
4396 label_rtx (node->right->code_label), unsignedp);
4397 }
4398
4399 else if (node->right == 0 && node->left != 0)
4400 {
4401 /* Just one subtree, on the left. */
4402
4403#if 0 /* The following code and comment were formerly part
4404 of the condition here, but they didn't work
4405 and I don't understand what the idea was. -- rms. */
4406 /* If our "most probable entry" is less probable
4407 than the default label, emit a jump to
4408 the default label using condition codes
4409 already lying around. With no right branch,
4410 a branch-greater-than will get us to the default
4411 label correctly. */
4412 if (use_cost_table
4413 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4414 ;
4415#endif /* 0 */
4416 if (node->left->left || node->left->right
4417 || !tree_int_cst_equal (node->left->low, node->left->high))
4418 {
4419 if (!node_has_high_bound (node, index_type))
4420 {
4421 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4422 GT, 0, mode, unsignedp, 0);
4423 emit_jump_insn ((*gen_bgt_pat) (default_label));
4424 }
4425
4426 emit_case_nodes (index, node->left, default_label, index_type);
4427 }
4428 else
4429 /* We cannot process node->left normally
4430 since we haven't ruled out the numbers greater than
4431 this node's value. So handle node->left explicitly. */
4432 do_jump_if_equal (index,
4433 expand_expr (node->left->low, 0, VOIDmode, 0),
4434 label_rtx (node->left->code_label), unsignedp);
4435 }
4436 }
4437 else
4438 {
4439 /* Node is a range. These cases are very similar to those for a single
4440 value, except that we do not start by testing whether this node
4441 is the one to branch to. */
4442
4443 if (node->right != 0 && node->left != 0)
4444 {
4445 /* Node has subtrees on both sides.
4446 If the right-hand subtree is bounded,
4447 test for it first, since we can go straight there.
4448 Otherwise, we need to make a branch in the control structure,
4449 then handle the two subtrees. */
4450 tree test_label = 0;
4451
4452 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4453 GT, 0, mode, unsignedp, 0);
4454
4455 if (node_is_bounded (node->right, index_type))
4456 /* Right hand node is fully bounded so we can eliminate any
4457 testing and branch directly to the target code. */
4458 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4459 else
4460 {
4461 /* Right hand node requires testing.
4462 Branch to a label where we will handle it later. */
4463
4464 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4465 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4466 }
4467
4468 /* Value belongs to this node or to the left-hand subtree. */
4469
4470 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4471 GE, 0, mode, unsignedp, 0);
4472 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4473
4474 /* Handle the left-hand subtree. */
4475 emit_case_nodes (index, node->left, default_label, index_type);
4476
4477 /* If right node had to be handled later, do that now. */
4478
4479 if (test_label)
4480 {
4481 /* If the left-hand subtree fell through,
4482 don't let it fall into the right-hand subtree. */
4483 emit_jump_if_reachable (default_label);
4484
4485 expand_label (test_label);
4486 emit_case_nodes (index, node->right, default_label, index_type);
4487 }
4488 }
4489
4490 else if (node->right != 0 && node->left == 0)
4491 {
4492 /* Deal with values to the left of this node,
4493 if they are possible. */
4494 if (!node_has_low_bound (node, index_type))
4495 {
4496 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4497 LT, 0, mode, unsignedp, 0);
4498 emit_jump_insn ((*gen_blt_pat) (default_label));
4499 }
4500
4501 /* Value belongs to this node or to the right-hand subtree. */
4502
4503 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4504 LE, 0, mode, unsignedp, 0);
4505 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4506
4507 emit_case_nodes (index, node->right, default_label, index_type);
4508 }
4509
4510 else if (node->right == 0 && node->left != 0)
4511 {
4512 /* Deal with values to the right of this node,
4513 if they are possible. */
4514 if (!node_has_high_bound (node, index_type))
4515 {
4516 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4517 GT, 0, mode, unsignedp, 0);
4518 emit_jump_insn ((*gen_bgt_pat) (default_label));
4519 }
4520
4521 /* Value belongs to this node or to the left-hand subtree. */
4522
4523 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4524 GE, 0, mode, unsignedp, 0);
4525 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4526
4527 emit_case_nodes (index, node->left, default_label, index_type);
4528 }
4529
4530 else
4531 {
4532 /* Node has no children so we check low and high bounds to remove
4533 redundant tests. Only one of the bounds can exist,
4534 since otherwise this node is bounded--a case tested already. */
4535
4536 if (!node_has_high_bound (node, index_type))
4537 {
4538 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4539 GT, 0, mode, unsignedp, 0);
4540 emit_jump_insn ((*gen_bgt_pat) (default_label));
4541 }
4542
4543 if (!node_has_low_bound (node, index_type))
4544 {
4545 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4546 LT, 0, mode, unsignedp, 0);
4547 emit_jump_insn ((*gen_blt_pat) (default_label));
4548 }
4549
4550 emit_jump (label_rtx (node->code_label));
4551 }
4552 }
4553}
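
/* Illustrative sketch (an editorial aid, not part of stmt.c and not
   compiler output; names invented): the shape of the decision tree
   this function produces for `switch (x) { case 10: case 20: case 30: }'
   once balancing picks 20 as the root, rendered as C instead of RTL.
   The root is single valued, so it is tested for equality first; the
   greater-than test then distinguishes the two subtrees.  */

#include <stdio.h>

static const char *
sketch_dispatch (int x)
{
  if (x == 20)
    return "L20";		/* Root: single valued, test equality.  */
  if (x > 20)
    {
      if (x == 30)		/* Right subtree.  */
	return "L30";
      return "Ldefault";
    }
  if (x == 10)			/* Left subtree.  */
    return "L10";
  return "Ldefault";
}

int
main (void)
{
  printf ("%s %s %s\n",
	  sketch_dispatch (10), sketch_dispatch (25), sketch_dispatch (30));
  return 0;
}
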
4554\f
4555/* These routines are used by the loop unrolling code. They copy BLOCK trees
4556 so that the debugging info will be correct for the unrolled loop. */
4557
4558/* Indexed by loop number, contains pointer to the first block in the loop,
4559 or zero if none. Only valid if doing loop unrolling and outputting debugger
4560 info. */
4561
4562tree *loop_number_first_block;
4563
4564/* Indexed by loop number, contains pointer to the last block in the loop,
4565 only valid if loop_number_first_block is nonzero. */
4566
4567tree *loop_number_last_block;
4568
4569/* Indexed by loop number, contains nesting level of first block in the
4570 loop, if any. Only valid if doing loop unrolling and outputting debugger
4571 info. */
4572
4573int *loop_number_block_level;
4574
4575/* Scan the function looking for loops, and walk the BLOCK tree at the
4576 same time. Record the first and last BLOCK tree corresponding to each
4577 loop. This function is similar to find_and_verify_loops in loop.c. */
4578
4579void
4580find_loop_tree_blocks (f)
4581 rtx f;
4582{
4583 rtx insn;
4584 int current_loop = -1;
4585 int next_loop = -1;
4586 int loop;
4587 int block_level, tree_level;
4588 tree tree_block, parent_tree_block;
4589
4590 tree_block = DECL_INITIAL (current_function_decl);
4591 parent_tree_block = 0;
4592 block_level = 0;
4593 tree_level = -1;
4594
4595 /* Find boundaries of loops, and save the first and last BLOCK tree
4596 corresponding to each loop. */
4597
4598 for (insn = f; insn; insn = NEXT_INSN (insn))
4599 {
4600 if (GET_CODE (insn) == NOTE)
4601 switch (NOTE_LINE_NUMBER (insn))
4602 {
4603 case NOTE_INSN_LOOP_BEG:
4604 loop_number_block_level[++next_loop] = block_level;
4605 loop_number_first_block[next_loop] = 0;
4606 current_loop = next_loop;
4607 break;
4608
4609 case NOTE_INSN_LOOP_END:
4610 if (current_loop == -1)
4611 abort ();
4612
4613 current_loop = loop_outer_loop[current_loop];
4614 break;
4615
4616 case NOTE_INSN_BLOCK_BEG:
4617 if (tree_level < block_level)
4618 {
4619 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4620 we must now visit the subtree of the current block. */
4621 parent_tree_block = tree_block;
4622 tree_block = BLOCK_SUBBLOCKS (tree_block);
4623 tree_level++;
4624 }
4625 else if (tree_level > block_level)
4626 abort ();
4627
4628 /* Save this block tree here for all nested loops for which
4629 this is the topmost block. */
4630 for (loop = current_loop;
4631 loop != -1 && block_level == loop_number_block_level[loop];
4632 loop = loop_outer_loop[loop])
4633 {
4634 if (loop_number_first_block[loop] == 0)
4635 loop_number_first_block[loop] = tree_block;
4636 loop_number_last_block[loop] = tree_block;
4637 }
4638
4639 block_level++;
4640 break;
4641
4642 case NOTE_INSN_BLOCK_END:
4643 block_level--;
4644 if (tree_level > block_level)
4645 {
4646 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4647 we must now visit the parent of the current tree. */
4648 if (tree_block != 0 || parent_tree_block == 0)
4649 abort ();
4650 tree_block = parent_tree_block;
4651 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4652 tree_level--;
4653 }
4654 tree_block = BLOCK_CHAIN (tree_block);
4655 break;
4656 }
4657 }
4658}
4659
4660/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4661 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4662
4663 Note that we only copy the topmost level of tree nodes; they will share
4664 pointers to the same subblocks. */
4665
4666void
4667unroll_block_trees (loop_number, copies)
4668 int loop_number;
4669 int copies;
4670{
4671 int i;
4672
4673 /* First check whether there are any blocks that need to be copied. */
4674 if (loop_number_first_block[loop_number])
4675 {
4676 tree first_block = loop_number_first_block[loop_number];
4677 tree last_block = loop_number_last_block[loop_number];
4678 tree last_block_created = 0;
4679
4680 for (i = 0; i < copies - 1; i++)
4681 {
4682 tree block = first_block;
4683 tree insert_after = last_block;
4684 tree copied_block;
4685
4686 /* Copy every block between first_block and last_block inclusive,
4687 inserting the new blocks after last_block. */
4688 do
4689 {
4690 tree new_block = make_node (BLOCK);
4691 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4692 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4693 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4694 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4695 TREE_USED (new_block) = TREE_USED (block);
4696
4697 /* Insert the new block after the insertion point, and move
4698 the insertion point to the new block. This ensures that
4699 the copies are inserted in the right order. */
4700 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4701 BLOCK_CHAIN (insert_after) = new_block;
4702 insert_after = new_block;
4703
4704 copied_block = block;
4705 block = BLOCK_CHAIN (block);
4706 }
4707 while (copied_block != last_block);
4708
4709 /* Remember the last block created, so that we can update the
4710 info in the tables. */
4711 if (last_block_created == 0)
4712 last_block_created = insert_after;
4713 }
4714
4715 /* For all nested loops for which LAST_BLOCK was originally the last
4716 block, update the tables to indicate that LAST_BLOCK_CREATED is
4717 now the last block in the loop. */
4718 for (i = loop_number; last_block == loop_number_last_block[i];
4719 i = loop_outer_loop[i])
4720 loop_number_last_block[i] = last_block_created;
4721 }
4722}
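
/* Illustrative, self-contained sketch (an editorial aid, not part of
   stmt.c; names invented): the chain-copying pattern above, run on a
   plain linked list -- duplicate the segment FIRST..LAST COPIES-1
   times, inserting each copy after the previous tail so the copies
   stay in their original order.  */

#include <stdio.h>
#include <stdlib.h>

struct sk_block { int id; struct sk_block *chain; };

static void
sketch_unroll_chain (struct sk_block *first, struct sk_block *last,
		     int copies)
{
  struct sk_block *insert_after = last;
  int i;

  for (i = 0; i < copies - 1; i++)
    {
      struct sk_block *block = first, *copied;

      do
	{
	  struct sk_block *nb = (struct sk_block *) malloc (sizeof *nb);
	  nb->id = block->id;

	  /* Insert after the insertion point and advance it, so the
	     copies come out in the right order.  */
	  nb->chain = insert_after->chain;
	  insert_after->chain = nb;
	  insert_after = nb;

	  copied = block;
	  block = block->chain;
	}
      while (copied != last);
    }
}

int
main (void)
{
  struct sk_block b[3] = { { 1 }, { 2 }, { 3 } };
  struct sk_block *p;

  b[0].chain = &b[1], b[1].chain = &b[2], b[2].chain = 0;
  sketch_unroll_chain (&b[0], &b[1], 3);	/* Blocks 1..2, 3 copies.  */
  for (p = &b[0]; p; p = p->chain)
    printf ("%d ", p->id);			/* 1 2 1 2 1 2 3 */
  printf ("\n");
  return 0;
}
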