/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
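
/* For example (an illustrative sketch, not code used by this file),
   a front end expanding `if (cond) then_stmt;' would interleave its
   parsing with calls into this file roughly as follows:

     expand_start_cond (cond, 0);
     ... parse then_stmt, expanding each statement in it ...
     expand_end_cond ();

   Analogous start/end pairs exist for loops, bindings and case
   statements.  */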

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
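
/* Illustrative sketch (not used by the compiler): a Pascal-style
   range label `1..3:' would be recorded as one case_node whose LOW
   is the INTEGER_CST 1, whose HIGH is the INTEGER_CST 3, and whose
   CODE_LABEL is the label to jump to; a C label `case 5:' would
   have LOW == HIGH == 5.  */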

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
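
/* Illustrative sketch of how these macros are used (the real pushes
   happen in the expand_start_* functions below, e.g. expand_start_cond):

     struct nesting *thisloop = ALLOC_NESTING ();
     thisloop->next = loop_stack;
     thisloop->all = nesting_stack;
     thisloop->depth = ++nesting_depth;
     loop_stack = thisloop;
     nesting_stack = thisloop;
     ...
     POPSTACK (loop_stack);
*/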
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
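
/* Source-level illustration (a sketch, not code used here): in

     {
       int dyn[n];	-- entering this block may move the stack
       ...		   pointer by a run-time amount
       goto out;	-- must restore the stack level at `out'
     }
   out: ;

   the goto may be expanded before `out' is defined, so a goto_fixup
   records where to insert the stack-restore insns once it is.  */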

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following entry in the chain.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
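
/* Sketch of the intended pairing (an assumption based on the symmetry
   of these two functions; the callers live elsewhere, e.g. in
   function.c):

     save_stmt_status (p);
     ... compile a nested function ...
     restore_stmt_status (p);

   where P is the `struct function' for the suspended function.  */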
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
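
/* Source-level illustration (a sketch in GNU C, not code used here):
   the nonlocal path above is taken for a goto such as

     void outer ()
     {
       __label__ done;
       void inner () { goto done; }	-- target is in OUTER
       inner ();
     done: ;
     }

   whereas a goto whose label is in the current function takes the
   expand_goto_internal path.  */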

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx for it.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->context = current_block ();
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;
	  tree newblock;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  pushlevel (0);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  newblock = poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  /* Put the new block into its proper context.  */
	  BLOCK_SUBBLOCKS (f->context)
	    = chainon (BLOCK_SUBBLOCKS (f->context), newblock);
	  BLOCK_SUPERCONTEXT (newblock) = f->context;

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following element corresponds to our containing block
	     then the element must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
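
/* Source-level illustration (a sketch, not code used here): a plain
   asm with no operands, such as

     asm ("nop");

   comes through expand_asm and is emitted as a single ASM_INPUT rtx.  */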

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
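
/* Source-level illustration (a sketch, not code used here): an
   extended asm such as

     asm volatile ("movl %1,%0" : "=r" (out) : "g" (in) : "cc");

   arrives here with STRING the template, OUTPUTS the list containing
   `out' with constraint "=r", INPUTS the list containing `in' with
   constraint "g", CLOBBERS the list containing "cc", and VOL set.  */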
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
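
/* Source-level illustration (a sketch, not code used here): with
   -Wunused, the statement

     x + 1;		-- warns: value computed is not used

   reaches the default case above, while

     (void) f ();	-- no warning: cast to void
     y = f ();		-- no warning: MODIFY_EXPR

   are filtered out by the earlier cases.  */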

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
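
/* Source-level illustration (a sketch, not code used here): for the
   GNU C statement expression

     z = ({ int y = f (); y + 1; });

   expand_start_stmt_expr is called at the `({', each inner statement
   goes through expand_expr_stmt (which records last_expr_value, since
   expr_stmts_for_value is nonzero), and expand_end_stmt_expr then
   packages the insns and the value of `y + 1' into an RTL_EXPR.  */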
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }
*/

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}
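
/* Illustrative note (an interpretation of the code above): LEVEL
   counts exception constructs outward from the innermost one, so
   in_try_block (0) asks whether the innermost construct is still in
   its try block, and in_try_block (1) asks the same of the next
   enclosing construct.  */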

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the construct we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}

/* End of a TRY block.  Generate the jump past the handler clauses
   to the end of the whole construct.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (NULL_TREE,
			      n->data.except_stmt.escape_label, NULL_RTX);
	return 1;
      }

  return 0;
}
1671
1672/* Finish processing and `except' contour.
1673 Culls out all exceptions which might be raise but not
1674 handled, and returns the list to the caller.
1675 Language-specific code is responsible for dealing with these
1676 exceptions. */
1677
1678tree
1679expand_end_except ()
1680{
1681 struct nesting *n;
1682 tree raised = NULL_TREE;
1683
1684 do_pending_stack_adjust ();
1685 emit_label (except_stack->data.except_stmt.after_label);
1686
1687 n = except_stack->next;
1688 if (n)
1689 {
1690 /* Propagate exceptions raised but not handled to next
1691 highest level. */
1692 tree handled = except_stack->data.except_stmt.raised;
1693 if (handled != void_type_node)
1694 {
1695 tree prev = NULL_TREE;
1696 raised = except_stack->data.except_stmt.raised;
1697 while (handled)
1698 {
1699 tree this_raise;
1700 for (this_raise = raised, prev = 0; this_raise;
1701 this_raise = TREE_CHAIN (this_raise))
1702 {
1703 if (value_member (TREE_VALUE (this_raise), handled))
1704 {
1705 if (prev)
1706 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
1707 else
1708 {
1709 raised = TREE_CHAIN (raised);
1710 if (raised == NULL_TREE)
1711 goto nada;
1712 }
1713 }
1714 else
1715 prev = this_raise;
1716 }
1717 handled = TREE_CHAIN (handled);
1718 }
1719 if (prev == NULL_TREE)
1720 prev = raised;
1721 if (prev)
1722 TREE_CHAIN (prev) = n->data.except_stmt.raised;
1723 nada:
1724 n->data.except_stmt.raised = raised;
1725 }
1726 }
1727
1728 POPSTACK (except_stack);
1729 last_expr_type = 0;
1730 return raised;
1731}
1732
1733/* Record that exception EX is caught by this exception handler.
1734 Return nonzero if in exception handling construct, otherwise return 0. */
1735int
1736expand_catch (ex)
1737 tree ex;
1738{
1739 tree *raises_ptr;
1740
1741 if (except_stack == 0)
1742 return 0;
1743 raises_ptr = &except_stack->data.except_stmt.handled;
1744 if (*raises_ptr != void_type_node
1745 && ex != NULL_TREE
1746 && ! value_member (ex, *raises_ptr))
1747 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1748 return 1;
1749}
1750
1751/* Record that this exception handler catches all exceptions.
1752 Return nonzero if in exception handling construct, otherwise return 0. */
1753
1754int
1755expand_catch_default ()
1756{
1757 if (except_stack == 0)
1758 return 0;
1759 except_stack->data.except_stmt.handled = void_type_node;
1760 return 1;
1761}
1762
1763int
1764expand_end_catch ()
1765{
1766 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
1767 return 0;
1768 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1769 NULL_RTX);
1770 return 1;
1771}
1772\f
1773/* Generate RTL for the start of an if-then. COND is the expression
1774 whose truth should be tested.
1775
1776 If EXITFLAG is nonzero, this conditional is visible to
1777 `exit_something'. */
1778
1779void
1780expand_start_cond (cond, exitflag)
1781 tree cond;
1782 int exitflag;
1783{
1784 struct nesting *thiscond = ALLOC_NESTING ();
1785
1786 /* Make an entry on cond_stack for the cond we are entering. */
1787
1788 thiscond->next = cond_stack;
1789 thiscond->all = nesting_stack;
1790 thiscond->depth = ++nesting_depth;
1791 thiscond->data.cond.next_label = gen_label_rtx ();
1792 /* Before we encounter an `else', we don't need a separate exit label
1793 unless there are supposed to be exit statements
1794 to exit this conditional. */
1795 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1796 thiscond->data.cond.endif_label = thiscond->exit_label;
1797 cond_stack = thiscond;
1798 nesting_stack = thiscond;
1799
1800 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1801}
1802
1803/* Generate RTL between the then-clause and the elseif-clause
1804 of an if-then-elseif-.... */
1805
1806void
1807expand_start_elseif (cond)
1808 tree cond;
1809{
1810 if (cond_stack->data.cond.endif_label == 0)
1811 cond_stack->data.cond.endif_label = gen_label_rtx ();
1812 emit_jump (cond_stack->data.cond.endif_label);
1813 emit_label (cond_stack->data.cond.next_label);
1814 cond_stack->data.cond.next_label = gen_label_rtx ();
1815 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1816}
1817
1818/* Generate RTL between the then-clause and the else-clause
1819 of an if-then-else. */
1820
1821void
1822expand_start_else ()
1823{
1824 if (cond_stack->data.cond.endif_label == 0)
1825 cond_stack->data.cond.endif_label = gen_label_rtx ();
1826 emit_jump (cond_stack->data.cond.endif_label);
1827 emit_label (cond_stack->data.cond.next_label);
1828 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1829}
1830
1831/* Generate RTL for the end of an if-then.
1832 Pop the record for it off of cond_stack. */
1833
1834void
1835expand_end_cond ()
1836{
1837 struct nesting *thiscond = cond_stack;
1838
1839 do_pending_stack_adjust ();
1840 if (thiscond->data.cond.next_label)
1841 emit_label (thiscond->data.cond.next_label);
1842 if (thiscond->data.cond.endif_label)
1843 emit_label (thiscond->data.cond.endif_label);
1844
1845 POPSTACK (cond_stack);
1846 last_expr_type = 0;
1847}
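/* Editorial sketch: expanding `if (a) s1; else if (b) s2; else s3;'
   with the functions above:

	expand_start_cond (a, 0);	...expand s1...
	expand_start_elseif (b);	...expand s2...
	expand_start_else ();		...expand s3...
	expand_end_cond ();

   Each call emits the jumps and labels that bracket the clause the
   parser expands next.  */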
1848\f
1849/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1850 loop should be exited by `exit_something'. This is a loop for which
1851 `expand_continue' will jump to the top of the loop.
1852
1853 Make an entry on loop_stack to record the labels associated with
1854 this loop. */
1855
1856struct nesting *
1857expand_start_loop (exit_flag)
1858 int exit_flag;
1859{
1860 register struct nesting *thisloop = ALLOC_NESTING ();
1861
1862 /* Make an entry on loop_stack for the loop we are entering. */
1863
1864 thisloop->next = loop_stack;
1865 thisloop->all = nesting_stack;
1866 thisloop->depth = ++nesting_depth;
1867 thisloop->data.loop.start_label = gen_label_rtx ();
1868 thisloop->data.loop.end_label = gen_label_rtx ();
1869 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1870 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1871 loop_stack = thisloop;
1872 nesting_stack = thisloop;
1873
1874 do_pending_stack_adjust ();
1875 emit_queue ();
1876 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1877 emit_label (thisloop->data.loop.start_label);
1878
1879 return thisloop;
1880}
1881
1882/* Like expand_start_loop but for a loop where the continuation point
1883 (for expand_continue_loop) will be specified explicitly. */
1884
1885struct nesting *
1886expand_start_loop_continue_elsewhere (exit_flag)
1887 int exit_flag;
1888{
1889 struct nesting *thisloop = expand_start_loop (exit_flag);
1890 loop_stack->data.loop.continue_label = gen_label_rtx ();
1891 return thisloop;
1892}
1893
1894/* Specify the continuation point for a loop started with
1895 expand_start_loop_continue_elsewhere.
1896 Use this at the point in the code to which a continue statement
1897 should jump. */
1898
1899void
1900expand_loop_continue_here ()
1901{
1902 do_pending_stack_adjust ();
1903 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1904 emit_label (loop_stack->data.loop.continue_label);
1905}
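/* Editorial sketch: `for (init; cond; incr) body' is typically
   expanded with the continuation point placed before INCR:

	...expand init...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	...expand body...
	expand_loop_continue_here ();	 `continue' jumps here
	...expand incr...
	expand_end_loop ();

   A `while' loop can use plain expand_start_loop, since its
   continue point is the top of the loop.  */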
1906
1907/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1908 Pop the block off of loop_stack. */
1909
1910void
1911expand_end_loop ()
1912{
1913 register rtx insn = get_last_insn ();
1914 register rtx start_label = loop_stack->data.loop.start_label;
1915 rtx last_test_insn = 0;
1916 int num_insns = 0;
1917
1918 /* Mark the continue-point at the top of the loop if none elsewhere. */
1919 if (start_label == loop_stack->data.loop.continue_label)
1920 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1921
1922 do_pending_stack_adjust ();
1923
1924 /* If optimizing, perhaps reorder the loop. If the loop
1925 starts with a conditional exit, roll that to the end
1926 where it will optimize together with the jump back.
1927
1928 We look for the last conditional branch to the exit that we encounter
1929 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1930 branch to the exit first, use it.
1931
1932 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1933 because moving them is not valid. */
1934
1935 if (optimize
1936 &&
1937 ! (GET_CODE (insn) == JUMP_INSN
1938 && GET_CODE (PATTERN (insn)) == SET
1939 && SET_DEST (PATTERN (insn)) == pc_rtx
1940 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1941 {
1942 /* Scan insns from the top of the loop looking for a qualified
1943 conditional exit. */
1944 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1945 insn = NEXT_INSN (insn))
1946 {
1947 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1948 break;
1949
1950 if (GET_CODE (insn) == NOTE
1951 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1952 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1953 break;
1954
1955 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1956 num_insns++;
1957
1958 if (last_test_insn && num_insns > 30)
1959 break;
1960
1961 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1962 && SET_DEST (PATTERN (insn)) == pc_rtx
1963 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1964 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1965 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1966 == loop_stack->data.loop.end_label))
1967 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1968 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1969 == loop_stack->data.loop.end_label))))
1970 last_test_insn = insn;
1971
1972 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1973 && GET_CODE (PATTERN (insn)) == SET
1974 && SET_DEST (PATTERN (insn)) == pc_rtx
1975 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1976 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1977 == loop_stack->data.loop.end_label))
1978 /* Include BARRIER. */
1979 last_test_insn = NEXT_INSN (insn);
1980 }
1981
1982 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1983 {
1984 /* We found one. Move everything from there up
1985 to the end of the loop, and add a jump into the loop
1986 to jump to there. */
1987 register rtx newstart_label = gen_label_rtx ();
1988 register rtx start_move = start_label;
1989
1990 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1991 then we want to move this note also. */
1992 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1993 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1994 == NOTE_INSN_LOOP_CONT))
1995 start_move = PREV_INSN (start_move);
1996
1997 emit_label_after (newstart_label, PREV_INSN (start_move));
1998 reorder_insns (start_move, last_test_insn, get_last_insn ());
1999 emit_jump_insn_after (gen_jump (start_label),
2000 PREV_INSN (newstart_label));
2001 emit_barrier_after (PREV_INSN (newstart_label));
2002 start_label = newstart_label;
2003 }
2004 }
2005
2006 emit_jump (start_label);
2007 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2008 emit_label (loop_stack->data.loop.end_label);
2009
2010 POPSTACK (loop_stack);
2011
2012 last_expr_type = 0;
2013}
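/* Editorial illustration of the reordering above.  A loop laid out as

	start:	if (exit_cond) goto end;
		body;
		goto start;
	end:

   runs two jumps per iteration.  Rolling the exit test to the bottom
   gives

		goto start;
	newstart:	body;
	start:	if (exit_cond) goto end;
		goto newstart;
	end:

   which later jump optimization folds into one conditional jump,
   `if (! exit_cond) goto newstart', per iteration.  */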
2014
2015/* Generate a jump to the current loop's continue-point.
2016 This is usually the top of the loop, but may be specified
2017 explicitly elsewhere. If not currently inside a loop,
2018 return 0 and do nothing; caller will print an error message. */
2019
2020int
2021expand_continue_loop (whichloop)
2022 struct nesting *whichloop;
2023{
2024 last_expr_type = 0;
2025 if (whichloop == 0)
2026 whichloop = loop_stack;
2027 if (whichloop == 0)
2028 return 0;
2029 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2030 NULL_RTX);
2031 return 1;
2032}
2033
2034/* Generate a jump to exit the current loop. If not currently inside a loop,
2035 return 0 and do nothing; caller will print an error message. */
2036
2037int
2038expand_exit_loop (whichloop)
2039 struct nesting *whichloop;
2040{
2041 last_expr_type = 0;
2042 if (whichloop == 0)
2043 whichloop = loop_stack;
2044 if (whichloop == 0)
2045 return 0;
2046 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2047 return 1;
2048}
2049
2050/* Generate a conditional jump to exit the current loop if COND
2051 evaluates to zero. If not currently inside a loop,
2052 return 0 and do nothing; caller will print an error message. */
2053
2054int
2055expand_exit_loop_if_false (whichloop, cond)
2056 struct nesting *whichloop;
2057 tree cond;
2058{
2059 last_expr_type = 0;
2060 if (whichloop == 0)
2061 whichloop = loop_stack;
2062 if (whichloop == 0)
2063 return 0;
2064 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2065 return 1;
2066}
2067
2068/* Return non-zero if we should preserve sub-expressions as separate
2069 pseudos. We never do so if we aren't optimizing. We always do so
2070 if -fexpensive-optimizations.
2071
2072 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2073 the loop may still be a small one. */
2074
2075int
2076preserve_subexpressions_p ()
2077{
2078 rtx insn;
2079
2080 if (flag_expensive_optimizations)
2081 return 1;
2082
2083 if (optimize == 0 || loop_stack == 0)
2084 return 0;
2085
2086 insn = get_last_insn_anywhere ();
2087
2088 return (insn
2089 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2090 < n_non_fixed_regs * 3));
2091
2092}
2093
2094/* Generate a jump to exit the current loop, conditional, binding contour
2095 or case statement. Not all such constructs are visible to this function,
2096 only those started with EXIT_FLAG nonzero. Individual languages use
2097 the EXIT_FLAG parameter to control which kinds of constructs you can
2098 exit this way.
2099
2100 If not currently inside anything that can be exited,
2101 return 0 and do nothing; caller will print an error message. */
2102
2103int
2104expand_exit_something ()
2105{
2106 struct nesting *n;
2107 last_expr_type = 0;
2108 for (n = nesting_stack; n; n = n->all)
2109 if (n->exit_label != 0)
2110 {
2111 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2112 return 1;
2113 }
2114
2115 return 0;
2116}
2117\f
2118/* Generate RTL to return from the current function, with no value.
2119 (That is, we do not do anything about returning any value.) */
2120
2121void
2122expand_null_return ()
2123{
2124 struct nesting *block = block_stack;
2125 rtx last_insn = 0;
2126
2127 /* Does any pending block have cleanups? */
2128
2129 while (block && block->data.block.cleanups == 0)
2130 block = block->next;
2131
2132 /* If yes, use a goto to return, since that runs cleanups. */
2133
2134 expand_null_return_1 (last_insn, block != 0);
2135}
2136
2137/* Generate RTL to return from the current function, with value VAL. */
2138
2139void
2140expand_value_return (val)
2141 rtx val;
2142{
2143 struct nesting *block = block_stack;
2144 rtx last_insn = get_last_insn ();
2145 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2146
2147 /* Copy the value to the return location
2148 unless it's already there. */
2149
2150 if (return_reg != val)
2151 emit_move_insn (return_reg, val);
2152 if (GET_CODE (return_reg) == REG
2153 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2154 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2155
2156 /* Does any pending block have cleanups? */
2157
2158 while (block && block->data.block.cleanups == 0)
2159 block = block->next;
2160
2161 /* If yes, use a goto to return, since that runs cleanups.
2162 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2163
2164 expand_null_return_1 (last_insn, block != 0);
2165}
2166
2167/* Output a return with no value. If LAST_INSN is nonzero,
2168 pretend that the return takes place after LAST_INSN.
2169 If USE_GOTO is nonzero then don't use a return instruction;
2170 go to the return label instead. This causes any cleanups
2171 of pending blocks to be executed normally. */
2172
2173static void
2174expand_null_return_1 (last_insn, use_goto)
2175 rtx last_insn;
2176 int use_goto;
2177{
2178 rtx end_label = cleanup_label ? cleanup_label : return_label;
2179
2180 clear_pending_stack_adjust ();
2181 do_pending_stack_adjust ();
2182 last_expr_type = 0;
2183
2184 /* PCC-struct return always uses an epilogue. */
2185 if (current_function_returns_pcc_struct || use_goto)
2186 {
2187 if (end_label == 0)
2188 end_label = return_label = gen_label_rtx ();
2189 expand_goto_internal (NULL_TREE, end_label, last_insn);
2190 return;
2191 }
2192
2193 /* Otherwise output a simple return-insn if one is available,
2194 unless it won't do the job. */
2195#ifdef HAVE_return
2196 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2197 {
2198 emit_jump_insn (gen_return ());
2199 emit_barrier ();
2200 return;
2201 }
2202#endif
2203
2204 /* Otherwise jump to the epilogue. */
2205 expand_goto_internal (NULL_TREE, end_label, last_insn);
2206}
2207\f
2208/* Generate RTL to evaluate the expression RETVAL and return it
2209 from the current function. */
2210
2211void
2212expand_return (retval)
2213 tree retval;
2214{
2215 /* If there are any cleanups to be performed, then they will
2216 be inserted following LAST_INSN. It is desirable
2217 that the last_insn, for such purposes, should be the
2218 last insn before computing the return value. Otherwise, cleanups
2219 which call functions can clobber the return value. */
2220 /* ??? rms: I think that is erroneous, because in C++ it would
2221 run destructors on variables that might be used in the subsequent
2222 computation of the return value. */
2223 rtx last_insn = 0;
2224 register rtx val = 0;
2225 register rtx op0;
2226 tree retval_rhs;
2227 int cleanups;
2228 struct nesting *block;
2229
2230 /* If function wants no value, give it none. */
2231 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2232 {
2233 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2234 emit_queue ();
2235 expand_null_return ();
2236 return;
2237 }
2238
2239 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2240 cleanups = any_pending_cleanups (1);
2241
2242 if (TREE_CODE (retval) == RESULT_DECL)
2243 retval_rhs = retval;
2244 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2245 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2246 retval_rhs = TREE_OPERAND (retval, 1);
2247 else if (TREE_TYPE (retval) == void_type_node)
2248 /* Recognize tail-recursive call to void function. */
2249 retval_rhs = retval;
2250 else
2251 retval_rhs = NULL_TREE;
2252
2253 /* Only use `last_insn' if there are cleanups which must be run. */
2254 if (cleanups || cleanup_label != 0)
2255 last_insn = get_last_insn ();
2256
2257 /* Distribute return down conditional expr if either of the sides
2258 may involve tail recursion (see test below). This enhances the number
2259 of tail recursions we see. Don't do this always since it can produce
2260 sub-optimal code in some cases and we distribute assignments into
2261 conditional expressions when it would help. */
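/* For example (editorial sketch), `return c ? f (x) : g (x);' is
   treated here as if it were

	if (c) return f (x);
	else return g (x);

   so that each arm can be recognized as a tail call on its own.  */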
2262
2263 if (optimize && retval_rhs != 0
2264 && frame_offset == 0
2265 && TREE_CODE (retval_rhs) == COND_EXPR
2266 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2267 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2268 {
2269 rtx label = gen_label_rtx ();
2270 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2271 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2272 DECL_RESULT (current_function_decl),
2273 TREE_OPERAND (retval_rhs, 1)));
2274 emit_label (label);
2275 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2276 DECL_RESULT (current_function_decl),
2277 TREE_OPERAND (retval_rhs, 2)));
2278 return;
2279 }
2280
2281 /* For tail-recursive call to current function,
2282 just jump back to the beginning.
2283 It's unsafe if any auto variable in this function
2284 has its address taken; for simplicity,
2285 require stack frame to be empty. */
2286 if (optimize && retval_rhs != 0
2287 && frame_offset == 0
2288 && TREE_CODE (retval_rhs) == CALL_EXPR
2289 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2290 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2291 /* Finish checking validity, and if valid emit code
2292 to set the argument variables for the new call. */
2293 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2294 DECL_ARGUMENTS (current_function_decl)))
2295 {
2296 if (tail_recursion_label == 0)
2297 {
2298 tail_recursion_label = gen_label_rtx ();
2299 emit_label_after (tail_recursion_label,
2300 tail_recursion_reentry);
2301 }
2302 emit_queue ();
2303 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2304 emit_barrier ();
2305 return;
2306 }
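/* Editorial sketch: given the checks above, a function such as

	int f (n, acc) int n, acc;
	{ return n == 0 ? acc : f (n - 1, n * acc); }

   has its self-call compiled as moves into the registers of the
   PARM_DECLs followed by a jump to tail_recursion_label, rather
   than a real call, provided the frame is empty and no argument's
   address is taken.  */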
2307#ifdef HAVE_return
2308 /* This optimization is safe if there are local cleanups
2309 because expand_null_return takes care of them.
2310 ??? I think it should also be safe when there is a cleanup label,
2311 because expand_null_return takes care of them, too.
2312 Any reason why not? */
2313 if (HAVE_return && cleanup_label == 0
2314 && ! current_function_returns_pcc_struct)
2315 {
2316 /* If this is return x == y; then generate
2317 if (x == y) return 1; else return 0;
2318 if we can do it with explicit return insns. */
2319 if (retval_rhs)
2320 switch (TREE_CODE (retval_rhs))
2321 {
2322 case EQ_EXPR:
2323 case NE_EXPR:
2324 case GT_EXPR:
2325 case GE_EXPR:
2326 case LT_EXPR:
2327 case LE_EXPR:
2328 case TRUTH_ANDIF_EXPR:
2329 case TRUTH_ORIF_EXPR:
2330 case TRUTH_AND_EXPR:
2331 case TRUTH_OR_EXPR:
2332 case TRUTH_NOT_EXPR:
2333 op0 = gen_label_rtx ();
2334 jumpifnot (retval_rhs, op0);
2335 expand_value_return (const1_rtx);
2336 emit_label (op0);
2337 expand_value_return (const0_rtx);
2338 return;
2339 }
2340 }
2341#endif /* HAVE_return */
2342
2343 if (cleanups
2344 && retval_rhs != 0
2345 && TREE_TYPE (retval_rhs) != void_type_node
2346 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2347 {
2348 /* Calculate the return value into a pseudo reg. */
2349 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2350 emit_queue ();
2351 /* All temporaries have now been used. */
2352 free_temp_slots ();
2353 /* Return the calculated value, doing cleanups first. */
2354 expand_value_return (val);
2355 }
2356 else
2357 {
2358 /* No cleanups or no hard reg used;
2359 calculate value into hard return reg. */
2360 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2361 emit_queue ();
2362 free_temp_slots ();
2363 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2364 }
2365}
2366
2367/* Return 1 if the end of the generated RTX is not a barrier.
2368 This means code already compiled can drop through. */
2369
2370int
2371drop_through_at_end_p ()
2372{
2373 rtx insn = get_last_insn ();
2374 while (insn && GET_CODE (insn) == NOTE)
2375 insn = PREV_INSN (insn);
2376 return insn && GET_CODE (insn) != BARRIER;
2377}
2378\f
2379/* Emit code to alter this function's formal parms for a tail-recursive call.
2380 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2381 FORMALS is the chain of decls of formals.
2382 Return 1 if this can be done;
2383 otherwise return 0 and do not emit any code. */
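/* Editorial example: for the call `f (y, x)' inside `f (x, y)', the
   actual `x' mentions a formal that is stored into first, so the
   copy loop below moves it into a fresh pseudo before the formal
   `x' is overwritten with `y'; without the copy, the store into the
   second formal would read the clobbered value.  */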
2384
2385static int
2386tail_recursion_args (actuals, formals)
2387 tree actuals, formals;
2388{
2389 register tree a = actuals, f = formals;
2390 register int i;
2391 register rtx *argvec;
2392
2393 /* Check that number and types of actuals are compatible
2394 with the formals. This is not always true in valid C code.
2395 Also check that no formal needs to be addressable
2396 and that all formals are scalars. */
2397
2398 /* Also count the args. */
2399
2400 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2401 {
2402 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2403 return 0;
2404 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2405 return 0;
2406 }
2407 if (a != 0 || f != 0)
2408 return 0;
2409
2410 /* Compute all the actuals. */
2411
2412 argvec = (rtx *) alloca (i * sizeof (rtx));
2413
2414 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2415 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2416
2417 /* Find which actual values refer to current values of previous formals.
2418 Copy each of them now, before any formal is changed. */
2419
2420 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2421 {
2422 int copy = 0;
2423 register int j;
2424 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2425 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2426 { copy = 1; break; }
2427 if (copy)
2428 argvec[i] = copy_to_reg (argvec[i]);
2429 }
2430
2431 /* Store the values of the actuals into the formals. */
2432
2433 for (f = formals, a = actuals, i = 0; f;
2434 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2435 {
2436 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2437 emit_move_insn (DECL_RTL (f), argvec[i]);
2438 else
2439 convert_move (DECL_RTL (f), argvec[i],
2440 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2441 }
2442
2443 free_temp_slots ();
2444 return 1;
2445}
2446\f
2447/* Generate the RTL code for entering a binding contour.
2448 The variables are declared one by one, by calls to `expand_decl'.
2449
2450 EXIT_FLAG is nonzero if this construct should be visible to
2451 `exit_something'. */
2452
2453void
2454expand_start_bindings (exit_flag)
2455 int exit_flag;
2456{
2457 struct nesting *thisblock = ALLOC_NESTING ();
2458
2459 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2460
2461 /* Make an entry on block_stack for the block we are entering. */
2462
2463 thisblock->next = block_stack;
2464 thisblock->all = nesting_stack;
2465 thisblock->depth = ++nesting_depth;
2466 thisblock->data.block.stack_level = 0;
2467 thisblock->data.block.cleanups = 0;
2468 thisblock->data.block.function_call_count = 0;
2469#if 0
2470 if (block_stack)
2471 {
2472 if (block_stack->data.block.cleanups == NULL_TREE
2473 && (block_stack->data.block.outer_cleanups == NULL_TREE
2474 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2475 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2476 else
2477 thisblock->data.block.outer_cleanups
2478 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2479 block_stack->data.block.outer_cleanups);
2480 }
2481 else
2482 thisblock->data.block.outer_cleanups = 0;
2483#endif
2484#if 1
2485 if (block_stack
2486 && !(block_stack->data.block.cleanups == NULL_TREE
2487 && block_stack->data.block.outer_cleanups == NULL_TREE))
2488 thisblock->data.block.outer_cleanups
2489 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2490 block_stack->data.block.outer_cleanups);
2491 else
2492 thisblock->data.block.outer_cleanups = 0;
2493#endif
2494 thisblock->data.block.label_chain = 0;
2495 thisblock->data.block.innermost_stack_block = stack_block_stack;
2496 thisblock->data.block.first_insn = note;
2497 thisblock->data.block.block_start_count = ++block_start_count;
2498 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2499 block_stack = thisblock;
2500 nesting_stack = thisblock;
2501
2502 /* Make a new level for allocating stack slots. */
2503 push_temp_slots ();
2504}
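/* Editorial sketch: a front end expanding `{ int v = e; ... }'
   brackets the block roughly as

	expand_start_bindings (0);
	expand_decl (v_decl);
	expand_decl_init (v_decl);
	...expand the statements...
	expand_end_bindings (v_chain, 1, 0);

   with any cleanups registered in between via expand_decl_cleanup.  */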
2505
2506/* Given a pointer to a BLOCK node, save a pointer to the most recently
2507 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2508 BLOCK node. */
2509
2510void
2511remember_end_note (block)
2512 register tree block;
2513{
2514 BLOCK_END_NOTE (block) = last_block_end_note;
2515 last_block_end_note = NULL_RTX;
2516}
2517
2518/* Generate RTL code to terminate a binding contour.
2519 VARS is the chain of VAR_DECL nodes
2520 for the variables bound in this contour.
2521 MARK_ENDS is nonzero if we should put a note at the beginning
2522 and end of this binding contour.
2523
2524 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2525 (That is true automatically if the contour has a saved stack level.) */
2526
2527void
2528expand_end_bindings (vars, mark_ends, dont_jump_in)
2529 tree vars;
2530 int mark_ends;
2531 int dont_jump_in;
2532{
2533 register struct nesting *thisblock = block_stack;
2534 register tree decl;
2535
2536 if (warn_unused)
2537 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2538 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2539 && ! DECL_IN_SYSTEM_HEADER (decl))
2540 warning_with_decl (decl, "unused variable `%s'");
2541
2542 if (thisblock->exit_label)
2543 {
2544 do_pending_stack_adjust ();
2545 emit_label (thisblock->exit_label);
2546 }
2547
2548 /* If necessary, make a handler for nonlocal gotos taking
2549 place in the function calls in this block. */
2550 if (function_call_count != thisblock->data.block.function_call_count
2551 && nonlocal_labels
2552 /* Make handler for outermost block
2553 if there were any nonlocal gotos to this function. */
2554 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2555 /* Make handler for inner block if it has something
2556 special to do when you jump out of it. */
2557 : (thisblock->data.block.cleanups != 0
2558 || thisblock->data.block.stack_level != 0)))
2559 {
2560 tree link;
2561 rtx afterward = gen_label_rtx ();
2562 rtx handler_label = gen_label_rtx ();
2563 rtx save_receiver = gen_reg_rtx (Pmode);
2564
2565 /* Don't let jump_optimize delete the handler. */
2566 LABEL_PRESERVE_P (handler_label) = 1;
2567
2568 /* Record the handler address in the stack slot for that purpose,
2569 during this block, saving and restoring the outer value. */
2570 if (thisblock->next != 0)
2571 {
2572 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2573 emit_insn_before (gen_move_insn (save_receiver,
2574 nonlocal_goto_handler_slot),
2575 thisblock->data.block.first_insn);
2576 }
2577 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2578 gen_rtx (LABEL_REF, Pmode,
2579 handler_label)),
2580 thisblock->data.block.first_insn);
2581
2582 /* Jump around the handler; it runs only when specially invoked. */
2583 emit_jump (afterward);
2584 emit_label (handler_label);
2585
2586#ifdef HAVE_nonlocal_goto
2587 if (! HAVE_nonlocal_goto)
2588#endif
2589 /* First adjust our frame pointer to its actual value. It was
2590 previously set to the start of the virtual area corresponding to
2591 the stacked variables when we branched here and now needs to be
2592 adjusted to the actual hardware fp value.
2593
2594 Assignments to virtual registers are converted by
2595 instantiate_virtual_regs into the corresponding assignment
2596 to the underlying register (fp in this case) that makes
2597 the original assignment true.
2598 So the following insn will actually be
2599 decrementing fp by STARTING_FRAME_OFFSET. */
2600 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2601
2602#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2603 if (fixed_regs[ARG_POINTER_REGNUM])
2604 {
2605#ifdef ELIMINABLE_REGS
2606 /* If the argument pointer can be eliminated in favor of the
2607 frame pointer, we don't need to restore it. We assume here
2608 that if such an elimination is present, it can always be used.
2609 This is the case on all known machines; if we don't make this
2610 assumption, we do unnecessary saving on many machines. */
2611 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2612 int i;
2613
2614 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2615 if (elim_regs[i].from == ARG_POINTER_REGNUM
2616 && elim_regs[i].to == FRAME_POINTER_REGNUM)
2617 break;
2618
2619 if (i == sizeof elim_regs / sizeof elim_regs [0])
2620#endif
2621 {
2622 /* Now restore our arg pointer from the address at which it
2623 was saved in our stack frame.
2624 If there hasn't been space allocated for it yet, make
2625 some now. */
2626 if (arg_pointer_save_area == 0)
2627 arg_pointer_save_area
2628 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2629 emit_move_insn (virtual_incoming_args_rtx,
2630 /* We need a pseudo here, or else
2631 instantiate_virtual_regs_1 complains. */
2632 copy_to_reg (arg_pointer_save_area));
2633 }
2634 }
2635#endif
2636
2637 /* The handler expects the desired label address in the static chain
2638 register. It tests the address and does an appropriate jump
2639 to whatever label is desired. */
2640 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2641 /* Skip any labels we shouldn't be able to jump to from here. */
2642 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2643 {
2644 rtx not_this = gen_label_rtx ();
2645 rtx this = gen_label_rtx ();
2646 do_jump_if_equal (static_chain_rtx,
2647 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2648 this, 0);
2649 emit_jump (not_this);
2650 emit_label (this);
2651 expand_goto (TREE_VALUE (link));
2652 emit_label (not_this);
2653 }
2654 /* If label is not recognized, abort. */
2655 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2656 VOIDmode, 0);
2657 emit_label (afterward);
2658 }
2659
2660 /* Don't allow jumping into a block that has cleanups or a stack level. */
2661 if (dont_jump_in
2662 || thisblock->data.block.stack_level != 0
2663 || thisblock->data.block.cleanups != 0)
2664 {
2665 struct label_chain *chain;
2666
2667 /* Any labels in this block are no longer valid to go to.
2668 Mark them to cause an error message. */
2669 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2670 {
2671 DECL_TOO_LATE (chain->label) = 1;
2672 /* If any goto without a fixup came to this label,
2673 that must be an error, because gotos without fixups
2674 come from outside all saved stack-levels and all cleanups. */
2675 if (TREE_ADDRESSABLE (chain->label))
2676 error_with_decl (chain->label,
2677 "label `%s' used before containing binding contour");
2678 }
2679 }
2680
2681 /* Restore stack level in effect before the block
2682 (only if variable-size objects allocated). */
2683 /* Perform any cleanups associated with the block. */
2684
2685 if (thisblock->data.block.stack_level != 0
2686 || thisblock->data.block.cleanups != 0)
2687 {
2688 /* Don't let cleanups affect ({...}) constructs. */
2689 int old_expr_stmts_for_value = expr_stmts_for_value;
2690 rtx old_last_expr_value = last_expr_value;
2691 tree old_last_expr_type = last_expr_type;
2692 expr_stmts_for_value = 0;
2693
2694 /* Do the cleanups. */
2695 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2696 do_pending_stack_adjust ();
2697
2698 expr_stmts_for_value = old_expr_stmts_for_value;
2699 last_expr_value = old_last_expr_value;
2700 last_expr_type = old_last_expr_type;
2701
2702 /* Restore the stack level. */
2703
2704 if (thisblock->data.block.stack_level != 0)
2705 {
2706 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2707 thisblock->data.block.stack_level, NULL_RTX);
2708 if (nonlocal_goto_handler_slot != 0)
2709 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
2710 NULL_RTX);
2711 }
2712
2713 /* Any gotos out of this block must also do these things.
2714 Also report any gotos with fixups that came to labels in this
2715 level. */
2716 fixup_gotos (thisblock,
2717 thisblock->data.block.stack_level,
2718 thisblock->data.block.cleanups,
2719 thisblock->data.block.first_insn,
2720 dont_jump_in);
2721 }
2722
2723 /* Mark the beginning and end of the scope if requested.
2724 We do this now, after running cleanups on the variables
2725 just going out of scope, so they are in scope for their cleanups. */
2726
2727 if (mark_ends)
2728 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
2729 else
2730 /* Get rid of the beginning-mark if we don't make an end-mark. */
2731 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2732
2733 /* If doing stupid register allocation, make sure lives of all
2734 register variables declared here extend thru end of scope. */
2735
2736 if (obey_regdecls)
2737 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2738 {
2739 rtx rtl = DECL_RTL (decl);
2740 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2741 use_variable (rtl);
2742 }
2743
2744 /* Restore block_stack level for containing block. */
2745
2746 stack_block_stack = thisblock->data.block.innermost_stack_block;
2747 POPSTACK (block_stack);
2748
2749 /* Pop the stack slot nesting and free any slots at this level. */
2750 pop_temp_slots ();
2751}
2752\f
2753/* Generate RTL for the automatic variable declaration DECL.
2754 (Other kinds of declarations are simply ignored if seen here.)
2755 CLEANUP is an expression to be executed at exit from this binding contour;
2756 for example, in C++, it might call the destructor for this variable.
2757
2758 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2759 either before or after calling `expand_decl' but before compiling
2760 any subsequent expressions. This is because CLEANUP may be expanded
2761 more than once, on different branches of execution.
2762 For the same reason, CLEANUP may not contain a CALL_EXPR
2763 except as its topmost node--else `preexpand_calls' would get confused.
2764
2765 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2766 that is not associated with any particular variable.
2767
2768 There is no special support here for C++ constructors.
2769 They should be handled by the proper code in DECL_INITIAL. */
2770
2771void
2772expand_decl (decl)
2773 register tree decl;
2774{
2775 struct nesting *thisblock = block_stack;
2776 tree type = TREE_TYPE (decl);
2777
2778 /* Only automatic variables need any expansion done.
2779 Static and external variables, and external functions,
2780 will be handled by `assemble_variable' (called from finish_decl).
2781 TYPE_DECL and CONST_DECL require nothing.
2782 PARM_DECLs are handled in `assign_parms'. */
2783
2784 if (TREE_CODE (decl) != VAR_DECL)
2785 return;
2786 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2787 return;
2788
2789 /* Create the RTL representation for the variable. */
2790
2791 if (type == error_mark_node)
2792 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2793 else if (DECL_SIZE (decl) == 0)
2794 /* Variable with incomplete type. */
2795 {
2796 if (DECL_INITIAL (decl) == 0)
2797 /* Error message was already done; now avoid a crash. */
2798 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2799 else
2800 /* An initializer is going to decide the size of this array.
2801 Until we know the size, represent its address with a reg. */
2802 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2803 }
2804 else if (DECL_MODE (decl) != BLKmode
2805 /* If -ffloat-store, don't put explicit float vars
2806 into regs. */
2807 && !(flag_float_store
2808 && TREE_CODE (type) == REAL_TYPE)
2809 && ! TREE_THIS_VOLATILE (decl)
2810 && ! TREE_ADDRESSABLE (decl)
2811 && (DECL_REGISTER (decl) || ! obey_regdecls))
2812 {
2813 /* Automatic variable that can go in a register. */
2814 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2815 if (TREE_CODE (type) == POINTER_TYPE)
2816 mark_reg_pointer (DECL_RTL (decl));
2817 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2818 }
2819 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2820 {
2821 /* Variable of fixed size that goes on the stack. */
2822 rtx oldaddr = 0;
2823 rtx addr;
2824
2825 /* If we previously made RTL for this decl, it must be an array
2826 whose size was determined by the initializer.
2827 The old address was a register; set that register now
2828 to the proper address. */
2829 if (DECL_RTL (decl) != 0)
2830 {
2831 if (GET_CODE (DECL_RTL (decl)) != MEM
2832 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2833 abort ();
2834 oldaddr = XEXP (DECL_RTL (decl), 0);
2835 }
2836
2837 DECL_RTL (decl)
2838 = assign_stack_temp (DECL_MODE (decl),
2839 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2840 + BITS_PER_UNIT - 1)
2841 / BITS_PER_UNIT),
2842 1);
2843
2844 /* Set alignment we actually gave this decl. */
2845 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2846 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2847
2848 if (oldaddr)
2849 {
2850 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2851 if (addr != oldaddr)
2852 emit_move_insn (oldaddr, addr);
2853 }
2854
2855 /* If this is a memory ref that contains aggregate components,
2856 mark it as such for cse and loop optimize. */
2857 MEM_IN_STRUCT_P (DECL_RTL (decl))
2858 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2859 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2860 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2861#if 0
2862 /* If this is in memory because of -ffloat-store,
2863 set the volatile bit, to prevent optimizations from
2864 undoing the effects. */
2865 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2866 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2867#endif
2868 }
2869 else
2870 /* Dynamic-size object: must push space on the stack. */
2871 {
2872 rtx address, size;
2873
2874 /* Record the stack pointer on entry to block, if we have
2875 not already done so. */
2876 if (thisblock->data.block.stack_level == 0)
2877 {
2878 do_pending_stack_adjust ();
2879 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2880 &thisblock->data.block.stack_level,
2881 thisblock->data.block.first_insn);
2882 stack_block_stack = thisblock;
2883 }
2884
2885 /* Compute the variable's size, in bytes. */
2886 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2887 DECL_SIZE (decl),
2888 size_int (BITS_PER_UNIT)),
2889 NULL_RTX, VOIDmode, 0);
2890 free_temp_slots ();
2891
2892 /* This is equivalent to calling alloca. */
2893 current_function_calls_alloca = 1;
2894
2895 /* Allocate space on the stack for the variable. */
2896 address = allocate_dynamic_stack_space (size, NULL_RTX,
2897 DECL_ALIGN (decl));
2898
2899 if (nonlocal_goto_handler_slot != 0)
2900 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2901
2902 /* Reference the variable indirect through that rtx. */
2903 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2904
2905 /* If this is a memory ref that contains aggregate components,
2906 mark it as such for cse and loop optimize. */
2907 MEM_IN_STRUCT_P (DECL_RTL (decl))
2908 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2909 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2910 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2911
2912 /* Indicate the alignment we actually gave this variable. */
2913#ifdef STACK_BOUNDARY
2914 DECL_ALIGN (decl) = STACK_BOUNDARY;
2915#else
2916 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2917#endif
2918 }
2919
2920 if (TREE_THIS_VOLATILE (decl))
2921 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2922 if (TREE_READONLY (decl))
2923 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2924
2925 /* If doing stupid register allocation, make sure life of any
2926 register variable starts here, at the start of its scope. */
2927
2928 if (obey_regdecls)
2929 use_variable (DECL_RTL (decl));
2930}
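/* Editorial example: for `void g (n) int n; { char buf[n]; ... }'
   the dynamic-size case above saves the stack pointer on entry to
   the block, computes the size from DECL_SIZE at run time, and
   makes DECL_RTL a MEM at the address returned by
   allocate_dynamic_stack_space, just as alloca would.  */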
2931\f
2932/* Emit code to perform the initialization of a declaration DECL. */
2933
2934void
2935expand_decl_init (decl)
2936 tree decl;
2937{
2938 int was_used = TREE_USED (decl);
2939
2940 if (TREE_STATIC (decl))
2941 return;
2942
2943 /* Compute and store the initial value now. */
2944
2945 if (DECL_INITIAL (decl) == error_mark_node)
2946 {
2947 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2948 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2949 || code == POINTER_TYPE)
2950 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2951 0, 0);
2952 emit_queue ();
2953 }
2954 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2955 {
2956 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2957 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2958 emit_queue ();
2959 }
2960
2961 /* Don't let the initialization count as "using" the variable. */
2962 TREE_USED (decl) = was_used;
2963
2964 /* Free any temporaries we made while initializing the decl. */
2965 free_temp_slots ();
2966}
2967
2968/* CLEANUP is an expression to be executed at exit from this binding contour;
2969 for example, in C++, it might call the destructor for this variable.
2970
2971 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2972 either before or after calling `expand_decl' but before compiling
2973 any subsequent expressions. This is because CLEANUP may be expanded
2974 more than once, on different branches of execution.
2975 For the same reason, CLEANUP may not contain a CALL_EXPR
2976 except as its topmost node--else `preexpand_calls' would get confused.
2977
2978 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2979 that is not associated with any particular variable. */
2980
2981int
2982expand_decl_cleanup (decl, cleanup)
2983 tree decl, cleanup;
2984{
2985 struct nesting *thisblock = block_stack;
2986
2987 /* Error if we are not in any block. */
2988 if (thisblock == 0)
2989 return 0;
2990
2991 /* Record the cleanup if there is one. */
2992
2993 if (cleanup != 0)
2994 {
2995 thisblock->data.block.cleanups
2996 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2997 /* If this block has a cleanup, it belongs in stack_block_stack. */
2998 stack_block_stack = thisblock;
2999 }
3000 return 1;
3001}
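/* Editorial sketch: for a C++ declaration `X x;' the front end
   would call expand_decl on the VAR_DECL and then something like

	expand_decl_cleanup (x_decl, build_destructor_call (x_decl));

   where build_destructor_call stands for a hypothetical front-end
   helper.  The recorded cleanup is expanded by expand_end_bindings
   and by any goto that leaves the contour.  */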
3002\f
3003/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3004 DECL_ELTS is the list of elements that belong to DECL's type.
3005 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3006
3007void
3008expand_anon_union_decl (decl, cleanup, decl_elts)
3009 tree decl, cleanup, decl_elts;
3010{
3011 struct nesting *thisblock = block_stack;
3012 rtx x;
3013
3014 expand_decl (decl);	/* Cleanups are recorded per element below. */
3015 x = DECL_RTL (decl);
3016
3017 while (decl_elts)
3018 {
3019 tree decl_elt = TREE_VALUE (decl_elts);
3020 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3021 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3022
3023 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3024 instead create a new MEM rtx with the proper mode. */
3025 if (GET_CODE (x) == MEM)
3026 {
3027 if (mode == GET_MODE (x))
3028 DECL_RTL (decl_elt) = x;
3029 else
3030 {
3031 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3032 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3033 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3034 }
3035 }
3036 else if (GET_CODE (x) == REG)
3037 {
3038 if (mode == GET_MODE (x))
3039 DECL_RTL (decl_elt) = x;
3040 else
3041 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3042 }
3043 else
3044 abort ();
3045
3046 /* Record the cleanup if there is one. */
3047
3048 if (cleanup != 0)
3049 thisblock->data.block.cleanups
3050 = temp_tree_cons (decl_elt, cleanup_elt,
3051 thisblock->data.block.cleanups);
3052
3053 decl_elts = TREE_CHAIN (decl_elts);
3054 }
3055}
3056\f
3057/* Expand a list of cleanups LIST.
3058 Elements may be expressions or may be nested lists.
3059
3060 If DONT_DO is nonnull, then any list-element
3061 whose TREE_PURPOSE matches DONT_DO is omitted.
3062 This is sometimes used to avoid a cleanup associated with
3063 a value that is being returned out of the scope. */
3064
3065static void
3066expand_cleanups (list, dont_do)
3067 tree list;
3068 tree dont_do;
3069{
3070 tree tail;
3071 for (tail = list; tail; tail = TREE_CHAIN (tail))
3072 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3073 {
3074 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3075 expand_cleanups (TREE_VALUE (tail), dont_do);
3076 else
3077 {
3078 /* Cleanups may be run multiple times. For example,
3079 when exiting a binding contour, we expand the
3080 cleanups associated with that contour. When a goto
3081 within that binding contour has a target outside that
3082 contour, it will expand all cleanups from its scope to
3083 the target. Though the cleanups are expanded multiple
3084 times, the control paths are non-overlapping so the
3085 cleanups will not be executed twice. */
3086 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3087 free_temp_slots ();
3088 }
3089 }
3090}
3091
3092/* Move all cleanups from the current block_stack
3093 to the containing block_stack, where they are assumed to
3094 have been created. If anything can cause a temporary to
3095 be created, but not expanded for more than one level of
3096 block_stacks, then this code will have to change. */
3097
3098void
3099move_cleanups_up ()
3100{
3101 struct nesting *block = block_stack;
3102 struct nesting *outer = block->next;
3103
3104 outer->data.block.cleanups
3105 = chainon (block->data.block.cleanups,
3106 outer->data.block.cleanups);
3107 block->data.block.cleanups = 0;
3108}
3109
3110tree
3111last_cleanup_this_contour ()
3112{
3113 if (block_stack == 0)
3114 return 0;
3115
3116 return block_stack->data.block.cleanups;
3117}
3118
3119/* Return 1 if there are any pending cleanups at this point.
3120 If THIS_CONTOUR is nonzero, check the current contour as well.
3121 Otherwise, look only at the contours that enclose this one. */
3122
3123int
3124any_pending_cleanups (this_contour)
3125 int this_contour;
3126{
3127 struct nesting *block;
3128
3129 if (block_stack == 0)
3130 return 0;
3131
3132 if (this_contour && block_stack->data.block.cleanups != NULL)
3133 return 1;
3134 if (block_stack->data.block.cleanups == 0
3135 && (block_stack->data.block.outer_cleanups == 0
3136#if 0
3137 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3138#endif
3139 ))
3140 return 0;
3141
3142 for (block = block_stack->next; block; block = block->next)
3143 if (block->data.block.cleanups != 0)
3144 return 1;
3145
3146 return 0;
3147}
3148\f
3149/* Enter a case (Pascal) or switch (C) statement.
3150 Push a block onto case_stack and nesting_stack
3151 to accumulate the case-labels that are seen
3152 and to record the labels generated for the statement.
3153
3154 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3155 Otherwise, this construct is transparent for `exit_something'.
3156
3157 EXPR is the index-expression to be dispatched on.
3158 TYPE is its nominal type. We could simply convert EXPR to this type,
3159 but instead we take short cuts. */
3160
3161void
3162expand_start_case (exit_flag, expr, type, printname)
3163 int exit_flag;
3164 tree expr;
3165 tree type;
3166 char *printname;
3167{
3168 register struct nesting *thiscase = ALLOC_NESTING ();
3169
3170 /* Make an entry on case_stack for the case we are entering. */
3171
3172 thiscase->next = case_stack;
3173 thiscase->all = nesting_stack;
3174 thiscase->depth = ++nesting_depth;
3175 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3176 thiscase->data.case_stmt.case_list = 0;
3177 thiscase->data.case_stmt.index_expr = expr;
3178 thiscase->data.case_stmt.nominal_type = type;
3179 thiscase->data.case_stmt.default_label = 0;
3180 thiscase->data.case_stmt.num_ranges = 0;
3181 thiscase->data.case_stmt.printname = printname;
3182 thiscase->data.case_stmt.seenlabel = 0;
3183 case_stack = thiscase;
3184 nesting_stack = thiscase;
3185
3186 do_pending_stack_adjust ();
3187
3188 /* Make sure case_stmt.start points to something that won't
3189 need any transformation before expand_end_case. */
3190 if (GET_CODE (get_last_insn ()) != NOTE)
3191 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3192
3193 thiscase->data.case_stmt.start = get_last_insn ();
3194}
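/* Editorial sketch: a C front end expands `switch (e) { ... }'
   roughly as

	expand_start_case (1, e, type, "switch statement");
	...pushcase ()/pushcase_range () for each label,
	   expanding the statements between them...

   followed by a closing call (expand_end_case, defined later in
   this file) that emits the actual dispatch code.  */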
3195
3196/* Start a "dummy case statement" within which case labels are invalid
3197 and are not connected to any larger real case statement.
3198 This can be used if you don't want to let a case statement jump
3199 into the middle of certain kinds of constructs. */
3200
3201void
3202expand_start_case_dummy ()
3203{
3204 register struct nesting *thiscase = ALLOC_NESTING ();
3205
3206 /* Make an entry on case_stack for the dummy. */
3207
3208 thiscase->next = case_stack;
3209 thiscase->all = nesting_stack;
3210 thiscase->depth = ++nesting_depth;
3211 thiscase->exit_label = 0;
3212 thiscase->data.case_stmt.case_list = 0;
3213 thiscase->data.case_stmt.start = 0;
3214 thiscase->data.case_stmt.nominal_type = 0;
3215 thiscase->data.case_stmt.default_label = 0;
3216 thiscase->data.case_stmt.num_ranges = 0;
3217 case_stack = thiscase;
3218 nesting_stack = thiscase;
3219}
3220
3221/* End a dummy case statement. */
3222
3223void
3224expand_end_case_dummy ()
3225{
3226 POPSTACK (case_stack);
3227}
3228
3229/* Return the data type of the index-expression
3230 of the innermost case statement, or null if none. */
3231
3232tree
3233case_index_expr_type ()
3234{
3235 if (case_stack)
3236 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3237 return 0;
3238}
3239\f
3240/* Accumulate one case or default label inside a case or switch statement.
3241 VALUE is the value of the case (a null pointer, for a default label).
3242
3243 If not currently inside a case or switch statement, return 1 and do
3244 nothing. The caller will print a language-specific error message.
3245 If VALUE is a duplicate or overlaps, return 2 and do nothing
3246 except store the (first) duplicate node in *DUPLICATE.
3247 If VALUE is out of range, return 3 and do nothing.
3248 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3249 Return 0 on success.
3250
3251 Extended to handle range statements. */
3252
3253int
3254pushcase (value, label, duplicate)
3255 register tree value;
3256 register tree label;
3257 tree *duplicate;
3258{
3259 register struct case_node **l;
3260 register struct case_node *n;
3261 tree index_type;
3262 tree nominal_type;
3263
3264 /* Fail if not inside a real case statement. */
3265 if (! (case_stack && case_stack->data.case_stmt.start))
3266 return 1;
3267
3268 if (stack_block_stack
3269 && stack_block_stack->depth > case_stack->depth)
3270 return 5;
3271
3272 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3273 nominal_type = case_stack->data.case_stmt.nominal_type;
3274
3275 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3276 if (index_type == error_mark_node)
3277 return 0;
3278
3279 /* Convert VALUE to the type in which the comparisons are nominally done. */
3280 if (value != 0)
3281 value = convert (nominal_type, value);
3282
3283 /* If this is the first label, warn if any insns have been emitted. */
3284 if (case_stack->data.case_stmt.seenlabel == 0)
3285 {
3286 rtx insn;
3287 for (insn = case_stack->data.case_stmt.start;
3288 insn;
3289 insn = NEXT_INSN (insn))
3290 {
3291 if (GET_CODE (insn) == CODE_LABEL)
3292 break;
3293 if (GET_CODE (insn) != NOTE
3294 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3295 {
3296 warning ("unreachable code at beginning of %s",
3297 case_stack->data.case_stmt.printname);
3298 break;
3299 }
3300 }
3301 }
3302 case_stack->data.case_stmt.seenlabel = 1;
3303
3304 /* Fail if this value is out of range for the actual type of the index
3305 (which may be narrower than NOMINAL_TYPE). */
3306 if (value != 0 && ! int_fits_type_p (value, index_type))
3307 return 3;
3308
3309 /* Fail if this is a duplicate or overlaps another entry. */
3310 if (value == 0)
3311 {
3312 if (case_stack->data.case_stmt.default_label != 0)
3313 {
3314 *duplicate = case_stack->data.case_stmt.default_label;
3315 return 2;
3316 }
3317 case_stack->data.case_stmt.default_label = label;
3318 }
3319 else
3320 {
3321 /* Find the elt in the chain before which to insert the new value,
3322 to keep the chain sorted in increasing order.
3323 But report an error if this element is a duplicate. */
3324 for (l = &case_stack->data.case_stmt.case_list;
3325 /* Keep going past elements distinctly less than VALUE. */
3326 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3327 l = &(*l)->right)
3328 ;
3329 if (*l)
3330 {
3331 /* Element we will insert before must be distinctly greater;
3332 overlap means error. */
3333 if (! tree_int_cst_lt (value, (*l)->low))
3334 {
3335 *duplicate = (*l)->code_label;
3336 return 2;
3337 }
3338 }
3339
3340 /* Add this label to the chain, and succeed.
3341 Copy VALUE so it is on temporary rather than momentary
3342 obstack and will thus survive till the end of the case statement. */
3343 n = (struct case_node *) oballoc (sizeof (struct case_node));
3344 n->left = 0;
3345 n->right = *l;
3346 n->high = n->low = copy_node (value);
3347 n->code_label = label;
3348 *l = n;
3349 }
3350
3351 expand_label (label);
3352 return 0;
3353}
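/* Editorial example: after `case 3:' then `case 1:', the insertion
   walk above places 1 before 3, keeping the chain sorted.  A second
   `case 3:' stops at the existing node, fails the `value < low'
   test, and returns 2 with *DUPLICATE set to the first label.  */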
3354
3355/* Like pushcase but this case applies to all values
3356 between VALUE1 and VALUE2 (inclusive).
3357 The return value is the same as that of pushcase
3358 but there is one additional error code:
3359 4 means the specified range was empty. */
3360
3361int
3362pushcase_range (value1, value2, label, duplicate)
3363 register tree value1, value2;
3364 register tree label;
3365 tree *duplicate;
3366{
3367 register struct case_node **l;
3368 register struct case_node *n;
3369 tree index_type;
3370 tree nominal_type;
3371
3372 /* Fail if not inside a real case statement. */
3373 if (! (case_stack && case_stack->data.case_stmt.start))
3374 return 1;
3375
3376 if (stack_block_stack
3377 && stack_block_stack->depth > case_stack->depth)
3378 return 5;
3379
3380 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3381 nominal_type = case_stack->data.case_stmt.nominal_type;
3382
3383 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3384 if (index_type == error_mark_node)
3385 return 0;
3386
3387 /* If this is the first label, warn if any insns have been emitted. */
3388 if (case_stack->data.case_stmt.seenlabel == 0)
3389 {
3390 rtx insn;
3391 for (insn = case_stack->data.case_stmt.start;
3392 insn;
3393 insn = NEXT_INSN (insn))
3394 {
3395 if (GET_CODE (insn) == CODE_LABEL)
3396 break;
3397 if (GET_CODE (insn) != NOTE
3398 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3399 {
3400 warning ("unreachable code at beginning of %s",
3401 case_stack->data.case_stmt.printname);
3402 break;
3403 }
3404 }
3405 }
3406 case_stack->data.case_stmt.seenlabel = 1;
3407
3408 /* Convert VALUEs to type in which the comparisons are nominally done. */
3409 if (value1 == 0) /* Negative infinity. */
3410 value1 = TYPE_MIN_VALUE(index_type);
3411 value1 = convert (nominal_type, value1);
3412
3413 if (value2 == 0) /* Positive infinity. */
3414 value2 = TYPE_MAX_VALUE(index_type);
3415 value2 = convert (nominal_type, value2);
3416
3417 /* Fail if these values are out of range. */
3418 if (! int_fits_type_p (value1, index_type))
3419 return 3;
3420
3421 if (! int_fits_type_p (value2, index_type))
3422 return 3;
3423
3424 /* Fail if the range is empty. */
3425 if (tree_int_cst_lt (value2, value1))
3426 return 4;
3427
3428 /* If the bounds are equal, turn this into the one-value case. */
3429 if (tree_int_cst_equal (value1, value2))
3430 return pushcase (value1, label, duplicate);
3431
3432 /* Find the elt in the chain before which to insert the new value,
3433 to keep the chain sorted in increasing order.
3434 But report an error if this element is a duplicate. */
3435 for (l = &case_stack->data.case_stmt.case_list;
3436 /* Keep going past elements distinctly less than this range. */
3437 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3438 l = &(*l)->right)
3439 ;
3440 if (*l)
3441 {
3442 /* Element we will insert before must be distinctly greater;
3443 overlap means error. */
3444 if (! tree_int_cst_lt (value2, (*l)->low))
3445 {
3446 *duplicate = (*l)->code_label;
3447 return 2;
3448 }
3449 }
3450
3451 /* Add this label to the chain, and succeed.
3452 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3453 obstack and will thus survive till the end of the case statement. */
3454
3455 n = (struct case_node *) oballoc (sizeof (struct case_node));
3456 n->left = 0;
3457 n->right = *l;
3458 n->low = copy_node (value1);
3459 n->high = copy_node (value2);
3460 n->code_label = label;
3461 *l = n;
3462
3463 expand_label (label);
3464
3465 case_stack->data.case_stmt.num_ranges++;
3466
3467 return 0;
3468}
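
pushcase_range follows the same insertion pattern; the new wrinkles are the empty-range test and that the overlap check compares the range's upper bound against the successor's lower bound. Continuing the integer model sketched after pushcase above (again illustrative only, not stmt.c code):

/* Insert the case range [LO, HI]. Return 4 for an empty range,
   2 on overlap, 0 on success. */
static int
push_range (struct cnode **chain, int lo, int hi)
{
  struct cnode **l, *n;

  if (hi < lo)
    return 4;
  for (l = chain; *l != 0 && (*l)->high < lo; l = &(*l)->right)
    ;
  /* Overlap unless the whole range is below the successor's low. */
  if (*l != 0 && ! (hi < (*l)->low))
    return 2;
  n = (struct cnode *) malloc (sizeof (struct cnode));
  n->low = lo;
  n->high = hi;
  n->right = *l;
  *l = n;
  return 0;
}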
3469\f
3470/* Called when the index of a switch statement is an enumerated type
3471 and there is no default label.
3472
3473 Checks that all enumeration literals are covered by the case
3474 expressions of a switch. Also, warn if there are any extra
3475 switch cases that are *not* elements of the enumerated type.
3476
3477 If all enumeration literals were covered by the case expressions,
3478 turn one of the expressions into the default expression since it should
3479 not be possible to fall through such a switch. */
3480
3481void
3482check_for_full_enumeration_handling (type)
3483 tree type;
3484{
3485 register struct case_node *n;
3486 register struct case_node **l;
3487 register tree chain;
3488 int all_values = 1;
3489
3490 /* The time complexity of this loop is currently O(N * M), with
3491 N being the number of enumerals in the enumerated type, and
3492 M being the number of case expressions in the switch. */
3493
3494 for (chain = TYPE_VALUES (type);
3495 chain;
3496 chain = TREE_CHAIN (chain))
3497 {
3498 /* Find a match between enumeral and case expression, if possible.
3499 Quit looking when we've gone too far (since case expressions
3500 are kept sorted in ascending order). Warn about enumerals not
3501 handled in the switch statement case expression list. */
3502
3503 for (n = case_stack->data.case_stmt.case_list;
3504 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3505 n = n->right)
3506 ;
3507
3508 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
3509 {
3510 if (warn_switch)
3511 warning ("enumeration value `%s' not handled in switch",
3512 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3513 all_values = 0;
3514 }
3515 }
3516
3517 /* Now we go the other way around; we warn if there are case
3518 expressions that don't correspond to enumerals. This can
3519 occur since C and C++ don't enforce type-checking of
3520 assignments to enumeration variables. */
3521
3522 if (warn_switch)
3523 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3524 {
3525 for (chain = TYPE_VALUES (type);
3526 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3527 chain = TREE_CHAIN (chain))
3528 ;
3529
3530 if (!chain)
3531 warning ("case value `%d' not in enumerated type `%s'",
3532 TREE_INT_CST_LOW (n->low),
3533 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3534 == IDENTIFIER_NODE)
3535 ? TYPE_NAME (type)
3536 : DECL_NAME (TYPE_NAME (type))));
3537 if (!tree_int_cst_equal (n->low, n->high))
3538 {
3539 for (chain = TYPE_VALUES (type);
3540 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
3541 chain = TREE_CHAIN (chain))
3542 ;
3543
3544 if (!chain)
3545 warning ("case value `%d' not in enumerated type `%s'",
3546 TREE_INT_CST_LOW (n->high),
3547 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3548 == IDENTIFIER_NODE)
3549 ? TYPE_NAME (type)
3550 : DECL_NAME (TYPE_NAME (type))));
3551 }
3552 }
3553
3554 /* If all values were found as case labels, make one of them the default
3555 label. Thus, this switch will never fall through. We arbitrarily pick
3556 the last one to make the default since this is likely the most
3557 efficient choice. */
3558
3559 if (all_values)
3560 {
3561 for (l = &case_stack->data.case_stmt.case_list;
3562 (*l)->right != 0;
3563 l = &(*l)->right)
3564 ;
3565
3566 case_stack->data.case_stmt.default_label = (*l)->code_label;
3567 *l = 0;
3568 }
3569}
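
The forward walk above leans on the case chain being sorted: the scan for each enumeral stops as soon as it passes the place where that value would have to sit. A self-contained model of that direction of the check, reusing `struct cnode' from the earlier sketch (names invented for illustration):

#include <stdio.h>

/* Return 1 if every value in ENUMS[0..N) is covered by the sorted
   case chain, printing a warning for each value that is not. */
static int
enums_all_covered (const int *enums, int n, struct cnode *chain)
{
  int covered = 1, i;

  for (i = 0; i < n; i++)
    {
      struct cnode *p = chain;
      while (p != 0 && p->high < enums[i])
        p = p->right;
      if (p == 0 || enums[i] < p->low)
        {
          printf ("enumeration value %d not handled in switch\n",
                  enums[i]);
          covered = 0;
        }
    }
  return covered;
}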
3570\f
3571/* Terminate a case (Pascal) or switch (C) statement
3572 in which ORIG_INDEX is the expression to be tested.
3573 Generate the code to test it and jump to the right place. */
3574
3575void
3576expand_end_case (orig_index)
3577 tree orig_index;
3578{
3579 tree minval, maxval, range;
3580 rtx default_label = 0;
3581 register struct case_node *n;
3582 int count;
3583 rtx index;
3584 rtx table_label = gen_label_rtx ();
3585 int ncases;
3586 rtx *labelvec;
3587 register int i;
3588 rtx before_case;
3589 register struct nesting *thiscase = case_stack;
3590 tree index_expr = thiscase->data.case_stmt.index_expr;
3591 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3592
3593 do_pending_stack_adjust ();
3594
3595 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3596 if (TREE_TYPE (index_expr) != error_mark_node)
3597 {
3598 /* If switch expression was an enumerated type, check that all
3599 enumeration literals are covered by the cases.
3600 No sense trying this if there's a default case, however. */
3601
3602 if (!thiscase->data.case_stmt.default_label
3603 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3604 && TREE_CODE (index_expr) != INTEGER_CST)
3605 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3606
3607 /* If this is the first label, warn if any insns have been emitted. */
3608 if (thiscase->data.case_stmt.seenlabel == 0)
3609 {
3610 rtx insn;
3611 for (insn = get_last_insn ();
3612 insn != case_stack->data.case_stmt.start;
3613 insn = PREV_INSN (insn))
3614 if (GET_CODE (insn) != NOTE
3615 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
3616 {
3617 warning ("unreachable code at beginning of %s",
3618 case_stack->data.case_stmt.printname);
3619 break;
3620 }
3621 }
3622
3623 /* If we don't have a default-label, create one here,
3624 after the body of the switch. */
3625 if (thiscase->data.case_stmt.default_label == 0)
3626 {
3627 thiscase->data.case_stmt.default_label
3628 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3629 expand_label (thiscase->data.case_stmt.default_label);
3630 }
3631 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3632
3633 before_case = get_last_insn ();
3634
3635 /* Simplify the case-list before we count it. */
3636 group_case_nodes (thiscase->data.case_stmt.case_list);
3637
3638 /* Get upper and lower bounds of case values.
3639 Also convert all the case values to the index expr's data type. */
3640
3641 count = 0;
3642 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3643 {
3644 /* Check low and high label values are integers. */
3645 if (TREE_CODE (n->low) != INTEGER_CST)
3646 abort ();
3647 if (TREE_CODE (n->high) != INTEGER_CST)
3648 abort ();
3649
3650 n->low = convert (TREE_TYPE (index_expr), n->low);
3651 n->high = convert (TREE_TYPE (index_expr), n->high);
3652
3653 /* Count the elements and track the largest and smallest
3654 of them (treating them as signed even if they are not). */
3655 if (count++ == 0)
3656 {
3657 minval = n->low;
3658 maxval = n->high;
3659 }
3660 else
3661 {
3662 if (INT_CST_LT (n->low, minval))
3663 minval = n->low;
3664 if (INT_CST_LT (maxval, n->high))
3665 maxval = n->high;
3666 }
3667 /* A range counts double, since it requires two compares. */
3668 if (! tree_int_cst_equal (n->low, n->high))
3669 count++;
3670 }
3671
3672 /* Compute span of values. */
3673 if (count != 0)
3674 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3675 maxval, minval));
3676
3677 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3678 {
3679 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3680 emit_queue ();
3681 emit_jump (default_label);
3682 }
3683 /* If range of values is much bigger than number of values,
3684 make a sequence of conditional branches instead of a dispatch.
3685 If the switch-index is a constant, do it this way
3686 because we can optimize it. */
3687
3688#ifndef CASE_VALUES_THRESHOLD
3689#ifdef HAVE_casesi
3690#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
3691#else
3692 /* If machine does not have a case insn that compares the
3693 bounds, this means extra overhead for dispatch tables
3694 which raises the threshold for using them. */
3695#define CASE_VALUES_THRESHOLD 5
3696#endif /* HAVE_casesi */
3697#endif /* CASE_VALUES_THRESHOLD */
3698
3699 else if (TREE_INT_CST_HIGH (range) != 0
3700 || count < CASE_VALUES_THRESHOLD
3701 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
3702 > 10 * count)
3703 || TREE_CODE (index_expr) == INTEGER_CST
3704 /* These will reduce to a constant. */
3705 || (TREE_CODE (index_expr) == CALL_EXPR
3706 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3707 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3708 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3709 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3710 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3711 {
3712 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3713
3714 /* If the index is a short or char for which we do not have
3715 an insn to handle comparisons directly, convert it to
3716 a full integer now, rather than letting each comparison
3717 generate the conversion. */
3718
3719 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3720 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
3721 == CODE_FOR_nothing))
3722 {
3723 enum machine_mode wider_mode;
3724 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3725 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3726 if (cmp_optab->handlers[(int) wider_mode].insn_code
3727 != CODE_FOR_nothing)
3728 {
3729 index = convert_to_mode (wider_mode, index, unsignedp);
3730 break;
3731 }
3732 }
3733
3734 emit_queue ();
3735 do_pending_stack_adjust ();
3736
3737 index = protect_from_queue (index, 0);
3738 if (GET_CODE (index) == MEM)
3739 index = copy_to_reg (index);
3740 if (GET_CODE (index) == CONST_INT
3741 || TREE_CODE (index_expr) == INTEGER_CST)
3742 {
3743 /* Make a tree node with the proper constant value
3744 if we don't already have one. */
3745 if (TREE_CODE (index_expr) != INTEGER_CST)
3746 {
3747 index_expr
3748 = build_int_2 (INTVAL (index),
3749 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3750 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3751 }
3752
3753 /* For constant index expressions we need only
3754 issue an unconditional branch to the appropriate
3755 target code. The job of removing any unreachable
3756 code is left to the optimisation phase if the
3757 "-O" option is specified. */
3758 for (n = thiscase->data.case_stmt.case_list;
3759 n;
3760 n = n->right)
3761 {
3762 if (! tree_int_cst_lt (index_expr, n->low)
3763 && ! tree_int_cst_lt (n->high, index_expr))
3764 break;
3765 }
3766 if (n)
3767 emit_jump (label_rtx (n->code_label));
3768 else
3769 emit_jump (default_label);
3770 }
3771 else
3772 {
3773 /* If the index expression is not constant we generate
3774 a binary decision tree to select the appropriate
3775 target code. This is done as follows:
3776
3777 The list of cases is rearranged into a binary tree,
3778 nearly optimal assuming equal probability for each case.
3779
3780 The tree is transformed into RTL, eliminating
3781 redundant test conditions at the same time.
3782
3783 If program flow could reach the end of the
3784 decision tree an unconditional jump to the
3785 default code is emitted. */
3786
3787 use_cost_table
3788 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3789 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3790 balance_case_nodes (&thiscase->data.case_stmt.case_list,
3791 NULL_PTR);
3792 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3793 default_label, TREE_TYPE (index_expr));
3794 emit_jump_if_reachable (default_label);
3795 }
3796 }
3797 else
3798 {
3799 int win = 0;
3800#ifdef HAVE_casesi
3801 if (HAVE_casesi)
3802 {
3803 enum machine_mode index_mode = SImode;
3804 int index_bits = GET_MODE_BITSIZE (index_mode);
3805
3806 /* Convert the index to SImode. */
3807 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3808 > GET_MODE_BITSIZE (index_mode))
3809 {
3810 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3811 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
3812
3813 /* We must handle the endpoints in the original mode. */
3814 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3815 index_expr, minval);
3816 minval = integer_zero_node;
3817 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3818 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
3819 emit_jump_insn (gen_bltu (default_label));
3820 /* Now we can safely truncate. */
3821 index = convert_to_mode (index_mode, index, 0);
3822 }
3823 else
3824 {
3825 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3826 index_expr = convert (type_for_size (index_bits, 0),
3827 index_expr);
3828 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3829 }
3830 emit_queue ();
3831 index = protect_from_queue (index, 0);
3832 do_pending_stack_adjust ();
3833
3834 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
3835 VOIDmode, 0),
3836 expand_expr (range, NULL_RTX,
3837 VOIDmode, 0),
3838 table_label, default_label));
3839 win = 1;
3840 }
3841#endif
3842#ifdef HAVE_tablejump
3843 if (! win && HAVE_tablejump)
3844 {
3845 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3846 fold (build (MINUS_EXPR,
3847 TREE_TYPE (index_expr),
3848 index_expr, minval)));
3849 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3850 emit_queue ();
3851 index = protect_from_queue (index, 0);
3852 do_pending_stack_adjust ();
3853
3854 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3855 expand_expr (range, NULL_RTX, VOIDmode, 0),
3856 table_label, default_label);
3857 win = 1;
3858 }
3859#endif
3860 if (! win)
3861 abort ();
3862
3863 /* Get table of labels to jump to, in order of case index. */
3864
3865 ncases = TREE_INT_CST_LOW (range) + 1;
3866 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3867 bzero (labelvec, ncases * sizeof (rtx));
3868
3869 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3870 {
3871 register HOST_WIDE_INT i
3872 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3873
3874 while (1)
3875 {
3876 labelvec[i]
3877 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3878 if (i + TREE_INT_CST_LOW (minval)
3879 == TREE_INT_CST_LOW (n->high))
3880 break;
3881 i++;
3882 }
3883 }
3884
3885 /* Fill in the gaps with the default. */
3886 for (i = 0; i < ncases; i++)
3887 if (labelvec[i] == 0)
3888 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3889
3890 /* Output the table */
3891 emit_label (table_label);
3892
3893 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3894 were an expression, instead of an #ifdef/#ifndef. */
3895 if (
3896#ifdef CASE_VECTOR_PC_RELATIVE
3897 1 ||
3898#endif
3899 flag_pic)
3900 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3901 gen_rtx (LABEL_REF, Pmode, table_label),
3902 gen_rtvec_v (ncases, labelvec)));
3903 else
3904 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3905 gen_rtvec_v (ncases, labelvec)));
3906
3907 /* If the case insn drops through the table,
3908 after the table we must jump to the default-label.
3909 Otherwise record no drop-through after the table. */
3910#ifdef CASE_DROPS_THROUGH
3911 emit_jump (default_label);
3912#else
3913 emit_barrier ();
3914#endif
3915 }
3916
3917 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3918 reorder_insns (before_case, get_last_insn (),
3919 thiscase->data.case_stmt.start);
3920 }
3921 if (thiscase->exit_label)
3922 emit_label (thiscase->exit_label);
3923
3924 POPSTACK (case_stack);
3925
3926 free_temp_slots ();
3927}
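
Stripped of the RTL details, the table-versus-tree decision above reduces to a small heuristic: emit a dispatch table only when there are at least CASE_VALUES_THRESHOLD range-weighted case values and the span of values is no more than ten times their count. A sketch of just that test (function and parameter names invented; the real code also requires the span to fit in a host word and the index to be non-constant):

/* Nonzero if a jump table looks worthwhile, zero if a comparison
   tree should be emitted instead. RANGE is max - min of the case
   values; COUNT weights each range case as two. */
static int
jump_table_worthwhile (unsigned long range, int count, int threshold)
{
  return count >= threshold && range <= 10UL * (unsigned long) count;
}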
3928
3929/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3930
3931static void
3932do_jump_if_equal (op1, op2, label, unsignedp)
3933 rtx op1, op2, label;
3934 int unsignedp;
3935{
3936 if (GET_CODE (op1) == CONST_INT
3937 && GET_CODE (op2) == CONST_INT)
3938 {
3939 if (INTVAL (op1) == INTVAL (op2))
3940 emit_jump (label);
3941 }
3942 else
3943 {
3944 enum machine_mode mode = GET_MODE (op1);
3945 if (mode == VOIDmode)
3946 mode = GET_MODE (op2);
3947 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
3948 emit_jump_insn (gen_beq (label));
3949 }
3950}
3951\f
3952/* Not all case values are encountered equally. This function
3953 uses a heuristic to weight case labels, in cases where that
3954 looks like a reasonable thing to do.
3955
3956 Right now, all we try to guess is text, and we establish the
3957 following weights:
3958
3959 chars above space: 16
3960 digits: 16
3961 default: 12
3962 space, punct: 8
3963 tab: 4
3964 newline: 2
3965 other "\" chars: 1
3966 remaining chars: 0
3967
3968 If we find any cases in the switch that are not either -1 or in the range
3969 of valid ASCII characters, or are control characters other than those
3970 commonly used with "\", don't treat this switch as scanning text.
3971
3972 Return 1 if these nodes are suitable for cost estimation, otherwise
3973 return 0. */
3974
3975static int
3976estimate_case_costs (node)
3977 case_node_ptr node;
3978{
3979 tree min_ascii = build_int_2 (-1, -1);
3980 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3981 case_node_ptr n;
3982 int i;
3983
3984 /* If we haven't already made the cost table, make it now. Note that the
3985 lower bound of the table is -1, not zero. */
3986
3987 if (cost_table == NULL)
3988 {
3989 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3990 bzero (cost_table - 1, 129 * sizeof (short));
3991
3992 for (i = 0; i < 128; i++)
3993 {
3994 if (isalnum (i))
3995 cost_table[i] = 16;
3996 else if (ispunct (i))
3997 cost_table[i] = 8;
3998 else if (iscntrl (i))
3999 cost_table[i] = -1;
4000 }
4001
4002 cost_table[' '] = 8;
4003 cost_table['\t'] = 4;
4004 cost_table['\0'] = 4;
4005 cost_table['\n'] = 2;
4006 cost_table['\f'] = 1;
4007 cost_table['\v'] = 1;
4008 cost_table['\b'] = 1;
4009 }
4010
4011 /* See if all the case expressions look like text. It is text if the
4012 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4013 as signed arithmetic since we don't want to ever access cost_table with a
4014 value less than -1. Also check that none of the constants in a range
4015 are strange control characters. */
4016
4017 for (n = node; n; n = n->right)
4018 {
4019 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4020 return 0;
4021
4022 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
4023 if (cost_table[i] < 0)
4024 return 0;
4025 }
4026
4027 /* All interesting values are within the range of interesting
4028 ASCII characters. */
4029 return 1;
4030}
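
The pointer returned by xmalloc above is offset by one element so that the table may legally be indexed with -1. A standalone version of the same setup, using a fixed array and a shift-by-one macro instead of the offset pointer (names invented for illustration):

#include <ctype.h>
#include <string.h>

static short text_cost[129]; /* logically indexed -1 .. 127 */
#define TEXT_COST(c) (text_cost[(c) + 1])

static void
init_text_costs (void)
{
  int i;

  memset (text_cost, 0, sizeof text_cost);
  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
        TEXT_COST (i) = 16;
      else if (ispunct (i))
        TEXT_COST (i) = 8;
      else if (iscntrl (i))
        TEXT_COST (i) = -1; /* a "strange" control character */
    }
  /* Characters commonly used with "\" get small positive weights. */
  TEXT_COST (' ') = 8;
  TEXT_COST ('\t') = 4;
  TEXT_COST ('\0') = 4;
  TEXT_COST ('\n') = 2;
  TEXT_COST ('\f') = 1;
  TEXT_COST ('\v') = 1;
  TEXT_COST ('\b') = 1;
}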
4031
4032/* Scan an ordered list of case nodes
4033 combining those with consecutive values or ranges.
4034
4035 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
4036
4037static void
4038group_case_nodes (head)
4039 case_node_ptr head;
4040{
4041 case_node_ptr node = head;
4042
4043 while (node)
4044 {
4045 rtx lb = next_real_insn (label_rtx (node->code_label));
4046 case_node_ptr np = node;
4047
4048 /* Try to group the successors of NODE with NODE. */
4049 while (((np = np->right) != 0)
4050 /* Do they jump to the same place? */
4051 && next_real_insn (label_rtx (np->code_label)) == lb
4052 /* Are their ranges consecutive? */
4053 && tree_int_cst_equal (np->low,
4054 fold (build (PLUS_EXPR,
4055 TREE_TYPE (node->high),
4056 node->high,
4057 integer_one_node)))
4058 /* An overflow is not consecutive. */
4059 && tree_int_cst_lt (node->high,
4060 fold (build (PLUS_EXPR,
4061 TREE_TYPE (node->high),
4062 node->high,
4063 integer_one_node))))
4064 {
4065 node->high = np->high;
4066 }
4067 /* NP is the first node after NODE which can't be grouped with it.
4068 Delete the nodes in between, and move on to that node. */
4069 node->right = np;
4070 node = np;
4071 }
4072}
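
In the integer model used in the earlier sketches, the grouping step looks like this. The real routine additionally requires that merged cases jump to the same insn (the next_real_insn test), which needs RTL and is omitted here; the INT_MAX test stands in for the overflow check:

#include <limits.h>

/* Merge runs of consecutive entries, e.g. 1:, 2:, 3: into 1..3:.
   Skipped nodes are simply unlinked (and leaked, in this sketch). */
static void
group_nodes (struct cnode *head)
{
  struct cnode *node, *np;

  for (node = head; node != 0; node = np)
    {
      np = node->right;
      while (np != 0
             && node->high != INT_MAX /* overflow is not consecutive */
             && np->low == node->high + 1)
        {
          node->high = np->high;
          np = np->right;
        }
      node->right = np;
    }
}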
4073
4074/* Take an ordered list of case nodes
4075 and transform them into a near optimal binary tree,
4076 on the assumption that any target code selection value is as
4077 likely as any other.
4078
4079 The transformation is performed by splitting the ordered
4080 list into two equal sections plus a pivot. The parts are
4081 then attached to the pivot as left and right branches. Each
4082 branch is then transformed recursively. */
4083
4084static void
4085balance_case_nodes (head, parent)
4086 case_node_ptr *head;
4087 case_node_ptr parent;
4088{
4089 register case_node_ptr np;
4090
4091 np = *head;
4092 if (np)
4093 {
4094 int cost = 0;
4095 int i = 0;
4096 int ranges = 0;
4097 register case_node_ptr *npp;
4098 case_node_ptr left;
4099
4100 /* Count the number of entries on branch. Also count the ranges. */
4101
4102 while (np)
4103 {
4104 if (!tree_int_cst_equal (np->low, np->high))
4105 {
4106 ranges++;
4107 if (use_cost_table)
4108 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4109 }
4110
4111 if (use_cost_table)
4112 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4113
4114 i++;
4115 np = np->right;
4116 }
4117
4118 if (i > 2)
4119 {
4120 /* Split this list if it is long enough for that to help. */
4121 npp = head;
4122 left = *npp;
4123 if (use_cost_table)
4124 {
4125 /* Find the place in the list that bisects the list's total cost.
4126 Here I gets half the total cost. */
4127 int n_moved = 0;
4128 i = (cost + 1) / 2;
4129 while (1)
4130 {
4131 /* Skip nodes while their cost does not reach that amount. */
4132 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4133 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4134 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4135 if (i <= 0)
4136 break;
4137 npp = &(*npp)->right;
4138 n_moved += 1;
4139 }
4140 if (n_moved == 0)
4141 {
4142 /* Leave this branch lopsided, but optimize left-hand
4143 side and fill in `parent' fields for right-hand side. */
4144 np = *head;
4145 np->parent = parent;
4146 balance_case_nodes (&np->left, np);
4147 for (; np->right; np = np->right)
4148 np->right->parent = np;
4149 return;
4150 }
4151 }
4152 /* If there are just three nodes, split at the middle one. */
4153 else if (i == 3)
4154 npp = &(*npp)->right;
4155 else
4156 {
4157 /* Find the place in the list that bisects the list's total cost,
4158 where ranges count as 2.
4159 Here I gets half the total cost. */
4160 i = (i + ranges + 1) / 2;
4161 while (1)
4162 {
4163 /* Skip nodes while their cost does not reach that amount. */
4164 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4165 i--;
4166 i--;
4167 if (i <= 0)
4168 break;
4169 npp = &(*npp)->right;
4170 }
4171 }
4172 *head = np = *npp;
4173 *npp = 0;
4174 np->parent = parent;
4175 np->left = left;
4176
4177 /* Optimize each of the two split parts. */
4178 balance_case_nodes (&np->left, np);
4179 balance_case_nodes (&np->right, np);
4180 }
4181 else
4182 {
4183 /* Else leave this branch as one level,
4184 but fill in `parent' fields. */
4185 np = *head;
4186 np->parent = parent;
4187 for (; np->right; np = np->right)
4188 np->right->parent = np;
4189 }
4190 }
4191}
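
Ignoring the cost table, the transform is split-at-the-median: count the chain, walk to its middle node, make that node the root, hang the first half off `left', and recurse on both halves. A sketch over a tree-capable variant of the model node; it assumes the input starts as a pure right-linked chain, and all names are invented for illustration:

struct tnode { int low, high; struct tnode *left, *right, *parent; };

static void
balance_nodes (struct tnode **head, struct tnode *parent)
{
  struct tnode **npp, *np, *pivot, *left;
  int i = 0;

  for (np = *head; np != 0; np = np->right)
    i++;
  if (i > 2)
    {
      /* Walk NPP to the link that names the median node. */
      left = *head;
      npp = head;
      for (i = (i - 1) / 2; i > 0; i--)
        npp = &(*npp)->right;
      pivot = *npp;
      *npp = 0; /* cut the chain just before the median */
      *head = pivot;
      pivot->parent = parent;
      pivot->left = left;
      balance_nodes (&pivot->left, pivot);
      balance_nodes (&pivot->right, pivot);
    }
  else if (*head != 0)
    {
      /* Chain too short to split: leave it linear, fill in parents. */
      np = *head;
      np->parent = parent;
      for (; np->right != 0; np = np->right)
        np->right->parent = np;
    }
}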
4192\f
4193/* Search the parent sections of the case node tree
4194 to see if a test for the lower bound of NODE would be redundant.
4195 INDEX_TYPE is the type of the index expression.
4196
4197 The instructions to generate the case decision tree are
4198 output in the same order as nodes are processed so it is
4199 known that if a parent node checks the range of the current
4200 node minus one that the current node is bounded at its lower
4201 span. Thus the test would be redundant. */
4202
4203static int
4204node_has_low_bound (node, index_type)
4205 case_node_ptr node;
4206 tree index_type;
4207{
4208 tree low_minus_one;
4209 case_node_ptr pnode;
4210
4211 /* If the lower bound of this node is the lowest value in the index type,
4212 we need not test it. */
4213
4214 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4215 return 1;
4216
4217 /* If this node has a left branch, the value at the left must be less
4218 than that at this node, so it cannot be bounded at the bottom and
4219 we need not bother testing any further. */
4220
4221 if (node->left)
4222 return 0;
4223
4224 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4225 node->low, integer_one_node));
4226
4227 /* If the subtraction above overflowed, we can't verify anything.
4228 Otherwise, look for a parent that tests our value - 1. */
4229
4230 if (! tree_int_cst_lt (low_minus_one, node->low))
4231 return 0;
4232
4233 for (pnode = node->parent; pnode; pnode = pnode->parent)
4234 if (tree_int_cst_equal (low_minus_one, pnode->high))
4235 return 1;
4236
4237 return 0;
4238}
4239
4240/* Search the parent sections of the case node tree
4241 to see if a test for the upper bound of NODE would be redundant.
4242 INDEX_TYPE is the type of the index expression.
4243
4244 The instructions to generate the case decision tree are
4245 output in the same order as nodes are processed so it is
4246 known that if a parent node checks the range of the current
4247 node plus one that the current node is bounded at its upper
4248 span. Thus the test would be redundant. */
4249
4250static int
4251node_has_high_bound (node, index_type)
4252 case_node_ptr node;
4253 tree index_type;
4254{
4255 tree high_plus_one;
4256 case_node_ptr pnode;
4257
4258 /* If the upper bound of this node is the highest value in the type
4259 of the index expression, we need not test against it. */
4260
4261 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4262 return 1;
4263
4264 /* If this node has a right branch, the value at the right must be greater
4265 than that at this node, so it cannot be bounded at the top and
4266 we need not bother testing any further. */
4267
4268 if (node->right)
4269 return 0;
4270
4271 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4272 node->high, integer_one_node));
4273
4274 /* If the addition above overflowed, we can't verify anything.
4275 Otherwise, look for a parent that tests our value + 1. */
4276
4277 if (! tree_int_cst_lt (node->high, high_plus_one))
4278 return 0;
4279
4280 for (pnode = node->parent; pnode; pnode = pnode->parent)
4281 if (tree_int_cst_equal (high_plus_one, pnode->low))
4282 return 1;
4283
4284 return 0;
4285}
4286
4287/* Search the parent sections of the
4288 case node tree to see if both tests for the upper and lower
4289 bounds of NODE would be redundant. */
4290
4291static int
4292node_is_bounded (node, index_type)
4293 case_node_ptr node;
4294 tree index_type;
4295{
4296 return (node_has_low_bound (node, index_type)
4297 && node_has_high_bound (node, index_type));
4298}
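
The three predicates above are pure path reasoning over the tree. A sketch of the low-bound case in the tnode model from the previous sketch, with TYPE_MIN standing in for TYPE_MIN_VALUE of the index type; the real code's overflow guard corresponds to the early return here (names invented):

/* Nonzero if some test already guarantees index >= NODE->low whenever
   control reaches NODE, making an explicit low-bound test redundant. */
static int
has_low_bound (struct tnode *node, int type_min)
{
  struct tnode *p;

  if (node->low == type_min)
    return 1; /* nothing below the type's minimum exists */
  if (node->left != 0)
    return 0; /* smaller values live in our own subtree */
  for (p = node->parent; p != 0; p = p->parent)
    if (p->high == node->low - 1)
      return 1; /* an ancestor split off everything below us */
  return 0;
}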
4299
4300/* Emit an unconditional jump to LABEL unless it would be dead code. */
4301
4302static void
4303emit_jump_if_reachable (label)
4304 rtx label;
4305{
4306 if (GET_CODE (get_last_insn ()) != BARRIER)
4307 emit_jump (label);
4308}
4309\f
4310/* Emit step-by-step code to select a case for the value of INDEX.
4311 The thus generated decision tree follows the form of the
4312 case-node binary tree NODE, whose nodes represent test conditions.
4313 INDEX_TYPE is the type of the index of the switch.
4314
4315 Care is taken to prune redundant tests from the decision tree
4316 by detecting any boundary conditions already checked by
4317 emitted rtx. (See node_has_high_bound, node_has_low_bound
4318 and node_is_bounded, above.)
4319
4320 Where the test conditions can be shown to be redundant we emit
4321 an unconditional jump to the target code. As a further
4322 optimization, the subordinates of a tree node are examined to
4323 check for bounded nodes. In this case conditional and/or
4324 unconditional jumps as a result of the boundary check for the
4325 current node are arranged to target the subordinate's associated
4326 code for out-of-bound conditions on the current node.
4327
4328 We can assume that when control reaches the code generated here,
4329 the index value has already been compared with the parents
4330 of this node, and determined to be on the same side of each parent
4331 as this node is. Thus, if this node tests for the value 51,
4332 and a parent tested for 52, we don't need to consider
4333 the possibility of a value greater than 51. If another parent
4334 tests for the value 50, then this node need not test anything. */
4335
4336static void
4337emit_case_nodes (index, node, default_label, index_type)
4338 rtx index;
4339 case_node_ptr node;
4340 rtx default_label;
4341 tree index_type;
4342{
4343 /* If INDEX has an unsigned type, we must make unsigned branches. */
4344 int unsignedp = TREE_UNSIGNED (index_type);
4345 typedef rtx rtx_function ();
4346 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4347 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4348 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4349 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4350 enum machine_mode mode = GET_MODE (index);
4351
4352 /* See if our parents have already tested everything for us.
4353 If they have, emit an unconditional jump for this node. */
4354 if (node_is_bounded (node, index_type))
4355 emit_jump (label_rtx (node->code_label));
4356
4357 else if (tree_int_cst_equal (node->low, node->high))
4358 {
4359 /* Node is single valued. First see if the index expression matches
4360 this node and then check our children, if any. */
4361
4362 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4363 label_rtx (node->code_label), unsignedp);
4364
4365 if (node->right != 0 && node->left != 0)
4366 {
4367 /* This node has children on both sides.
4368 Dispatch to one side or the other
4369 by comparing the index value with this node's value.
4370 If one subtree is bounded, check that one first,
4371 so we can avoid real branches in the tree. */
4372
4373 if (node_is_bounded (node->right, index_type))
4374 {
4375 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4376 VOIDmode, 0),
4377 GT, NULL_RTX, mode, unsignedp, 0);
4378
4379 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4380 emit_case_nodes (index, node->left, default_label, index_type);
4381 }
4382
4383 else if (node_is_bounded (node->left, index_type))
4384 {
4385 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4386 VOIDmode, 0),
4387 LT, NULL_RTX, mode, unsignedp, 0);
4388 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4389 emit_case_nodes (index, node->right, default_label, index_type);
4390 }
4391
4392 else
4393 {
4394 /* Neither node is bounded. First distinguish the two sides;
4395 then emit the code for one side at a time. */
4396
4397 tree test_label
4398 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4399
4400 /* See if the value is on the right. */
4401 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4402 VOIDmode, 0),
4403 GT, NULL_RTX, mode, unsignedp, 0);
4404 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4405
4406 /* Value must be on the left.
4407 Handle the left-hand subtree. */
4408 emit_case_nodes (index, node->left, default_label, index_type);
4409 /* If left-hand subtree does nothing,
4410 go to default. */
4411 emit_jump_if_reachable (default_label);
4412
4413 /* Code branches here for the right-hand subtree. */
4414 expand_label (test_label);
4415 emit_case_nodes (index, node->right, default_label, index_type);
4416 }
4417 }
4418
4419 else if (node->right != 0 && node->left == 0)
4420 {
4421 /* Here we have a right child but no left so we issue conditional
4422 branch to default and process the right child.
4423
4424 Omit the conditional branch to default if it would avoid only one
4425 right child; it costs too much space to save so little time. */
4426
4427 if (node->right->right || node->right->left
4428 || !tree_int_cst_equal (node->right->low, node->right->high))
4429 {
4430 if (!node_has_low_bound (node, index_type))
4431 {
4432 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4433 VOIDmode, 0),
4434 LT, NULL_RTX, mode, unsignedp, 0);
4435 emit_jump_insn ((*gen_blt_pat) (default_label));
4436 }
4437
4438 emit_case_nodes (index, node->right, default_label, index_type);
4439 }
4440 else
4441 /* We cannot process node->right normally
4442 since we haven't ruled out the numbers less than
4443 this node's value. So handle node->right explicitly. */
4444 do_jump_if_equal (index,
4445 expand_expr (node->right->low, NULL_RTX,
4446 VOIDmode, 0),
4447 label_rtx (node->right->code_label), unsignedp);
4448 }
4449
4450 else if (node->right == 0 && node->left != 0)
4451 {
4452 /* Just one subtree, on the left. */
4453
4454#if 0 /* The following code and comment were formerly part
4455 of the condition here, but they didn't work
4456 and I don't understand what the idea was. -- rms. */
4457 /* If our "most probable entry" is less probable
4458 than the default label, emit a jump to
4459 the default label using condition codes
4460 already lying around. With no right branch,
4461 a branch-greater-than will get us to the default
4462 label correctly. */
4463 if (use_cost_table
4464 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4465 ;
4466#endif /* 0 */
4467 if (node->left->left || node->left->right
4468 || !tree_int_cst_equal (node->left->low, node->left->high))
4469 {
4470 if (!node_has_high_bound (node, index_type))
4471 {
4472 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4473 VOIDmode, 0),
4474 GT, NULL_RTX, mode, unsignedp, 0);
4475 emit_jump_insn ((*gen_bgt_pat) (default_label));
4476 }
4477
4478 emit_case_nodes (index, node->left, default_label, index_type);
4479 }
4480 else
4481 /* We cannot process node->left normally
4482 since we haven't ruled out the numbers greater than
4483 this node's value. So handle node->left explicitly. */
4484 do_jump_if_equal (index,
4485 expand_expr (node->left->low, NULL_RTX,
4486 VOIDmode, 0),
4487 label_rtx (node->left->code_label), unsignedp);
4488 }
4489 }
4490 else
4491 {
4492 /* Node is a range. These cases are very similar to those for a single
4493 value, except that we do not start by testing whether this node
4494 is the one to branch to. */
4495
4496 if (node->right != 0 && node->left != 0)
4497 {
4498 /* Node has subtrees on both sides.
4499 If the right-hand subtree is bounded,
4500 test for it first, since we can go straight there.
4501 Otherwise, we need to make a branch in the control structure,
4502 then handle the two subtrees. */
4503 tree test_label = 0;
4504
4505 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4506 VOIDmode, 0),
4507 GT, NULL_RTX, mode, unsignedp, 0);
4508
4509 if (node_is_bounded (node->right, index_type))
4510 /* Right hand node is fully bounded so we can eliminate any
4511 testing and branch directly to the target code. */
4512 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4513 else
4514 {
4515 /* Right hand node requires testing.
4516 Branch to a label where we will handle it later. */
4517
4518 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4519 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4520 }
4521
4522 /* Value belongs to this node or to the left-hand subtree. */
4523
4524 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4525 GE, NULL_RTX, mode, unsignedp, 0);
4526 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4527
4528 /* Handle the left-hand subtree. */
4529 emit_case_nodes (index, node->left, default_label, index_type);
4530
4531 /* If right node had to be handled later, do that now. */
4532
4533 if (test_label)
4534 {
4535 /* If the left-hand subtree fell through,
4536 don't let it fall into the right-hand subtree. */
4537 emit_jump_if_reachable (default_label);
4538
4539 expand_label (test_label);
4540 emit_case_nodes (index, node->right, default_label, index_type);
4541 }
4542 }
4543
4544 else if (node->right != 0 && node->left == 0)
4545 {
4546 /* Deal with values to the left of this node,
4547 if they are possible. */
4548 if (!node_has_low_bound (node, index_type))
4549 {
4550 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
4551 VOIDmode, 0),
4552 LT, NULL_RTX, mode, unsignedp, 0);
4553 emit_jump_insn ((*gen_blt_pat) (default_label));
4554 }
4555
4556 /* Value belongs to this node or to the right-hand subtree. */
4557
4558 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4559 VOIDmode, 0),
4560 LE, NULL_RTX, mode, unsignedp, 0);
4561 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4562
4563 emit_case_nodes (index, node->right, default_label, index_type);
4564 }
4565
4566 else if (node->right == 0 && node->left != 0)
4567 {
4568 /* Deal with values to the right of this node,
4569 if they are possible. */
4570 if (!node_has_high_bound (node, index_type))
4571 {
4572 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4573 VOIDmode, 0),
4574 GT, NULL_RTX, mode, unsignedp, 0);
4575 emit_jump_insn ((*gen_bgt_pat) (default_label));
4576 }
4577
4578 /* Value belongs to this node or to the left-hand subtree. */
4579
4580 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4581 GE, NULL_RTX, mode, unsignedp, 0);
4582 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4583
4584 emit_case_nodes (index, node->left, default_label, index_type);
4585 }
4586
4587 else
4588 {
4589 /* Node has no children so we check low and high bounds to remove
4590 redundant tests. Only one of the bounds can exist,
4591 since otherwise this node is bounded--a case tested already. */
4592
4593 if (!node_has_high_bound (node, index_type))
4594 {
4595 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4596 VOIDmode, 0),
4597 GT, NULL_RTX, mode, unsignedp, 0);
4598 emit_jump_insn ((*gen_bgt_pat) (default_label));
4599 }
4600
4601 if (!node_has_low_bound (node, index_type))
4602 {
4603 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
4604 VOIDmode, 0),
4605 LT, NULL_RTX, mode, unsignedp, 0);
4606 emit_jump_insn ((*gen_blt_pat) (default_label));
4607 }
4608
4609 emit_jump (label_rtx (node->code_label));
4610 }
4611 }
4612}
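
For a single-valued node with children on both sides, the emitted shape is: an equality test for this node, a greater-than split, the left subtree, a jump to default, then the right subtree. A toy renderer of that shape over the tnode model, printing pseudo-branches instead of emitting RTL; ranges and the bound pruning above are deliberately omitted (illustrative only):

#include <stdio.h>

/* Print the branch structure emit_case_nodes would produce for a
   tree of single-valued nodes (node->low == node->high). */
static void
print_case_tree (struct tnode *node)
{
  if (node == 0)
    return;
  printf ("  if (index == %d) goto L_%d;\n", node->low, node->low);
  if (node->left != 0 && node->right != 0)
    {
      printf ("  if (index > %d) goto T_%d;\n", node->high, node->high);
      print_case_tree (node->left);
      printf ("  goto L_default;\n");
      printf ("T_%d:\n", node->high);
      print_case_tree (node->right);
    }
  else if (node->left != 0)
    print_case_tree (node->left);
  else
    print_case_tree (node->right);
}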
4613\f
4614/* These routines are used by the loop unrolling code. They copy BLOCK trees
4615 so that the debugging info will be correct for the unrolled loop. */
4616
4617/* Indexed by block number, contains a pointer to the N'th block node. */
4618
4619static tree *block_vector;
4620
4621void
4622find_loop_tree_blocks ()
4623{
4624 tree block = DECL_INITIAL (current_function_decl);
4625
4626 /* The first block is for the function body, and does not have
4627 corresponding block notes. Don't include it in the block vector. */
4628 block = BLOCK_SUBBLOCKS (block);
4629
4630 block_vector = identify_blocks (block, get_insns ());
4631}
4632
4633void
4634unroll_block_trees ()
4635{
4636 tree block = DECL_INITIAL (current_function_decl);
4637
4638 reorder_blocks (block_vector, block, get_insns ());
4639}
4640