/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
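
/* For example (a sketch; the exact arguments are those of the
   conditional-statement functions later in this file, not shown in this
   excerpt), a front end handling `if (cond) { ... }' would make a call
   sequence like:

     expand_start_cond (cond, 0);
     ... expand RTL for the then-clause ...
     expand_end_cond ();
*/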

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not
   optimizing.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
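
/* Example (a sketch): for `switch (i) { case 1: ... case 4 ... 7: ... }'
   (the second label uses the GNU C range extension) the chain built
   through the RIGHT fields holds two nodes, {low = 1, high = 1} and
   {low = 4, high = 7}, each with CODE_LABEL naming its case's label.  */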

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)                                 \
do { int initial_depth = nesting_stack->depth;          \
     do { struct nesting *this = STACK;                 \
          STACK = this->next;                           \
          nesting_stack = this->all;                    \
          nesting_depth = this->depth;                  \
          obstack_free (&stmt_obstack, this); }         \
     while (nesting_depth > initial_depth); } while (0)
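
/* Example (a sketch): a construct-closing function such as
   `expand_end_loop' would finish with

     POPSTACK (loop_stack);

   freeing the loop's `struct nesting' and any entries pushed on
   `nesting_stack' above it.  */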
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
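
/* Example (a sketch, using the GNU C variable-sized array extension):

     {
       int buf[n];     -- this contour resets the stack on exit
       ...
       goto out;       -- label not seen yet: expand_fixup records it
     }
   out: ;

   The jump insn is emitted at once; the stack-restoring insns are
   inserted before it later, by fixup_gotos, once `out' is defined.  */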

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
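
/* Example (a sketch): the GNU C computed goto

     void *p = &&lab;  ...  goto *p;

   reaches here with EXP being the pointer expression `p'; the label
   value and the indirect jump are the labels-as-values extension.  */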
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      gen_rtx (LABEL_REF, Pmode,
                                               label_rtx (label))));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with stack_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (p->nonlocal_goto_handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx, frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx,
                          gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
          /* USE of frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
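
/* Example (a sketch): the nonlocal path above is taken for a GNU C
   nested function jumping to a label of its containing function
   (declared with __label__ so it can be a nonlocal target):

     void f ()
     {
       __label__ out;
       void g () { goto out; }
       g ();
      out: ;
     }
*/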

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this would clobber
             the stack pointer.  This one should be deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        start_sequence ();
        pushlevel (0);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
        end_sequence ();
        emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
#if 0
             && block->data.block.outer_cleanups != empty_cleanup_list
#endif
             )
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_REGISTER (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_REGISTER (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), 0);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.
           Also mark the cleanup_list_list element for F
           that corresponds to this block, so that ultimately
           this block's cleanups will be executed by the code above.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
           it means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            TREE_ADDRESSABLE (lists) = 1;

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
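
/* Example (a sketch): the GNU C statement `asm ("nop");' arrives here
   with BODY the STRING_CST "nop" (possibly wrapped in an ADDR_EXPR)
   and is emitted as a single ASM_INPUT rtx.  */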

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
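
/* Example (a sketch): for the GNU C extended asm

     asm volatile ("mov %1,%0" : "=r" (dst) : "r" (src) : "cc");

   STRING is "mov %1,%0"; OUTPUTS holds one pair (TREE_PURPOSE "=r",
   TREE_VALUE `dst'); INPUTS holds ("r", `src'); CLOBBERS holds the
   STRING_CST "cc"; and VOL is 1 because of `volatile'.  */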

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        {
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            {
              error ("output operand constraint contains `+'");
              return;
            }
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
            found_equal = 1;
        }
      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a variable or indirect ref,
         or a part of one,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
             || TREE_CODE (val) == ARRAY_REF)
        val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
          && TREE_CODE (val) != PARM_DECL
          && TREE_CODE (val) != INDIRECT_REF)
        TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)));
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
          {
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;
          }

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
                  continue;
                }

              error ("unknown register name `%s' in `asm'", regname);
              return;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         BLKmode, 0,
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
        return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
        return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
        return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
        return 0;
      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
        return 0;
      /* Sometimes it results in a cast of a cast of a modify.
         Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
           || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
        return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
           || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
          && TREE_THIS_VOLATILE (exp))
        return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
      return 1;
    }
}
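
/* Example (a sketch): with -Wunused, the statement `foo (), x + 1;'
   reaches the default case for the unused `x + 1' operand and warns
   "value computed is not used", while `foo (), 0;' is let through by
   the COMPOUND_EXPR case above.  */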

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
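
/* Example (a sketch): for the GNU C statement expression

     z = ({ int t = f (); t + 1; });

   the front end calls expand_start_stmt_expr before expanding the body,
   then expand_end_stmt_expr afterwards; the value of the last expr-stmt,
   `t + 1', becomes the RTL_EXPR's value.  */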
\f
/* The exception handling nesting looks like this:

                <-- Level N-1
    {           <-- exception handler block
                <-- Level N
                <-- in an exception handler
        {       <-- try block
        :       <-- in a TRY block
        :       <-- in an exception handler
        :
        }

        {       <-- except block
        :       <-- in an except block
        :       <-- in an exception handler
        :
        }

    }
*/

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
1584
1585/* Generate RTL for the start of a try block.
1586
1587 TRY_CLAUSE is the condition to test to enter the try block. */
1588
1589void
1590expand_start_try (try_clause, exitflag, escapeflag)
1591 tree try_clause;
1592 int exitflag;
1593 int escapeflag;
1594{
1595 struct nesting *thishandler = ALLOC_NESTING ();
1596
1597 /* Make an entry on cond_stack for the cond we are entering. */
1598
1599 thishandler->next = except_stack;
1600 thishandler->all = nesting_stack;
1601 thishandler->depth = ++nesting_depth;
1602 thishandler->data.except_stmt.raised = 0;
1603 thishandler->data.except_stmt.handled = 0;
1604 thishandler->data.except_stmt.first_insn = get_insns ();
1605 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1606 thishandler->data.except_stmt.unhandled_label = 0;
1607 thishandler->data.except_stmt.after_label = 0;
1608 thishandler->data.except_stmt.escape_label
1609 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
1610 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
1611 except_stack = thishandler;
1612 nesting_stack = thishandler;
1613
37366632 1614 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
28d81abb
RK
1615}
1616
1617/* End of a TRY block. Nothing to do for now. */
1618
1619void
1620expand_end_try ()
1621{
1622 except_stack->data.except_stmt.after_label = gen_label_rtx ();
37366632
RK
1623 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1624 NULL_RTX);
28d81abb
RK
1625}
1626
1627/* Start an `except' nesting contour.
1628 EXITFLAG says whether this contour should be able to `exit' something.
1629 ESCAPEFLAG says whether this contour should be escapable. */
1630
1631void
1632expand_start_except (exitflag, escapeflag)
1633 int exitflag;
1634 int escapeflag;
1635{
1636 if (exitflag)
1637 {
1638 struct nesting *n;
1639 /* An `exit' from catch clauses goes out to next exit level,
1640 if there is one. Otherwise, it just goes to the end
1641 of the construct. */
1642 for (n = except_stack->next; n; n = n->next)
1643 if (n->exit_label != 0)
1644 {
1645 except_stack->exit_label = n->exit_label;
1646 break;
1647 }
1648 if (n == 0)
1649 except_stack->exit_label = except_stack->data.except_stmt.after_label;
1650 }
1651 if (escapeflag)
1652 {
1653 struct nesting *n;
1654 /* An `escape' from catch clauses goes out to next escape level,
1655 if there is one. Otherwise, it just goes to the end
1656 of the construct. */
1657 for (n = except_stack->next; n; n = n->next)
1658 if (n->data.except_stmt.escape_label != 0)
1659 {
1660 except_stack->data.except_stmt.escape_label
1661 = n->data.except_stmt.escape_label;
1662 break;
1663 }
1664 if (n == 0)
1665 except_stack->data.except_stmt.escape_label
1666 = except_stack->data.except_stmt.after_label;
1667 }
1668 do_pending_stack_adjust ();
1669 emit_label (except_stack->data.except_stmt.except_label);
1670}
1671
1672/* Generate code to `escape' from an exception contour. This
1673 is like `exiting', but does not conflict with constructs which
1674 use `exit_label'.
1675
1676 Return nonzero if this contour is escapable, otherwise
1677 return zero, and language-specific code will emit the
1678 appropriate error message. */
1679int
1680expand_escape_except ()
1681{
1682 struct nesting *n;
1683 last_expr_type = 0;
1684 for (n = except_stack; n; n = n->next)
1685 if (n->data.except_stmt.escape_label != 0)
1686 {
1687 expand_goto_internal (NULL_TREE,
1688 n->data.except_stmt.escape_label, NULL_RTX);
1689 return 1;
1690 }
1691
1692 return 0;
1693}
1694
1695/* Finish processing an `except' contour.
1696 Culls out all exceptions which might be raised but not
1697 handled, and returns the list to the caller.
1698 Language-specific code is responsible for dealing with these
1699 exceptions. */
1700
1701tree
1702expand_end_except ()
1703{
1704 struct nesting *n;
1705 tree raised = NULL_TREE;
1706
1707 do_pending_stack_adjust ();
1708 emit_label (except_stack->data.except_stmt.after_label);
1709
1710 n = except_stack->next;
1711 if (n)
1712 {
1713 /* Propagate exceptions raised but not handled to next
1714 highest level. */
1715 tree handled = except_stack->data.except_stmt.raised;
1716 if (handled != void_type_node)
1717 {
1718 tree prev = NULL_TREE;
1719 raised = except_stack->data.except_stmt.raised;
1720 while (handled)
1721 {
1722 tree this_raise;
1723 for (this_raise = raised, prev = 0; this_raise;
1724 this_raise = TREE_CHAIN (this_raise))
1725 {
1726 if (value_member (TREE_VALUE (this_raise), handled))
1727 {
1728 if (prev)
1729 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
1730 else
1731 {
1732 raised = TREE_CHAIN (raised);
1733 if (raised == NULL_TREE)
1734 goto nada;
1735 }
1736 }
1737 else
1738 prev = this_raise;
1739 }
1740 handled = TREE_CHAIN (handled);
1741 }
1742 if (prev == NULL_TREE)
1743 prev = raised;
1744 if (prev)
1745 TREE_CHAIN (prev) = n->data.except_stmt.raised;
1746 nada:
1747 n->data.except_stmt.raised = raised;
1748 }
1749 }
1750
1751 POPSTACK (except_stack);
1752 last_expr_type = 0;
1753 return raised;
1754}
1755
1756/* Record that exception EX is caught by this exception handler.
1757 Return nonzero if in exception handling construct, otherwise return 0. */
1758int
1759expand_catch (ex)
1760 tree ex;
1761{
1762 tree *raises_ptr;
1763
1764 if (except_stack == 0)
1765 return 0;
1766 raises_ptr = &except_stack->data.except_stmt.handled;
1767 if (*raises_ptr != void_type_node
1768 && ex != NULL_TREE
1769 && ! value_member (ex, *raises_ptr))
1770 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1771 return 1;
1772}
1773
1774/* Record that this exception handler catches all exceptions.
1775 Return nonzero if in exception handling construct, otherwise return 0. */
1776
1777int
1778expand_catch_default ()
1779{
1780 if (except_stack == 0)
1781 return 0;
1782 except_stack->data.except_stmt.handled = void_type_node;
1783 return 1;
1784}
1785
1786int
1787expand_end_catch ()
1788{
1789 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
1790 return 0;
1791 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1792 NULL_RTX);
1793 return 1;
1794}
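
/* Sketch of how a front end might drive the routines above for a
   construct like `try BODY except (E) HANDLER'.  The nodes TRY_CLAUSE
   and E here are hypothetical placeholders:

	expand_start_try (try_clause, 0, 1);
	...expand BODY...
	expand_end_try ();
	expand_start_except (0, 1);
	expand_catch (e);
	...expand HANDLER...
	expand_end_catch ();
	unhandled = expand_end_except ();

   The list returned by expand_end_except holds the exceptions raised
   in BODY but caught by no handler; the front end must report them.  */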
1795\f
1796/* Generate RTL for the start of an if-then. COND is the expression
1797 whose truth should be tested.
1798
1799 If EXITFLAG is nonzero, this conditional is visible to
1800 `exit_something'. */
1801
1802void
1803expand_start_cond (cond, exitflag)
1804 tree cond;
1805 int exitflag;
1806{
1807 struct nesting *thiscond = ALLOC_NESTING ();
1808
1809 /* Make an entry on cond_stack for the cond we are entering. */
1810
1811 thiscond->next = cond_stack;
1812 thiscond->all = nesting_stack;
1813 thiscond->depth = ++nesting_depth;
1814 thiscond->data.cond.next_label = gen_label_rtx ();
1815 /* Before we encounter an `else', we don't need a separate exit label
1816 unless there are supposed to be exit statements
1817 to exit this conditional. */
1818 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1819 thiscond->data.cond.endif_label = thiscond->exit_label;
1820 cond_stack = thiscond;
1821 nesting_stack = thiscond;
1822
1823  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1824}
1825
1826/* Generate RTL between then-clause and the elseif-clause
1827 of an if-then-elseif-.... */
1828
1829void
1830expand_start_elseif (cond)
1831 tree cond;
1832{
1833 if (cond_stack->data.cond.endif_label == 0)
1834 cond_stack->data.cond.endif_label = gen_label_rtx ();
1835 emit_jump (cond_stack->data.cond.endif_label);
1836 emit_label (cond_stack->data.cond.next_label);
1837 cond_stack->data.cond.next_label = gen_label_rtx ();
1838  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1839}
1840
1841/* Generate RTL between the then-clause and the else-clause
1842 of an if-then-else. */
1843
1844void
1845expand_start_else ()
1846{
1847 if (cond_stack->data.cond.endif_label == 0)
1848 cond_stack->data.cond.endif_label = gen_label_rtx ();
1849 emit_jump (cond_stack->data.cond.endif_label);
1850 emit_label (cond_stack->data.cond.next_label);
1851 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1852}
1853
1854/* Generate RTL for the end of an if-then.
1855 Pop the record for it off of cond_stack. */
1856
1857void
1858expand_end_cond ()
1859{
1860 struct nesting *thiscond = cond_stack;
1861
1862 do_pending_stack_adjust ();
1863 if (thiscond->data.cond.next_label)
1864 emit_label (thiscond->data.cond.next_label);
1865 if (thiscond->data.cond.endif_label)
1866 emit_label (thiscond->data.cond.endif_label);
1867
1868 POPSTACK (cond_stack);
1869 last_expr_type = 0;
1870}
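
/* Illustrative call sequence for `if (A) S1; else if (B) S2; else S3;'
   (A, B, S1, S2 and S3 are placeholders):

	expand_start_cond (a, 0);	jump-if-false to next_label
	...expand S1...
	expand_start_elseif (b);	jump to endif; emit next_label; retest
	...expand S2...
	expand_start_else ();		jump to endif; emit next_label
	...expand S3...
	expand_end_cond ();		emit endif_label

   Each next_label receives control when the test before it fails.  */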
1871\f
1872/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1873 loop should be exited by `exit_something'. This is a loop for which
1874 `expand_continue' will jump to the top of the loop.
1875
1876 Make an entry on loop_stack to record the labels associated with
1877 this loop. */
1878
1879struct nesting *
1880expand_start_loop (exit_flag)
1881 int exit_flag;
1882{
1883 register struct nesting *thisloop = ALLOC_NESTING ();
1884
1885 /* Make an entry on loop_stack for the loop we are entering. */
1886
1887 thisloop->next = loop_stack;
1888 thisloop->all = nesting_stack;
1889 thisloop->depth = ++nesting_depth;
1890 thisloop->data.loop.start_label = gen_label_rtx ();
1891 thisloop->data.loop.end_label = gen_label_rtx ();
1892 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1893 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1894 loop_stack = thisloop;
1895 nesting_stack = thisloop;
1896
1897 do_pending_stack_adjust ();
1898 emit_queue ();
1899  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1900 emit_label (thisloop->data.loop.start_label);
1901
1902 return thisloop;
1903}
1904
1905/* Like expand_start_loop but for a loop where the continuation point
1906 (for expand_continue_loop) will be specified explicitly. */
1907
1908struct nesting *
1909expand_start_loop_continue_elsewhere (exit_flag)
1910 int exit_flag;
1911{
1912 struct nesting *thisloop = expand_start_loop (exit_flag);
1913 loop_stack->data.loop.continue_label = gen_label_rtx ();
1914 return thisloop;
1915}
1916
1917/* Specify the continuation point for a loop started with
1918 expand_start_loop_continue_elsewhere.
1919 Use this at the point in the code to which a continue statement
1920 should jump. */
1921
1922void
1923expand_loop_continue_here ()
1924{
1925 do_pending_stack_adjust ();
1926  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1927 emit_label (loop_stack->data.loop.continue_label);
1928}
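
/* Sketch of one way a front end could expand
   `for (INIT; COND; INCR) BODY' with the loop routines above:

	...expand INIT...
	loop = expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (loop, cond);
	...expand BODY...
	expand_loop_continue_here ();	`continue' statements jump here
	...expand INCR...
	expand_end_loop ();		jumps back to the start label
*/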
1929
1930/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1931 Pop the block off of loop_stack. */
1932
1933void
1934expand_end_loop ()
1935{
1936 register rtx insn = get_last_insn ();
1937 register rtx start_label = loop_stack->data.loop.start_label;
1938 rtx last_test_insn = 0;
1939 int num_insns = 0;
1940
1941 /* Mark the continue-point at the top of the loop if none elsewhere. */
1942 if (start_label == loop_stack->data.loop.continue_label)
1943 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1944
1945 do_pending_stack_adjust ();
1946
1947 /* If optimizing, perhaps reorder the loop. If the loop
1948 starts with a conditional exit, roll that to the end
1949 where it will optimize together with the jump back.
1950
1951 We look for the last conditional branch to the exit that we encounter
1952 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1953 branch to the exit first, use it.
1954
1955 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1956 because moving them is not valid. */
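
/* Roughly, a loop laid out as

	start:	if (! COND) goto end;
		BODY
		goto start;
	end:

   becomes

		goto start;
	newstart: BODY
	start:	if (! COND) goto end;
		goto newstart;
	end:

   and jump optimization can then fold the two jumps at the bottom into
   a single `if (COND) goto newstart', one conditional jump per
   iteration.  */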
1957
1958 if (optimize
1959 &&
1960 ! (GET_CODE (insn) == JUMP_INSN
1961 && GET_CODE (PATTERN (insn)) == SET
1962 && SET_DEST (PATTERN (insn)) == pc_rtx
1963 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1964 {
1965 /* Scan insns from the top of the loop looking for a qualified
1966 conditional exit. */
1967 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1968 insn = NEXT_INSN (insn))
1969 {
1970 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1971 break;
1972
1973 if (GET_CODE (insn) == NOTE
1974 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1975 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1976 break;
1977
1978 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1979 num_insns++;
1980
1981 if (last_test_insn && num_insns > 30)
1982 break;
1983
1984 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1985 && SET_DEST (PATTERN (insn)) == pc_rtx
1986 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1987 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1988 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1989 == loop_stack->data.loop.end_label))
1990 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1991 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1992 == loop_stack->data.loop.end_label))))
1993 last_test_insn = insn;
1994
1995 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1996 && GET_CODE (PATTERN (insn)) == SET
1997 && SET_DEST (PATTERN (insn)) == pc_rtx
1998 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1999 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2000 == loop_stack->data.loop.end_label))
2001 /* Include BARRIER. */
2002 last_test_insn = NEXT_INSN (insn);
2003 }
2004
2005 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2006 {
2007 /* We found one. Move everything from there up
2008 to the end of the loop, and add a jump into the loop
2009 to jump to there. */
2010 register rtx newstart_label = gen_label_rtx ();
2011 register rtx start_move = start_label;
2012
2013	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2014 then we want to move this note also. */
2015 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2016 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2017 == NOTE_INSN_LOOP_CONT))
2018 start_move = PREV_INSN (start_move);
2019
2020 emit_label_after (newstart_label, PREV_INSN (start_move));
2021 reorder_insns (start_move, last_test_insn, get_last_insn ());
2022 emit_jump_insn_after (gen_jump (start_label),
2023 PREV_INSN (newstart_label));
2024 emit_barrier_after (PREV_INSN (newstart_label));
2025 start_label = newstart_label;
2026 }
2027 }
2028
2029 emit_jump (start_label);
2030  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2031 emit_label (loop_stack->data.loop.end_label);
2032
2033 POPSTACK (loop_stack);
2034
2035 last_expr_type = 0;
2036}
2037
2038/* Generate a jump to the current loop's continue-point.
2039 This is usually the top of the loop, but may be specified
2040 explicitly elsewhere. If not currently inside a loop,
2041 return 0 and do nothing; caller will print an error message. */
2042
2043int
2044expand_continue_loop (whichloop)
2045 struct nesting *whichloop;
2046{
2047 last_expr_type = 0;
2048 if (whichloop == 0)
2049 whichloop = loop_stack;
2050 if (whichloop == 0)
2051 return 0;
2052 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2053 NULL_RTX);
2054 return 1;
2055}
2056
2057/* Generate a jump to exit the current loop. If not currently inside a loop,
2058 return 0 and do nothing; caller will print an error message. */
2059
2060int
2061expand_exit_loop (whichloop)
2062 struct nesting *whichloop;
2063{
2064 last_expr_type = 0;
2065 if (whichloop == 0)
2066 whichloop = loop_stack;
2067 if (whichloop == 0)
2068 return 0;
2069  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2070 return 1;
2071}
2072
2073/* Generate a conditional jump to exit the current loop if COND
2074 evaluates to zero. If not currently inside a loop,
2075 return 0 and do nothing; caller will print an error message. */
2076
2077int
2078expand_exit_loop_if_false (whichloop, cond)
2079 struct nesting *whichloop;
2080 tree cond;
2081{
2082 last_expr_type = 0;
2083 if (whichloop == 0)
2084 whichloop = loop_stack;
2085 if (whichloop == 0)
2086 return 0;
2087  do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2088 return 1;
2089}
2090
2091/* Return non-zero if we should preserve sub-expressions as separate
2092 pseudos. We never do so if we aren't optimizing. We always do so
2093 if -fexpensive-optimizations.
2094
2095 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2096 the loop may still be a small one. */
2097
2098int
2099preserve_subexpressions_p ()
2100{
2101 rtx insn;
2102
2103 if (flag_expensive_optimizations)
2104 return 1;
2105
2106 if (optimize == 0 || loop_stack == 0)
2107 return 0;
2108
2109 insn = get_last_insn_anywhere ();
2110
2111 return (insn
2112 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2113 < n_non_fixed_regs * 3));
2114
2115}
2116
2117/* Generate a jump to exit the current loop, conditional, binding contour
2118 or case statement. Not all such constructs are visible to this function,
2119 only those started with EXIT_FLAG nonzero. Individual languages use
2120 the EXIT_FLAG parameter to control which kinds of constructs you can
2121 exit this way.
2122
2123 If not currently inside anything that can be exited,
2124 return 0 and do nothing; caller will print an error message. */
2125
2126int
2127expand_exit_something ()
2128{
2129 struct nesting *n;
2130 last_expr_type = 0;
2131 for (n = nesting_stack; n; n = n->all)
2132 if (n->exit_label != 0)
2133 {
2134	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2135 return 1;
2136 }
2137
2138 return 0;
2139}
2140\f
2141/* Generate RTL to return from the current function, with no value.
2142 (That is, we do not do anything about returning any value.) */
2143
2144void
2145expand_null_return ()
2146{
2147 struct nesting *block = block_stack;
2148 rtx last_insn = 0;
2149
2150 /* Does any pending block have cleanups? */
2151
2152 while (block && block->data.block.cleanups == 0)
2153 block = block->next;
2154
2155 /* If yes, use a goto to return, since that runs cleanups. */
2156
2157 expand_null_return_1 (last_insn, block != 0);
2158}
2159
2160/* Generate RTL to return from the current function, with value VAL. */
2161
2162void
2163expand_value_return (val)
2164 rtx val;
2165{
2166 struct nesting *block = block_stack;
2167 rtx last_insn = get_last_insn ();
2168 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2169
2170 /* Copy the value to the return location
2171 unless it's already there. */
2172
2173 if (return_reg != val)
2174 emit_move_insn (return_reg, val);
2175 if (GET_CODE (return_reg) == REG
2176 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2177 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2178
2179 /* Does any pending block have cleanups? */
2180
2181 while (block && block->data.block.cleanups == 0)
2182 block = block->next;
2183
2184 /* If yes, use a goto to return, since that runs cleanups.
2185 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2186
2187 expand_null_return_1 (last_insn, block != 0);
2188}
2189
2190/* Output a return with no value. If LAST_INSN is nonzero,
2191 pretend that the return takes place after LAST_INSN.
2192 If USE_GOTO is nonzero then don't use a return instruction;
2193 go to the return label instead. This causes any cleanups
2194 of pending blocks to be executed normally. */
2195
2196static void
2197expand_null_return_1 (last_insn, use_goto)
2198 rtx last_insn;
2199 int use_goto;
2200{
2201 rtx end_label = cleanup_label ? cleanup_label : return_label;
2202
2203 clear_pending_stack_adjust ();
2204 do_pending_stack_adjust ();
2205 last_expr_type = 0;
2206
2207 /* PCC-struct return always uses an epilogue. */
2208 if (current_function_returns_pcc_struct || use_goto)
2209 {
2210 if (end_label == 0)
2211 end_label = return_label = gen_label_rtx ();
2212      expand_goto_internal (NULL_TREE, end_label, last_insn);
2213 return;
2214 }
2215
2216 /* Otherwise output a simple return-insn if one is available,
2217 unless it won't do the job. */
2218#ifdef HAVE_return
2219 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2220 {
2221 emit_jump_insn (gen_return ());
2222 emit_barrier ();
2223 return;
2224 }
2225#endif
2226
2227 /* Otherwise jump to the epilogue. */
2228  expand_goto_internal (NULL_TREE, end_label, last_insn);
2229}
2230\f
2231/* Generate RTL to evaluate the expression RETVAL and return it
2232 from the current function. */
2233
2234void
2235expand_return (retval)
2236 tree retval;
2237{
2238 /* If there are any cleanups to be performed, then they will
2239 be inserted following LAST_INSN. It is desirable
2240 that the last_insn, for such purposes, should be the
2241 last insn before computing the return value. Otherwise, cleanups
2242 which call functions can clobber the return value. */
2243 /* ??? rms: I think that is erroneous, because in C++ it would
2244 run destructors on variables that might be used in the subsequent
2245 computation of the return value. */
2246 rtx last_insn = 0;
2247 register rtx val = 0;
2248 register rtx op0;
2249 tree retval_rhs;
2250 int cleanups;
2251 struct nesting *block;
2252
2253 /* If function wants no value, give it none. */
2254 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2255 {
2256      expand_expr (retval, NULL_RTX, VOIDmode, 0);
2257      emit_queue ();
2258 expand_null_return ();
2259 return;
2260 }
2261
2262 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2263 cleanups = any_pending_cleanups (1);
2264
2265 if (TREE_CODE (retval) == RESULT_DECL)
2266 retval_rhs = retval;
2267 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2268 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2269 retval_rhs = TREE_OPERAND (retval, 1);
2270 else if (TREE_TYPE (retval) == void_type_node)
2271 /* Recognize tail-recursive call to void function. */
2272 retval_rhs = retval;
2273 else
2274 retval_rhs = NULL_TREE;
2275
2276 /* Only use `last_insn' if there are cleanups which must be run. */
2277 if (cleanups || cleanup_label != 0)
2278 last_insn = get_last_insn ();
2279
2280  /* Distribute return down conditional expr if either of the sides
2281     may involve tail recursion (see test below).  This increases the number
2282     of tail recursions we can detect.  Don't do this always, since it can
2283     produce sub-optimal code in some cases, and we distribute assignments
2284     into conditional expressions when it would help.  */
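
  /* For instance, `return p ? f (x) : g (x);' is expanded here as if it
     were `if (p) return f (x); else return g (x);', so each arm gets its
     own chance at the tail-recursion test below.  */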
2285
2286 if (optimize && retval_rhs != 0
2287 && frame_offset == 0
2288 && TREE_CODE (retval_rhs) == COND_EXPR
2289 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2290 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2291 {
2292 rtx label = gen_label_rtx ();
2293      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2294 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2295 DECL_RESULT (current_function_decl),
2296 TREE_OPERAND (retval_rhs, 1)));
2297 emit_label (label);
2298 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2299 DECL_RESULT (current_function_decl),
2300 TREE_OPERAND (retval_rhs, 2)));
2301 return;
2302 }
2303
2304 /* For tail-recursive call to current function,
2305 just jump back to the beginning.
2306 It's unsafe if any auto variable in this function
2307 has its address taken; for simplicity,
2308 require stack frame to be empty. */
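
  /* For example, given an empty frame, in `int f (x) int x;
     { ... return f (x - 1); }' the call is compiled as
     `x = x - 1; goto tail_recursion_label;' rather than as a real call,
     provided tail_recursion_args accepts the argument list.  */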
2309 if (optimize && retval_rhs != 0
2310 && frame_offset == 0
2311 && TREE_CODE (retval_rhs) == CALL_EXPR
2312 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2313 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2314 /* Finish checking validity, and if valid emit code
2315 to set the argument variables for the new call. */
2316 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2317 DECL_ARGUMENTS (current_function_decl)))
2318 {
2319 if (tail_recursion_label == 0)
2320 {
2321 tail_recursion_label = gen_label_rtx ();
2322 emit_label_after (tail_recursion_label,
2323 tail_recursion_reentry);
2324 }
2325      emit_queue ();
2326      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2327 emit_barrier ();
2328 return;
2329 }
2330#ifdef HAVE_return
2331 /* This optimization is safe if there are local cleanups
2332 because expand_null_return takes care of them.
2333 ??? I think it should also be safe when there is a cleanup label,
2334 because expand_null_return takes care of them, too.
2335 Any reason why not? */
2336 if (HAVE_return && cleanup_label == 0
2337 && ! current_function_returns_pcc_struct)
2338 {
2339 /* If this is return x == y; then generate
2340 if (x == y) return 1; else return 0;
2341 if we can do it with explicit return insns. */
2342 if (retval_rhs)
2343 switch (TREE_CODE (retval_rhs))
2344 {
2345 case EQ_EXPR:
2346 case NE_EXPR:
2347 case GT_EXPR:
2348 case GE_EXPR:
2349 case LT_EXPR:
2350 case LE_EXPR:
2351 case TRUTH_ANDIF_EXPR:
2352 case TRUTH_ORIF_EXPR:
2353 case TRUTH_AND_EXPR:
2354 case TRUTH_OR_EXPR:
2355 case TRUTH_NOT_EXPR:
2356 op0 = gen_label_rtx ();
2357 jumpifnot (retval_rhs, op0);
2358 expand_value_return (const1_rtx);
2359 emit_label (op0);
2360 expand_value_return (const0_rtx);
2361 return;
2362 }
2363 }
2364#endif /* HAVE_return */
2365
2366 if (cleanups
2367 && retval_rhs != 0
2368 && TREE_TYPE (retval_rhs) != void_type_node
2369 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2370 {
2371 /* Calculate the return value into a pseudo reg. */
2372      val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2373 emit_queue ();
2374 /* All temporaries have now been used. */
2375 free_temp_slots ();
2376 /* Return the calculated value, doing cleanups first. */
2377 expand_value_return (val);
2378 }
2379 else
2380 {
2381 /* No cleanups or no hard reg used;
2382 calculate value into hard return reg. */
2383      expand_expr (retval, NULL_RTX, VOIDmode, 0);
2384 emit_queue ();
2385 free_temp_slots ();
2386 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2387 }
2388}
2389
2390/* Return 1 if the end of the generated RTX is not a barrier.
2391 This means code already compiled can drop through. */
2392
2393int
2394drop_through_at_end_p ()
2395{
2396 rtx insn = get_last_insn ();
2397 while (insn && GET_CODE (insn) == NOTE)
2398 insn = PREV_INSN (insn);
2399 return insn && GET_CODE (insn) != BARRIER;
2400}
2401\f
2402/* Emit code to alter this function's formal parms for a tail-recursive call.
2403 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2404 FORMALS is the chain of decls of formals.
2405 Return 1 if this can be done;
2406 otherwise return 0 and do not emit any code. */
2407
2408static int
2409tail_recursion_args (actuals, formals)
2410 tree actuals, formals;
2411{
2412 register tree a = actuals, f = formals;
2413 register int i;
2414 register rtx *argvec;
2415
2416 /* Check that number and types of actuals are compatible
2417 with the formals. This is not always true in valid C code.
2418 Also check that no formal needs to be addressable
2419 and that all formals are scalars. */
2420
2421 /* Also count the args. */
2422
2423 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2424 {
2425 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2426 return 0;
2427 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2428 return 0;
2429 }
2430 if (a != 0 || f != 0)
2431 return 0;
2432
2433 /* Compute all the actuals. */
2434
2435 argvec = (rtx *) alloca (i * sizeof (rtx));
2436
2437 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2438    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2439
2440 /* Find which actual values refer to current values of previous formals.
2441 Copy each of them now, before any formal is changed. */
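     E.g. in a self-call `f (b, a)' of `f (a, b)', storing B into A
     first would clobber the A needed by the second actual, so that
     actual's value is copied to a fresh pseudo before any formal
     is changed.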
2442
2443 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2444 {
2445 int copy = 0;
2446 register int j;
2447 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2448 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2449 { copy = 1; break; }
2450 if (copy)
2451 argvec[i] = copy_to_reg (argvec[i]);
2452 }
2453
2454 /* Store the values of the actuals into the formals. */
2455
2456 for (f = formals, a = actuals, i = 0; f;
2457 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2458 {
2459      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2460 emit_move_insn (DECL_RTL (f), argvec[i]);
2461 else
2462 convert_move (DECL_RTL (f), argvec[i],
2463 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2464 }
2465
2466 free_temp_slots ();
2467 return 1;
2468}
2469\f
2470/* Generate the RTL code for entering a binding contour.
2471 The variables are declared one by one, by calls to `expand_decl'.
2472
2473 EXIT_FLAG is nonzero if this construct should be visible to
2474 `exit_something'. */
2475
2476void
2477expand_start_bindings (exit_flag)
2478 int exit_flag;
2479{
2480 struct nesting *thisblock = ALLOC_NESTING ();
2481
2482  rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2483
2484 /* Make an entry on block_stack for the block we are entering. */
2485
2486 thisblock->next = block_stack;
2487 thisblock->all = nesting_stack;
2488 thisblock->depth = ++nesting_depth;
2489 thisblock->data.block.stack_level = 0;
2490 thisblock->data.block.cleanups = 0;
2491 thisblock->data.block.function_call_count = 0;
2492#if 0
2493 if (block_stack)
2494 {
2495 if (block_stack->data.block.cleanups == NULL_TREE
2496 && (block_stack->data.block.outer_cleanups == NULL_TREE
2497 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2498 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2499 else
2500 thisblock->data.block.outer_cleanups
2501 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2502 block_stack->data.block.outer_cleanups);
2503 }
2504 else
2505 thisblock->data.block.outer_cleanups = 0;
2506#endif
2507#if 1
2508 if (block_stack
2509 && !(block_stack->data.block.cleanups == NULL_TREE
2510 && block_stack->data.block.outer_cleanups == NULL_TREE))
2511 thisblock->data.block.outer_cleanups
2512 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2513 block_stack->data.block.outer_cleanups);
2514 else
2515 thisblock->data.block.outer_cleanups = 0;
2516#endif
2517 thisblock->data.block.label_chain = 0;
2518 thisblock->data.block.innermost_stack_block = stack_block_stack;
2519 thisblock->data.block.first_insn = note;
2520 thisblock->data.block.block_start_count = ++block_start_count;
2521 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2522 block_stack = thisblock;
2523 nesting_stack = thisblock;
2524
2525 /* Make a new level for allocating stack slots. */
2526 push_temp_slots ();
2527}
2528
2529/* Given a pointer to a BLOCK node, save a pointer to the most recently
2530 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2531 BLOCK node. */
2532
2533void
2534remember_end_note (block)
2535 register tree block;
2536{
2537 BLOCK_END_NOTE (block) = last_block_end_note;
2538 last_block_end_note = NULL_RTX;
2539}
2540
2541/* Generate RTL code to terminate a binding contour.
2542 VARS is the chain of VAR_DECL nodes
2543 for the variables bound in this contour.
2544 MARK_ENDS is nonzero if we should put a note at the beginning
2545 and end of this binding contour.
2546
2547 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2548 (That is true automatically if the contour has a saved stack level.) */
2549
2550void
2551expand_end_bindings (vars, mark_ends, dont_jump_in)
2552 tree vars;
2553 int mark_ends;
2554 int dont_jump_in;
2555{
2556 register struct nesting *thisblock = block_stack;
2557 register tree decl;
2558
2559 if (warn_unused)
2560 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2561 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2562 && ! DECL_IN_SYSTEM_HEADER (decl))
2563 warning_with_decl (decl, "unused variable `%s'");
2564
2565 if (thisblock->exit_label)
2566 {
2567 do_pending_stack_adjust ();
2568 emit_label (thisblock->exit_label);
2569 }
2570
2571 /* If necessary, make a handler for nonlocal gotos taking
2572 place in the function calls in this block. */
2573 if (function_call_count != thisblock->data.block.function_call_count
2574 && nonlocal_labels
2575 /* Make handler for outermost block
2576 if there were any nonlocal gotos to this function. */
2577 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2578 /* Make handler for inner block if it has something
2579 special to do when you jump out of it. */
2580 : (thisblock->data.block.cleanups != 0
2581 || thisblock->data.block.stack_level != 0)))
2582 {
2583 tree link;
2584 rtx afterward = gen_label_rtx ();
2585 rtx handler_label = gen_label_rtx ();
2586 rtx save_receiver = gen_reg_rtx (Pmode);
2587
2588 /* Don't let jump_optimize delete the handler. */
2589 LABEL_PRESERVE_P (handler_label) = 1;
2590
2591 /* Record the handler address in the stack slot for that purpose,
2592 during this block, saving and restoring the outer value. */
2593 if (thisblock->next != 0)
2594 {
2595 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2596 emit_insn_before (gen_move_insn (save_receiver,
2597 nonlocal_goto_handler_slot),
2598 thisblock->data.block.first_insn);
2599 }
2600 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2601 gen_rtx (LABEL_REF, Pmode,
2602 handler_label)),
2603 thisblock->data.block.first_insn);
2604
2605 /* Jump around the handler; it runs only when specially invoked. */
2606 emit_jump (afterward);
2607 emit_label (handler_label);
2608
2609#ifdef HAVE_nonlocal_goto
2610 if (! HAVE_nonlocal_goto)
2611#endif
2612 /* First adjust our frame pointer to its actual value. It was
2613 previously set to the start of the virtual area corresponding to
2614 the stacked variables when we branched here and now needs to be
2615 adjusted to the actual hardware fp value.
2616
2617	 Assignments to virtual registers are converted by
2618 instantiate_virtual_regs into the corresponding assignment
2619 to the underlying register (fp in this case) that makes
2620 the original assignment true.
2621 So the following insn will actually be
2622 decrementing fp by STARTING_FRAME_OFFSET. */
2623 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2624
2625#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2626 if (fixed_regs[ARG_POINTER_REGNUM])
2627 {
2628#ifdef ELIMINABLE_REGS
2629 /* If the argument pointer can be eliminated in favor of the
2630 frame pointer, we don't need to restore it. We assume here
2631 that if such an elimination is present, it can always be used.
2632 This is the case on all known machines; if we don't make this
2633 assumption, we do unnecessary saving on many machines. */
2634 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2635 int i;
2636
2637 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2638 if (elim_regs[i].from == ARG_POINTER_REGNUM
2639 && elim_regs[i].to == FRAME_POINTER_REGNUM)
2640 break;
2641
2642 if (i == sizeof elim_regs / sizeof elim_regs [0])
2643#endif
2644 {
2645 /* Now restore our arg pointer from the address at which it
2646 was saved in our stack frame.
2647	     If there hasn't been space allocated for it yet, make
2648 some now. */
2649 if (arg_pointer_save_area == 0)
2650 arg_pointer_save_area
2651 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2652 emit_move_insn (virtual_incoming_args_rtx,
2653 /* We need a pseudo here, or else
2654 instantiate_virtual_regs_1 complains. */
2655 copy_to_reg (arg_pointer_save_area));
2656 }
2657 }
2658#endif
2659
2660 /* The handler expects the desired label address in the static chain
2661 register. It tests the address and does an appropriate jump
2662 to whatever label is desired. */
2663 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2664 /* Skip any labels we shouldn't be able to jump to from here. */
2665 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2666 {
2667 rtx not_this = gen_label_rtx ();
2668 rtx this = gen_label_rtx ();
2669 do_jump_if_equal (static_chain_rtx,
2670 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2671 this, 0);
2672 emit_jump (not_this);
2673 emit_label (this);
2674 expand_goto (TREE_VALUE (link));
2675 emit_label (not_this);
2676 }
2677 /* If label is not recognized, abort. */
2678 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2679 VOIDmode, 0);
2680 emit_label (afterward);
2681 }
2682
2683 /* Don't allow jumping into a block that has cleanups or a stack level. */
2684 if (dont_jump_in
2685 || thisblock->data.block.stack_level != 0
2686 || thisblock->data.block.cleanups != 0)
2687 {
2688 struct label_chain *chain;
2689
2690 /* Any labels in this block are no longer valid to go to.
2691 Mark them to cause an error message. */
2692 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2693 {
2694 DECL_TOO_LATE (chain->label) = 1;
2695 /* If any goto without a fixup came to this label,
2696 that must be an error, because gotos without fixups
2697 come from outside all saved stack-levels and all cleanups. */
2698 if (TREE_ADDRESSABLE (chain->label))
2699 error_with_decl (chain->label,
2700 "label `%s' used before containing binding contour");
2701 }
2702 }
2703
2704 /* Restore stack level in effect before the block
2705 (only if variable-size objects allocated). */
2706 /* Perform any cleanups associated with the block. */
2707
2708 if (thisblock->data.block.stack_level != 0
2709 || thisblock->data.block.cleanups != 0)
2710 {
2711 /* Don't let cleanups affect ({...}) constructs. */
2712 int old_expr_stmts_for_value = expr_stmts_for_value;
2713 rtx old_last_expr_value = last_expr_value;
2714 tree old_last_expr_type = last_expr_type;
2715 expr_stmts_for_value = 0;
2716
2717 /* Do the cleanups. */
2718      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2719 do_pending_stack_adjust ();
2720
2721 expr_stmts_for_value = old_expr_stmts_for_value;
2722 last_expr_value = old_last_expr_value;
2723 last_expr_type = old_last_expr_type;
2724
2725 /* Restore the stack level. */
2726
2727 if (thisblock->data.block.stack_level != 0)
2728 {
2729	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2730			      thisblock->data.block.stack_level, NULL_RTX);
2731	  if (nonlocal_goto_handler_slot != 0)
2732	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
2733			     NULL_RTX);
2734 }
2735
2736 /* Any gotos out of this block must also do these things.
2737 Also report any gotos with fixups that came to labels in this
2738 level. */
2739 fixup_gotos (thisblock,
2740 thisblock->data.block.stack_level,
2741 thisblock->data.block.cleanups,
2742 thisblock->data.block.first_insn,
2743 dont_jump_in);
2744 }
2745
2746 /* Mark the beginning and end of the scope if requested.
2747 We do this now, after running cleanups on the variables
2748 just going out of scope, so they are in scope for their cleanups. */
2749
2750 if (mark_ends)
2751    last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
2752 else
2753 /* Get rid of the beginning-mark if we don't make an end-mark. */
2754 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2755
2756 /* If doing stupid register allocation, make sure lives of all
2757 register variables declared here extend thru end of scope. */
2758
2759 if (obey_regdecls)
2760 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2761 {
2762 rtx rtl = DECL_RTL (decl);
2763 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2764 use_variable (rtl);
2765 }
2766
2767 /* Restore block_stack level for containing block. */
2768
2769 stack_block_stack = thisblock->data.block.innermost_stack_block;
2770 POPSTACK (block_stack);
2771
2772 /* Pop the stack slot nesting and free any slots at this level. */
2773 pop_temp_slots ();
2774}
2775\f
2776/* Generate RTL for the automatic variable declaration DECL.
2777 (Other kinds of declarations are simply ignored if seen here.)
2778 CLEANUP is an expression to be executed at exit from this binding contour;
2779 for example, in C++, it might call the destructor for this variable.
2780
2781 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2782 either before or after calling `expand_decl' but before compiling
2783 any subsequent expressions. This is because CLEANUP may be expanded
2784 more than once, on different branches of execution.
2785 For the same reason, CLEANUP may not contain a CALL_EXPR
2786 except as its topmost node--else `preexpand_calls' would get confused.
2787
2788 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2789 that is not associated with any particular variable.
2790
2791 There is no special support here for C++ constructors.
2792 They should be handled by the proper code in DECL_INITIAL. */
2793
2794void
2795expand_decl (decl)
2796 register tree decl;
2797{
2798 struct nesting *thisblock = block_stack;
2799 tree type = TREE_TYPE (decl);
2800
2801 /* Only automatic variables need any expansion done.
2802 Static and external variables, and external functions,
2803 will be handled by `assemble_variable' (called from finish_decl).
2804 TYPE_DECL and CONST_DECL require nothing.
2805 PARM_DECLs are handled in `assign_parms'. */
2806
2807 if (TREE_CODE (decl) != VAR_DECL)
2808 return;
2809  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2810 return;
2811
2812 /* Create the RTL representation for the variable. */
2813
2814 if (type == error_mark_node)
2815 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2816 else if (DECL_SIZE (decl) == 0)
2817 /* Variable with incomplete type. */
2818 {
2819 if (DECL_INITIAL (decl) == 0)
2820 /* Error message was already done; now avoid a crash. */
2821 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2822 else
2823 /* An initializer is going to decide the size of this array.
2824 Until we know the size, represent its address with a reg. */
2825 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2826 }
2827 else if (DECL_MODE (decl) != BLKmode
2828 /* If -ffloat-store, don't put explicit float vars
2829 into regs. */
2830 && !(flag_float_store
2831 && TREE_CODE (type) == REAL_TYPE)
2832 && ! TREE_THIS_VOLATILE (decl)
2833 && ! TREE_ADDRESSABLE (decl)
2834	   && (DECL_REGISTER (decl) || ! obey_regdecls))
2835 {
2836 /* Automatic variable that can go in a register. */
2837 enum machine_mode reg_mode = DECL_MODE (decl);
2838 int unsignedp = TREE_UNSIGNED (type);
2839
2840 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2841 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2842 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2843 || TREE_CODE (type) == OFFSET_TYPE)
2844 {
2845 PROMOTE_MODE (reg_mode, unsignedp, type);
2846 }
2847
2848 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
2849 if (TREE_CODE (type) == POINTER_TYPE)
2850 mark_reg_pointer (DECL_RTL (decl));
2851 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2852 }
2853 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2854 {
2855 /* Variable of fixed size that goes on the stack. */
2856 rtx oldaddr = 0;
2857 rtx addr;
2858
2859 /* If we previously made RTL for this decl, it must be an array
2860 whose size was determined by the initializer.
2861 The old address was a register; set that register now
2862 to the proper address. */
2863 if (DECL_RTL (decl) != 0)
2864 {
2865 if (GET_CODE (DECL_RTL (decl)) != MEM
2866 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2867 abort ();
2868 oldaddr = XEXP (DECL_RTL (decl), 0);
2869 }
2870
2871 DECL_RTL (decl)
2872 = assign_stack_temp (DECL_MODE (decl),
2873 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2874 + BITS_PER_UNIT - 1)
2875 / BITS_PER_UNIT),
2876 1);
2877
2878 /* Set alignment we actually gave this decl. */
2879 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2880 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2881
2882 if (oldaddr)
2883 {
2884 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2885 if (addr != oldaddr)
2886 emit_move_insn (oldaddr, addr);
2887 }
2888
2889 /* If this is a memory ref that contains aggregate components,
2890 mark it as such for cse and loop optimize. */
2891 MEM_IN_STRUCT_P (DECL_RTL (decl))
2892 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2893 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2894 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2895#if 0
2896 /* If this is in memory because of -ffloat-store,
2897 set the volatile bit, to prevent optimizations from
2898 undoing the effects. */
2899 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2900 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2901#endif
2902 }
2903 else
2904 /* Dynamic-size object: must push space on the stack. */
2905 {
2906 rtx address, size;
2907
2908 /* Record the stack pointer on entry to block, if have
2909 not already done so. */
2910 if (thisblock->data.block.stack_level == 0)
2911 {
2912 do_pending_stack_adjust ();
2913 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2914 &thisblock->data.block.stack_level,
2915 thisblock->data.block.first_insn);
2916 stack_block_stack = thisblock;
2917 }
2918
2919 /* Compute the variable's size, in bytes. */
2920 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2921 DECL_SIZE (decl),
2922 size_int (BITS_PER_UNIT)),
2923			  NULL_RTX, VOIDmode, 0);
2924 free_temp_slots ();
2925
2926 /* This is equivalent to calling alloca. */
2927 current_function_calls_alloca = 1;
2928
2929      /* Allocate space on the stack for the variable. */
2930      address = allocate_dynamic_stack_space (size, NULL_RTX,
2931					      DECL_ALIGN (decl));
2932
2933      if (nonlocal_goto_handler_slot != 0)
2934	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2935
2936 /* Reference the variable indirect through that rtx. */
2937 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2938
2939 /* If this is a memory ref that contains aggregate components,
2940 mark it as such for cse and loop optimize. */
2941 MEM_IN_STRUCT_P (DECL_RTL (decl))
2942 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2943 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2944 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2945
2946 /* Indicate the alignment we actually gave this variable. */
2947#ifdef STACK_BOUNDARY
2948 DECL_ALIGN (decl) = STACK_BOUNDARY;
2949#else
2950 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2951#endif
2952 }
2953
2954 if (TREE_THIS_VOLATILE (decl))
2955 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2956 if (TREE_READONLY (decl))
2957 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2958
2959 /* If doing stupid register allocation, make sure life of any
2960 register variable starts here, at the start of its scope. */
2961
2962 if (obey_regdecls)
2963 use_variable (DECL_RTL (decl));
2964}
2965\f
2966/* Emit code to perform the initialization of a declaration DECL. */
2967
2968void
2969expand_decl_init (decl)
2970 tree decl;
2971{
2972 int was_used = TREE_USED (decl);
2973
2974 if (TREE_STATIC (decl))
2975 return;
2976
2977 /* Compute and store the initial value now. */
2978
2979 if (DECL_INITIAL (decl) == error_mark_node)
2980 {
2981 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2982 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2983 || code == POINTER_TYPE)
2984 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2985 0, 0);
2986 emit_queue ();
2987 }
2988 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2989 {
2990 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2991 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2992 emit_queue ();
2993 }
2994
2995 /* Don't let the initialization count as "using" the variable. */
2996 TREE_USED (decl) = was_used;
2997
2998 /* Free any temporaries we made while initializing the decl. */
2999 free_temp_slots ();
3000}
3001
3002/* CLEANUP is an expression to be executed at exit from this binding contour;
3003 for example, in C++, it might call the destructor for this variable.
3004
3005 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3006 either before or after calling `expand_decl' but before compiling
3007 any subsequent expressions. This is because CLEANUP may be expanded
3008 more than once, on different branches of execution.
3009 For the same reason, CLEANUP may not contain a CALL_EXPR
3010 except as its topmost node--else `preexpand_calls' would get confused.
3011
3012 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3013 that is not associated with any particular variable. */
3014
3015int
3016expand_decl_cleanup (decl, cleanup)
3017 tree decl, cleanup;
3018{
3019 struct nesting *thisblock = block_stack;
3020
3021 /* Error if we are not in any block. */
3022 if (thisblock == 0)
3023 return 0;
3024
3025 /* Record the cleanup if there is one. */
3026
3027 if (cleanup != 0)
3028 {
3029 thisblock->data.block.cleanups
3030 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3031 /* If this block has a cleanup, it belongs in stack_block_stack. */
3032 stack_block_stack = thisblock;
3033 }
3034 return 1;
3035}
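
/* In C++, for example, the front end might register a destructor this
   way (build_destructor_call is a hypothetical front-end helper):

	expand_decl (decl);
	expand_decl_init (decl);
	expand_decl_cleanup (decl, build_destructor_call (decl));
*/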
3036\f
3037/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3038 DECL_ELTS is the list of elements that belong to DECL's type.
3039 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3040
3041void
3042expand_anon_union_decl (decl, cleanup, decl_elts)
3043 tree decl, cleanup, decl_elts;
3044{
3045 struct nesting *thisblock = block_stack;
3046 rtx x;
3047
3048  expand_decl (decl);
3049 x = DECL_RTL (decl);
3050
3051 while (decl_elts)
3052 {
3053 tree decl_elt = TREE_VALUE (decl_elts);
3054 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3055 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3056
3057 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3058 instead create a new MEM rtx with the proper mode. */
3059 if (GET_CODE (x) == MEM)
3060 {
3061 if (mode == GET_MODE (x))
3062 DECL_RTL (decl_elt) = x;
3063 else
3064 {
3065 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3066 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3067 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3068 }
3069 }
3070 else if (GET_CODE (x) == REG)
3071 {
3072 if (mode == GET_MODE (x))
3073 DECL_RTL (decl_elt) = x;
3074 else
3075 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3076 }
3077 else
3078 abort ();
3079
3080 /* Record the cleanup if there is one. */
3081
3082 if (cleanup != 0)
3083 thisblock->data.block.cleanups
3084 = temp_tree_cons (decl_elt, cleanup_elt,
3085 thisblock->data.block.cleanups);
3086
3087 decl_elts = TREE_CHAIN (decl_elts);
3088 }
3089}
3090\f
3091/* Expand a list of cleanups LIST.
3092 Elements may be expressions or may be nested lists.
3093
3094 If DONT_DO is nonnull, then any list-element
3095 whose TREE_PURPOSE matches DONT_DO is omitted.
3096 This is sometimes used to avoid a cleanup associated with
3097 a value that is being returned out of the scope. */
3098
3099static void
3100expand_cleanups (list, dont_do)
3101 tree list;
3102 tree dont_do;
3103{
3104 tree tail;
3105 for (tail = list; tail; tail = TREE_CHAIN (tail))
3106 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3107 {
3108 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3109 expand_cleanups (TREE_VALUE (tail), dont_do);
3110 else
3111 {
3112 /* Cleanups may be run multiple times. For example,
3113 when exiting a binding contour, we expand the
3114 cleanups associated with that contour. When a goto
3115 within that binding contour has a target outside that
3116 contour, it will expand all cleanups from its scope to
3117 the target. Though the cleanups are expanded multiple
3118 times, the control paths are non-overlapping so the
3119 cleanups will not be executed twice. */
3120 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3121 free_temp_slots ();
3122 }
3123 }
3124}
3125
3126/* Move all cleanups from the current block_stack
3127 to the containing block_stack, where they are assumed to
3128 have been created. If anything can cause a temporary to
3129 be created, but not expanded for more than one level of
3130 block_stacks, then this code will have to change. */
3131
3132void
3133move_cleanups_up ()
3134{
3135 struct nesting *block = block_stack;
3136 struct nesting *outer = block->next;
3137
3138 outer->data.block.cleanups
3139 = chainon (block->data.block.cleanups,
3140 outer->data.block.cleanups);
3141 block->data.block.cleanups = 0;
3142}
3143
3144tree
3145last_cleanup_this_contour ()
3146{
3147 if (block_stack == 0)
3148 return 0;
3149
3150 return block_stack->data.block.cleanups;
3151}
3152
3153/* Return 1 if there are any pending cleanups at this point.
3154 If THIS_CONTOUR is nonzero, check the current contour as well.
3155 Otherwise, look only at the contours that enclose this one. */
3156
3157int
3158any_pending_cleanups (this_contour)
3159 int this_contour;
3160{
3161 struct nesting *block;
3162
3163 if (block_stack == 0)
3164 return 0;
3165
3166 if (this_contour && block_stack->data.block.cleanups != NULL)
3167 return 1;
3168 if (block_stack->data.block.cleanups == 0
3169 && (block_stack->data.block.outer_cleanups == 0
3170#if 0
3171 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3172#endif
3173 ))
3174 return 0;
3175
3176 for (block = block_stack->next; block; block = block->next)
3177 if (block->data.block.cleanups != 0)
3178 return 1;
3179
3180 return 0;
3181}
3182\f
3183/* Enter a case (Pascal) or switch (C) statement.
3184 Push a block onto case_stack and nesting_stack
3185 to accumulate the case-labels that are seen
3186 and to record the labels generated for the statement.
3187
3188 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3189 Otherwise, this construct is transparent for `exit_something'.
3190
3191 EXPR is the index-expression to be dispatched on.
3192 TYPE is its nominal type. We could simply convert EXPR to this type,
3193 but instead we take short cuts. */
3194
3195void
3196expand_start_case (exit_flag, expr, type, printname)
3197 int exit_flag;
3198 tree expr;
3199 tree type;
3200 char *printname;
3201{
3202 register struct nesting *thiscase = ALLOC_NESTING ();
3203
3204 /* Make an entry on case_stack for the case we are entering. */
3205
3206 thiscase->next = case_stack;
3207 thiscase->all = nesting_stack;
3208 thiscase->depth = ++nesting_depth;
3209 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3210 thiscase->data.case_stmt.case_list = 0;
3211 thiscase->data.case_stmt.index_expr = expr;
3212 thiscase->data.case_stmt.nominal_type = type;
3213 thiscase->data.case_stmt.default_label = 0;
3214 thiscase->data.case_stmt.num_ranges = 0;
3215 thiscase->data.case_stmt.printname = printname;
3216 thiscase->data.case_stmt.seenlabel = 0;
3217 case_stack = thiscase;
3218 nesting_stack = thiscase;
3219
3220 do_pending_stack_adjust ();
3221
3222 /* Make sure case_stmt.start points to something that won't
3223 need any transformation before expand_end_case. */
3224 if (GET_CODE (get_last_insn ()) != NOTE)
3225    emit_note (NULL_PTR, NOTE_INSN_DELETED);
3226
3227 thiscase->data.case_stmt.start = get_last_insn ();
3228}
3229
3230/* Start a "dummy case statement" within which case labels are invalid
3231 and are not connected to any larger real case statement.
3232 This can be used if you don't want to let a case statement jump
3233 into the middle of certain kinds of constructs. */
3234
3235void
3236expand_start_case_dummy ()
3237{
3238 register struct nesting *thiscase = ALLOC_NESTING ();
3239
3240 /* Make an entry on case_stack for the dummy. */
3241
3242 thiscase->next = case_stack;
3243 thiscase->all = nesting_stack;
3244 thiscase->depth = ++nesting_depth;
3245 thiscase->exit_label = 0;
3246 thiscase->data.case_stmt.case_list = 0;
3247 thiscase->data.case_stmt.start = 0;
3248 thiscase->data.case_stmt.nominal_type = 0;
3249 thiscase->data.case_stmt.default_label = 0;
3250 thiscase->data.case_stmt.num_ranges = 0;
3251 case_stack = thiscase;
3252 nesting_stack = thiscase;
3253}
3254
3255/* End a dummy case statement. */
3256
3257void
3258expand_end_case_dummy ()
3259{
3260 POPSTACK (case_stack);
3261}
3262
3263/* Return the data type of the index-expression
3264 of the innermost case statement, or null if none. */
3265
3266tree
3267case_index_expr_type ()
3268{
3269 if (case_stack)
3270 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3271 return 0;
3272}
3273\f
3274/* Accumulate one case or default label inside a case or switch statement.
3275 VALUE is the value of the case (a null pointer, for a default label).
3276
3277 If not currently inside a case or switch statement, return 1 and do
3278 nothing. The caller will print a language-specific error message.
3279 If VALUE is a duplicate or overlaps, return 2 and do nothing
3280 except store the (first) duplicate node in *DUPLICATE.
3281 If VALUE is out of range, return 3 and do nothing.
3282   If we are jumping into the scope of a cleanup or var-sized array, return 5.
3283 Return 0 on success.
3284
3285 Extended to handle range statements. */
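
/* A caller might dispatch on the return value roughly as follows
   (the error messages are placeholders):

	switch (pushcase (value, label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error_with_decl (duplicate, "duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }
*/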
3286
3287int
3288pushcase (value, label, duplicate)
3289 register tree value;
3290 register tree label;
3291 tree *duplicate;
3292{
3293 register struct case_node **l;
3294 register struct case_node *n;
3295 tree index_type;
3296 tree nominal_type;
3297
3298 /* Fail if not inside a real case statement. */
3299 if (! (case_stack && case_stack->data.case_stmt.start))
3300 return 1;
3301
3302 if (stack_block_stack
3303 && stack_block_stack->depth > case_stack->depth)
3304 return 5;
3305
3306 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3307 nominal_type = case_stack->data.case_stmt.nominal_type;
3308
3309 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3310 if (index_type == error_mark_node)
3311 return 0;
3312
3313 /* Convert VALUE to the type in which the comparisons are nominally done. */
3314 if (value != 0)
3315 value = convert (nominal_type, value);
3316
3317 /* If this is the first label, warn if any insns have been emitted. */
3318 if (case_stack->data.case_stmt.seenlabel == 0)
3319 {
3320 rtx insn;
3321 for (insn = case_stack->data.case_stmt.start;
3322 insn;
3323 insn = NEXT_INSN (insn))
3324 {
3325 if (GET_CODE (insn) == CODE_LABEL)
3326 break;
3327 if (GET_CODE (insn) != NOTE
3328 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3329 {
3330 warning ("unreachable code at beginning of %s",
3331 case_stack->data.case_stmt.printname);
3332 break;
3333 }
3334 }
3335 }
3336 case_stack->data.case_stmt.seenlabel = 1;
3337
3338 /* Fail if this value is out of range for the actual type of the index
3339 (which may be narrower than NOMINAL_TYPE). */
3340 if (value != 0 && ! int_fits_type_p (value, index_type))
3341 return 3;
3342
3343 /* Fail if this is a duplicate or overlaps another entry. */
3344 if (value == 0)
3345 {
3346 if (case_stack->data.case_stmt.default_label != 0)
3347 {
3348 *duplicate = case_stack->data.case_stmt.default_label;
3349 return 2;
3350 }
3351 case_stack->data.case_stmt.default_label = label;
3352 }
3353 else
3354 {
3355 /* Find the elt in the chain before which to insert the new value,
3356 to keep the chain sorted in increasing order.
3357 But report an error if this element is a duplicate. */
3358 for (l = &case_stack->data.case_stmt.case_list;
3359 /* Keep going past elements distinctly less than VALUE. */
3360 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3361 l = &(*l)->right)
3362 ;
3363 if (*l)
3364 {
3365 /* Element we will insert before must be distinctly greater;
3366 overlap means error. */
3367 if (! tree_int_cst_lt (value, (*l)->low))
3368 {
3369 *duplicate = (*l)->code_label;
3370 return 2;
3371 }
3372 }
3373
3374 /* Add this label to the chain, and succeed.
3375 Copy VALUE so it is on temporary rather than momentary
3376 obstack and will thus survive till the end of the case statement. */
3377 n = (struct case_node *) oballoc (sizeof (struct case_node));
3378 n->left = 0;
3379 n->right = *l;
3380 n->high = n->low = copy_node (value);
3381 n->code_label = label;
3382 *l = n;
3383 }
3384
3385 expand_label (label);
3386 return 0;
3387}
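
/* Illustrative sketch (hypothetical, editorial): a front end expanding
   `case 2:' would drive the return-code protocol above roughly as
   follows.  The diagnostics are placeholders; real callers such as the
   C parser word their errors differently.

	tree duplicate;
	tree label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	switch (pushcase (build_int_2 (2, 0), label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error ("duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }
*/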

/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

int
pushcase_range (value1, value2, label, duplicate)
     register tree value1, value2;
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0) /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = convert (nominal_type, value1);

  if (value2 == 0) /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = convert (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}
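
/* Illustrative sketch (hypothetical, editorial): the GNU extension
   `case LOW ... HIGH:' maps onto pushcase_range.  A null bound stands
   for the corresponding end of the index type's range, so

	pushcase_range (NULL_TREE, build_int_2 (9, 0), label, &duplicate)

   attaches LABEL to every index value up to and including 9, while a
   return of 4 would report an empty range such as `case 5 ... 3:'.  */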
\f
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of enumerals in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerals not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
	{
	  if (warn_switch)
	    warning ("enumeration value `%s' not handled in switch",
		     IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
	  all_values = 0;
	}
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerals.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (warn_switch)
    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      {
	for (chain = TYPE_VALUES (type);
	     chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	     chain = TREE_CHAIN (chain))
	  ;

	if (!chain)
	  warning ("case value `%d' not in enumerated type `%s'",
		   TREE_INT_CST_LOW (n->low),
		   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					== IDENTIFIER_NODE)
				       ? TYPE_NAME (type)
				       : DECL_NAME (TYPE_NAME (type))));

	if (!tree_int_cst_equal (n->low, n->high))
	  {
	    for (chain = TYPE_VALUES (type);
		 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
		 chain = TREE_CHAIN (chain))
	      ;

	    if (!chain)
	      warning ("case value `%d' not in enumerated type `%s'",
		       TREE_INT_CST_LOW (n->high),
		       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					    == IDENTIFIER_NODE)
					   ? TYPE_NAME (type)
					   : DECL_NAME (TYPE_NAME (type))));
	  }
      }

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
      *l = 0;
    }
}
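
/* Illustrative sketch (editorial): given

	enum color { RED, GREEN, BLUE };

   a `switch (c)' with cases for RED and GREEN only, no default label,
   and -Wswitch in effect reaches this function and warns that `BLUE'
   is not handled in the switch.  Had all three enumerals appeared, the
   last case in sorted order (BLUE) would have been promoted to the
   default label, so the switch could never fall through.  */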
\f
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label = gen_label_rtx ();
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr = thiscase->data.case_stmt.index_expr;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  rtx insn;
	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (TREE_TYPE (index_expr), n->low);
	  n->high = convert (TREE_TYPE (index_expr), n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
			     maxval, minval));

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}
      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimisation phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list;
		   n;
		   n = n->right)
		{
		  if (! tree_int_cst_lt (index_expr, n->low)
		      && ! tree_int_cst_lt (n->high, index_expr))
		    break;
		}
	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, TREE_TYPE (index_expr));
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
		  emit_jump_insn (gen_bltu (default_label));
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
		    index_expr = convert (type_for_size (index_bits, 0),
					  index_expr);
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
							      VOIDmode, 0),
					  expand_expr (range, NULL_RTX,
						       VOIDmode, 0),
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR,
						 TREE_TYPE (index_expr),
						 index_expr, minval)));
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero (labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
	  if (
#ifdef CASE_VECTOR_PC_RELATIVE
	      1 ||
#endif
	      flag_pic)
	    emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				     gen_rtx (LABEL_REF, Pmode, table_label),
				     gen_rtvec_v (ncases, labelvec)));
	  else
	    emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				     gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }
  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
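
/* Illustrative sketch (editorial): with the default threshold above, a
   dense switch over the eight values 1..8 (count 8, span 7) qualifies
   for a casesi or tablejump dispatch table, while a switch over just 1
   and 1000 (count 2, span 999, more than ten times the count) is
   expanded as the compare-and-branch tree built by balance_case_nodes
   and emit_case_nodes instead.  */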

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
\f
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
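
/* Illustrative sketch (editorial): for a scanner-like switch such as

	switch (c) { case 'a': ...  case '0': ...  case ' ': ... }

   every case value lies in [-1, 127] and none is a strange control
   character, so this returns 1; balance_case_nodes will then weight
   'a' and '0' at 16 and ' ' at 8 when choosing its pivots.  */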

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost.  Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
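
/* Illustrative sketch (editorial): with the cost table unused, the
   sorted chain of single-value nodes

	1 -> 2 -> 3 -> 4 -> 5

   is split at the pivot 3; the two-node chains 1->2 and 4->5 become
   its left and right branches and, being too short to split further,
   stay one level deep.  Selecting a case then costs O(log N)
   comparisons rather than a linear scan of the chain.  */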
\f
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
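
/* Illustrative sketch (editorial): if control reached a node for the
   range 51..60 through a parent that tested against 50 and branched
   here on the greater-than side, node_has_low_bound finds that parent
   (its high bound, 50, equals 51 - 1) and reports the `index >= 51'
   test redundant; only the `index <= 60' test need be emitted.  */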

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
\f
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
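
/* Illustrative sketch (editorial): for the balanced tree built from
   `case 1: case 2: case 3:' with pivot 2, the insns emitted below
   behave like

	if (index == 2) goto L2;
	if (index > 2) goto right;
	if (index == 1) goto L1;
	goto dflt;
   right:
	if (index == 3) goto L3;
	goto dflt;

   Range nodes additionally get compares against their low and high
   bounds, each dropped whenever node_has_low_bound or
   node_has_high_bound proves a parent already made the check.  */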

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
\f
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
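
/* Illustrative sketch (hypothetical, editorial): the unroller is
   expected to bracket its work with these two calls, roughly

	find_loop_tree_blocks ();
	... duplicate loop bodies and their block notes ...
	unroll_block_trees ();

   so that the BLOCK tree under DECL_INITIAL is reordered to match the
   duplicated notes and the debugging info stays consistent.  */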