/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

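/* For example (an editor's sketch, not part of the original source),
   a C front end expanding `if (cond) stmt;' makes roughly these calls:

	expand_start_cond (cond, 0);
	  ...expand stmt...
	expand_end_cond ();
*/
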
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0 /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

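/* Editor's sketch (not part of the original source): for

	switch (i) { case 1: ...  case 5 ... 7: ... }

   parsing initially builds, through the RIGHT fields,

	{low=1, high=1} -> {low=5, high=7} -> 0

   and if no branch table is emitted, the same nodes are relinked into a
   binary tree through LEFT, RIGHT and PARENT before output.  */
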
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

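/* Editor's sketch (not part of the original source): while expanding

	while (...)		loop_stack entry, depth 1
	  if (...)		cond_stack entry, depth 2
	    { int v; ... }	block_stack entry, depth 3

   each construct sits on the sub-stack declared below for its own kind,
   and `nesting_stack' chains all three objects through their `all'
   fields, innermost first.  */
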
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
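
/* Editor's note (not part of the original source): for example,
   expand_end_cond below does `POPSTACK (cond_stack);', which frees the
   innermost cond object and unwinds `nesting_stack' and `nesting_depth'
   accordingly.  */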
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

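/* Editor's sketch (not part of the original source): a fixup arises from
   code such as

	{
	  int a[n];	(variable-size array: block must reset the stack)
	  ...
	  goto lab;	(forward goto out of the contour)
	}
      lab: ...

   When the goto is expanded, `lab' is not yet defined, so the
   stack-restore insns cannot be emitted yet; expand_fixup records the
   jump and fixup_gotos finishes it once the contour is exited.  */
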
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
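
/* Editor's note (not part of the original source): this expands the GNU C
   computed goto extension,

	void *p = &&lab;
	...
	goto *p;

   where the front end passes the pointer expression (here `p') as EXP.  */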
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

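/* Editor's sketch (not part of the original source): with GNU C nested
   functions, a label can be the target of a goto from an inner function:

	int f ()
	{
	  void g () { goto lab; }	(nonlocal goto back into f)
	  ...
	lab:
	  ...
	}

   The handler slot and saved stack level set up above are what such a
   goto uses to restore f's frame.  */
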
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

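/* Editor's note (not part of the original source): this handles a plain
   asm statement with no operands, e.g.

	asm ("nop");

   which becomes a single ASM_INPUT rtx.  */
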
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

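/* Editor's sketch (not part of the original source): a typical extended
   asm such as

	asm volatile ("fsinx %1,%0" : "=f" (result) : "f" (angle) : "cc");

   arrives here with STRING holding the template, OUTPUTS and INPUTS the
   constraint/expression pairs, CLOBBERS the list naming "cc", and VOL
   set because of `volatile'.  */
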
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

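/* Editor's sketch (not part of the original source): together with the
   check in expand_expr_stmt, this gives warnings such as

	x + 1;		(statement with no effect)
	f (), x + 1;	(value computed is not used)

   while staying quiet for `(void) x;', `i++;', and `(f (), 0)'.  */
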
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

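/* Editor's sketch (not part of the original source): for a GNU C statement
   expression such as

	y = ({ int t = f (x); t * 2; });

   the front end calls expand_start_stmt_expr before the body and passes
   the returned RTL_EXPR to expand_end_stmt_expr afterwards; the value of
   the last expr-stmt (`t * 2') becomes the value of the whole construct.  */
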
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }  */

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}

/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (NULL_TREE,
			      n->data.except_stmt.escape_label, NULL_RTX);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

1686/* Record that exception EX is caught by this exception handler.
1687 Return nonzero if in exception handling construct, otherwise return 0. */
1688int
1689expand_catch (ex)
1690 tree ex;
1691{
1692 tree *raises_ptr;
1693
1694 if (except_stack == 0)
1695 return 0;
1696 raises_ptr = &except_stack->data.except_stmt.handled;
1697 if (*raises_ptr != void_type_node
1698 && ex != NULL_TREE
1699 && ! value_member (ex, *raises_ptr))
1700 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1701 return 1;
1702}
1703
1704/* Record that this exception handler catches all exceptions.
1705 Return nonzero if in exception handling construct, otherwise return 0. */
1706
1707int
1708expand_catch_default ()
1709{
1710 if (except_stack == 0)
1711 return 0;
1712 except_stack->data.except_stmt.handled = void_type_node;
1713 return 1;
1714}
1715
1716int
1717expand_end_catch ()
1718{
1719 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
1720 return 0;
1721 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1722 NULL_RTX);
1723 return 1;
1724}
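
/* Illustrative call sequence (a sketch only, not part of the compiler):
   roughly how a front end might drive the catch-clause routines above.
   The routine that opens the `except' contour appears earlier in this
   file; EXCEPTION_ID_TREE, RAISED and the elided expansions are
   placeholders.  */
#if 0
      /* ... open the except contour; expand the guarded statements ... */
      expand_catch (exception_id_tree); /* handler for EXCEPTION_ID */
      /* ... expand this handler's statements ... */
      expand_end_catch ();              /* jump past remaining handlers */
      expand_catch_default ();          /* handler for anything else */
      /* ... expand the default handler's statements ... */
      expand_end_catch ();
      raised = expand_end_except ();    /* exceptions raised but unhandled */
#endif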
1725\f
1726/* Generate RTL for the start of an if-then. COND is the expression
1727 whose truth should be tested.
1728
1729 If EXITFLAG is nonzero, this conditional is visible to
1730 `exit_something'. */
1731
1732void
1733expand_start_cond (cond, exitflag)
1734 tree cond;
1735 int exitflag;
1736{
1737 struct nesting *thiscond = ALLOC_NESTING ();
1738
1739 /* Make an entry on cond_stack for the cond we are entering. */
1740
1741 thiscond->next = cond_stack;
1742 thiscond->all = nesting_stack;
1743 thiscond->depth = ++nesting_depth;
1744 thiscond->data.cond.next_label = gen_label_rtx ();
1745 /* Before we encounter an `else', we don't need a separate exit label
1746 unless there are supposed to be exit statements
1747 to exit this conditional. */
1748 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1749 thiscond->data.cond.endif_label = thiscond->exit_label;
1750 cond_stack = thiscond;
1751 nesting_stack = thiscond;
1752
1753  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1754}
1755
1756/* Generate RTL between the then-clause and the elseif-clause
1757 of an if-then-elseif-.... */
1758
1759void
1760expand_start_elseif (cond)
1761 tree cond;
1762{
1763 if (cond_stack->data.cond.endif_label == 0)
1764 cond_stack->data.cond.endif_label = gen_label_rtx ();
1765 emit_jump (cond_stack->data.cond.endif_label);
1766 emit_label (cond_stack->data.cond.next_label);
1767 cond_stack->data.cond.next_label = gen_label_rtx ();
1768  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1769}
1770
1771/* Generate RTL between the then-clause and the else-clause
1772 of an if-then-else. */
1773
1774void
1775expand_start_else ()
1776{
1777 if (cond_stack->data.cond.endif_label == 0)
1778 cond_stack->data.cond.endif_label = gen_label_rtx ();
1779 emit_jump (cond_stack->data.cond.endif_label);
1780 emit_label (cond_stack->data.cond.next_label);
1781 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1782}
1783
1784/* Generate RTL for the end of an if-then.
1785 Pop the record for it off of cond_stack. */
1786
1787void
1788expand_end_cond ()
1789{
1790 struct nesting *thiscond = cond_stack;
1791
1792 do_pending_stack_adjust ();
1793 if (thiscond->data.cond.next_label)
1794 emit_label (thiscond->data.cond.next_label);
1795 if (thiscond->data.cond.endif_label)
1796 emit_label (thiscond->data.cond.endif_label);
1797
1798 POPSTACK (cond_stack);
1799 last_expr_type = 0;
1800}
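
/* Illustrative call sequence (a sketch, not part of the compiler):
   how a front end uses the routines above to expand
   `if (C1) S1; else if (C2) S2; else S3;'.  The statement expansions
   are elided.  */
#if 0
      expand_start_cond (c1_tree, 0);  /* jump to next_label if C1 false */
      /* ... expand statements of S1 ... */
      expand_start_elseif (c2_tree);   /* end S1; test C2 */
      /* ... expand statements of S2 ... */
      expand_start_else ();            /* end S2; begin S3 */
      /* ... expand statements of S3 ... */
      expand_end_cond ();              /* emit next_label and endif_label */
#endif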
1801\f
1802/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1803 loop should be exited by `exit_something'. This is a loop for which
1804 `expand_continue' will jump to the top of the loop.
1805
1806 Make an entry on loop_stack to record the labels associated with
1807 this loop. */
1808
1809struct nesting *
1810expand_start_loop (exit_flag)
1811 int exit_flag;
1812{
1813 register struct nesting *thisloop = ALLOC_NESTING ();
1814
1815 /* Make an entry on loop_stack for the loop we are entering. */
1816
1817 thisloop->next = loop_stack;
1818 thisloop->all = nesting_stack;
1819 thisloop->depth = ++nesting_depth;
1820 thisloop->data.loop.start_label = gen_label_rtx ();
1821 thisloop->data.loop.end_label = gen_label_rtx ();
1822 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1823 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1824 loop_stack = thisloop;
1825 nesting_stack = thisloop;
1826
1827 do_pending_stack_adjust ();
1828 emit_queue ();
1829  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1830 emit_label (thisloop->data.loop.start_label);
1831
1832 return thisloop;
1833}
1834
1835/* Like expand_start_loop but for a loop where the continuation point
1836 (for expand_continue_loop) will be specified explicitly. */
1837
1838struct nesting *
1839expand_start_loop_continue_elsewhere (exit_flag)
1840 int exit_flag;
1841{
1842 struct nesting *thisloop = expand_start_loop (exit_flag);
1843 loop_stack->data.loop.continue_label = gen_label_rtx ();
1844 return thisloop;
1845}
1846
1847/* Specify the continuation point for a loop started with
1848 expand_start_loop_continue_elsewhere.
1849 Use this at the point in the code to which a continue statement
1850 should jump. */
1851
1852void
1853expand_loop_continue_here ()
1854{
1855 do_pending_stack_adjust ();
1856  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1857 emit_label (loop_stack->data.loop.continue_label);
1858}
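
/* Illustrative call sequence (sketch only): a `for' loop, where
   `continue' must reach the increment rather than the top, uses the
   continue-elsewhere form.  `expand_exit_loop_if_false' is defined
   below.  */
#if 0
      /* for (INIT; COND; INCR) BODY  */
      /* ... expand INIT ... */
      expand_start_loop_continue_elsewhere (1);
      expand_exit_loop_if_false (0, cond_tree); /* test COND at the top */
      /* ... expand BODY; `continue' jumps to the continue label ... */
      expand_loop_continue_here ();
      /* ... expand INCR ... */
      expand_end_loop ();                       /* jump back to the top */
#endif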
1859
1860/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1861 Pop the block off of loop_stack. */
1862
1863void
1864expand_end_loop ()
1865{
1866 register rtx insn = get_last_insn ();
1867 register rtx start_label = loop_stack->data.loop.start_label;
1868 rtx last_test_insn = 0;
1869 int num_insns = 0;
1870
1871 /* Mark the continue-point at the top of the loop if none elsewhere. */
1872 if (start_label == loop_stack->data.loop.continue_label)
1873 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1874
1875 do_pending_stack_adjust ();
1876
1877 /* If optimizing, perhaps reorder the loop. If the loop
1878 starts with a conditional exit, roll that to the end
1879 where it will optimize together with the jump back.
1880
1881 We look for the last conditional branch to the exit that we encounter
1882 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1883 branch to the exit first, use it.
1884
1885 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1886 because moving them is not valid. */
1887
1888 if (optimize
1889 &&
1890 ! (GET_CODE (insn) == JUMP_INSN
1891 && GET_CODE (PATTERN (insn)) == SET
1892 && SET_DEST (PATTERN (insn)) == pc_rtx
1893 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1894 {
1895 /* Scan insns from the top of the loop looking for a qualified
1896 conditional exit. */
1897 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1898 insn = NEXT_INSN (insn))
1899 {
1900 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1901 break;
1902
1903 if (GET_CODE (insn) == NOTE
1904 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1905 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1906 break;
1907
1908 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1909 num_insns++;
1910
1911 if (last_test_insn && num_insns > 30)
1912 break;
1913
1914 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1915 && SET_DEST (PATTERN (insn)) == pc_rtx
1916 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1917 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1918 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1919 == loop_stack->data.loop.end_label))
1920 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1921 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1922 == loop_stack->data.loop.end_label))))
1923 last_test_insn = insn;
1924
1925 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1926 && GET_CODE (PATTERN (insn)) == SET
1927 && SET_DEST (PATTERN (insn)) == pc_rtx
1928 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1929 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1930 == loop_stack->data.loop.end_label))
1931 /* Include BARRIER. */
1932 last_test_insn = NEXT_INSN (insn);
1933 }
1934
1935 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1936 {
1937 /* We found one. Move everything from there up
1938 to the end of the loop, and add a jump into the loop
1939 to jump to there. */
1940 register rtx newstart_label = gen_label_rtx ();
1941 register rtx start_move = start_label;
1942
1943      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1944 then we want to move this note also. */
1945 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1946 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1947 == NOTE_INSN_LOOP_CONT))
1948 start_move = PREV_INSN (start_move);
1949
1950 emit_label_after (newstart_label, PREV_INSN (start_move));
1951 reorder_insns (start_move, last_test_insn, get_last_insn ());
1952 emit_jump_insn_after (gen_jump (start_label),
1953 PREV_INSN (newstart_label));
1954 emit_barrier_after (PREV_INSN (newstart_label));
1955 start_label = newstart_label;
1956 }
1957 }
1958
1959 emit_jump (start_label);
1960  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
1961 emit_label (loop_stack->data.loop.end_label);
1962
1963 POPSTACK (loop_stack);
1964
1965 last_expr_type = 0;
1966}
1967
1968/* Generate a jump to the current loop's continue-point.
1969 This is usually the top of the loop, but may be specified
1970 explicitly elsewhere. If not currently inside a loop,
1971 return 0 and do nothing; caller will print an error message. */
1972
1973int
1974expand_continue_loop (whichloop)
1975 struct nesting *whichloop;
1976{
1977 last_expr_type = 0;
1978 if (whichloop == 0)
1979 whichloop = loop_stack;
1980 if (whichloop == 0)
1981 return 0;
1982 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
1983 NULL_RTX);
1984 return 1;
1985}
1986
1987/* Generate a jump to exit the current loop. If not currently inside a loop,
1988 return 0 and do nothing; caller will print an error message. */
1989
1990int
1991expand_exit_loop (whichloop)
1992 struct nesting *whichloop;
1993{
1994 last_expr_type = 0;
1995 if (whichloop == 0)
1996 whichloop = loop_stack;
1997 if (whichloop == 0)
1998 return 0;
1999  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2000 return 1;
2001}
2002
2003/* Generate a conditional jump to exit the current loop if COND
2004 evaluates to zero. If not currently inside a loop,
2005 return 0 and do nothing; caller will print an error message. */
2006
2007int
2008expand_exit_loop_if_false (whichloop, cond)
2009 struct nesting *whichloop;
2010 tree cond;
2011{
2012 last_expr_type = 0;
2013 if (whichloop == 0)
2014 whichloop = loop_stack;
2015 if (whichloop == 0)
2016 return 0;
2017  do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2018 return 1;
2019}
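
/* E.g. (a sketch): the plain `while (COND) BODY;' form needs no
   separate continue point:

       expand_start_loop (1);
       expand_exit_loop_if_false (0, cond_tree);
       ... expand BODY ...
       expand_end_loop ();

   and expand_end_loop may then roll the exit test to the bottom.  */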
2020
2021/* Return non-zero if we should preserve sub-expressions as separate
2022 pseudos. We never do so if we aren't optimizing. We always do so
2023 if -fexpensive-optimizations.
2024
2025 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2026 the loop may still be a small one. */
2027
2028int
2029preserve_subexpressions_p ()
2030{
2031 rtx insn;
2032
2033 if (flag_expensive_optimizations)
2034 return 1;
2035
2036 if (optimize == 0 || loop_stack == 0)
2037 return 0;
2038
2039 insn = get_last_insn_anywhere ();
2040
2041 return (insn
2042 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2043 < n_non_fixed_regs * 3));
2044
2045}
2046
2047/* Generate a jump to exit the current loop, conditional, binding contour
2048 or case statement. Not all such constructs are visible to this function,
2049 only those started with EXIT_FLAG nonzero. Individual languages use
2050 the EXIT_FLAG parameter to control which kinds of constructs you can
2051 exit this way.
2052
2053 If not currently inside anything that can be exited,
2054 return 0 and do nothing; caller will print an error message. */
2055
2056int
2057expand_exit_something ()
2058{
2059 struct nesting *n;
2060 last_expr_type = 0;
2061 for (n = nesting_stack; n; n = n->all)
2062 if (n->exit_label != 0)
2063 {
2064	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2065 return 1;
2066 }
2067
2068 return 0;
2069}
2070\f
2071/* Generate RTL to return from the current function, with no value.
2072 (That is, we do not do anything about returning any value.) */
2073
2074void
2075expand_null_return ()
2076{
2077 struct nesting *block = block_stack;
2078 rtx last_insn = 0;
2079
2080 /* Does any pending block have cleanups? */
2081
2082 while (block && block->data.block.cleanups == 0)
2083 block = block->next;
2084
2085 /* If yes, use a goto to return, since that runs cleanups. */
2086
2087 expand_null_return_1 (last_insn, block != 0);
2088}
2089
2090/* Generate RTL to return from the current function, with value VAL. */
2091
2092void
2093expand_value_return (val)
2094 rtx val;
2095{
2096 struct nesting *block = block_stack;
2097 rtx last_insn = get_last_insn ();
2098 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2099
2100 /* Copy the value to the return location
2101 unless it's already there. */
2102
2103 if (return_reg != val)
2104 emit_move_insn (return_reg, val);
2105 if (GET_CODE (return_reg) == REG
2106 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2107 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2108
2109 /* Does any pending block have cleanups? */
2110
2111 while (block && block->data.block.cleanups == 0)
2112 block = block->next;
2113
2114 /* If yes, use a goto to return, since that runs cleanups.
2115 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2116
2117 expand_null_return_1 (last_insn, block != 0);
2118}
2119
2120/* Output a return with no value. If LAST_INSN is nonzero,
2121 pretend that the return takes place after LAST_INSN.
2122 If USE_GOTO is nonzero then don't use a return instruction;
2123 go to the return label instead. This causes any cleanups
2124 of pending blocks to be executed normally. */
2125
2126static void
2127expand_null_return_1 (last_insn, use_goto)
2128 rtx last_insn;
2129 int use_goto;
2130{
2131 rtx end_label = cleanup_label ? cleanup_label : return_label;
2132
2133 clear_pending_stack_adjust ();
2134 do_pending_stack_adjust ();
2135 last_expr_type = 0;
2136
2137 /* PCC-struct return always uses an epilogue. */
2138 if (current_function_returns_pcc_struct || use_goto)
2139 {
2140 if (end_label == 0)
2141 end_label = return_label = gen_label_rtx ();
2142      expand_goto_internal (NULL_TREE, end_label, last_insn);
2143 return;
2144 }
2145
2146 /* Otherwise output a simple return-insn if one is available,
2147 unless it won't do the job. */
2148#ifdef HAVE_return
2149 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2150 {
2151 emit_jump_insn (gen_return ());
2152 emit_barrier ();
2153 return;
2154 }
2155#endif
2156
2157 /* Otherwise jump to the epilogue. */
2158  expand_goto_internal (NULL_TREE, end_label, last_insn);
2159}
2160\f
2161/* Generate RTL to evaluate the expression RETVAL and return it
2162 from the current function. */
2163
2164void
2165expand_return (retval)
2166 tree retval;
2167{
2168 /* If there are any cleanups to be performed, then they will
2169 be inserted following LAST_INSN. It is desirable
2170 that the last_insn, for such purposes, should be the
2171 last insn before computing the return value. Otherwise, cleanups
2172 which call functions can clobber the return value. */
2173 /* ??? rms: I think that is erroneous, because in C++ it would
2174 run destructors on variables that might be used in the subsequent
2175 computation of the return value. */
2176 rtx last_insn = 0;
2177 register rtx val = 0;
2178 register rtx op0;
2179 tree retval_rhs;
2180 int cleanups;
2181 struct nesting *block;
2182
2183 /* If function wants no value, give it none. */
2184 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2185 {
2186      expand_expr (retval, NULL_RTX, VOIDmode, 0);
2187 expand_null_return ();
2188 return;
2189 }
2190
2191 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2192 cleanups = any_pending_cleanups (1);
2193
2194 if (TREE_CODE (retval) == RESULT_DECL)
2195 retval_rhs = retval;
2196 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2197 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2198 retval_rhs = TREE_OPERAND (retval, 1);
2199 else if (TREE_TYPE (retval) == void_type_node)
2200 /* Recognize tail-recursive call to void function. */
2201 retval_rhs = retval;
2202 else
2203 retval_rhs = NULL_TREE;
2204
2205 /* Only use `last_insn' if there are cleanups which must be run. */
2206 if (cleanups || cleanup_label != 0)
2207 last_insn = get_last_insn ();
2208
2209 /* Distribute return down conditional expr if either of the sides
2210 may involve tail recursion (see test below). This enhances the number
2211 of tail recursions we see. Don't do this always since it can produce
2212 sub-optimal code in some cases and we distribute assignments into
2213 conditional expressions when it would help. */
2214
2215 if (optimize && retval_rhs != 0
2216 && frame_offset == 0
2217 && TREE_CODE (retval_rhs) == COND_EXPR
2218 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2219 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2220 {
2221 rtx label = gen_label_rtx ();
2222      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2223 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2224 DECL_RESULT (current_function_decl),
2225 TREE_OPERAND (retval_rhs, 1)));
2226 emit_label (label);
2227 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2228 DECL_RESULT (current_function_decl),
2229 TREE_OPERAND (retval_rhs, 2)));
2230 return;
2231 }
2232
2233 /* For tail-recursive call to current function,
2234 just jump back to the beginning.
2235 It's unsafe if any auto variable in this function
2236 has its address taken; for simplicity,
2237 require stack frame to be empty. */
2238 if (optimize && retval_rhs != 0
2239 && frame_offset == 0
2240 && TREE_CODE (retval_rhs) == CALL_EXPR
2241 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2242 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2243 /* Finish checking validity, and if valid emit code
2244 to set the argument variables for the new call. */
2245 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2246 DECL_ARGUMENTS (current_function_decl)))
2247 {
2248 if (tail_recursion_label == 0)
2249 {
2250 tail_recursion_label = gen_label_rtx ();
2251 emit_label_after (tail_recursion_label,
2252 tail_recursion_reentry);
2253 }
2254      emit_queue ();
2255      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2256 emit_barrier ();
2257 return;
2258 }
2259#ifdef HAVE_return
2260 /* This optimization is safe if there are local cleanups
2261 because expand_null_return takes care of them.
2262 ??? I think it should also be safe when there is a cleanup label,
2263 because expand_null_return takes care of them, too.
2264 Any reason why not? */
2265 if (HAVE_return && cleanup_label == 0
2266 && ! current_function_returns_pcc_struct)
2267 {
2268 /* If this is return x == y; then generate
2269 if (x == y) return 1; else return 0;
2270 if we can do it with explicit return insns. */
2271 if (retval_rhs)
2272 switch (TREE_CODE (retval_rhs))
2273 {
2274 case EQ_EXPR:
2275 case NE_EXPR:
2276 case GT_EXPR:
2277 case GE_EXPR:
2278 case LT_EXPR:
2279 case LE_EXPR:
2280 case TRUTH_ANDIF_EXPR:
2281 case TRUTH_ORIF_EXPR:
2282 case TRUTH_AND_EXPR:
2283 case TRUTH_OR_EXPR:
2284 case TRUTH_NOT_EXPR:
2285 op0 = gen_label_rtx ();
2286 jumpifnot (retval_rhs, op0);
2287 expand_value_return (const1_rtx);
2288 emit_label (op0);
2289 expand_value_return (const0_rtx);
2290 return;
2291 }
2292 }
2293#endif /* HAVE_return */
2294
2295 if (cleanups
2296 && retval_rhs != 0
2297 && TREE_TYPE (retval_rhs) != void_type_node
2298 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2299 {
2300 /* Calculate the return value into a pseudo reg. */
2301      val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2302 emit_queue ();
2303 /* All temporaries have now been used. */
2304 free_temp_slots ();
2305 /* Return the calculated value, doing cleanups first. */
2306 expand_value_return (val);
2307 }
2308 else
2309 {
2310 /* No cleanups or no hard reg used;
2311 calculate value into hard return reg. */
2312      expand_expr (retval, NULL_RTX, VOIDmode, 0);
2313 emit_queue ();
2314 free_temp_slots ();
2315 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2316 }
2317}
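
/* Illustration (not compiler code) of the two rewrites above:

       return p ? f (x) : y;     becomes     if (p) return f (x);
                                             return y;

   so that a CALL_EXPR arm can become a tail-recursive jump, and

       return x == y;            becomes     if (x == y) return 1;
                                             return 0;

   when the machine has an explicit return insn.  */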
2318
2319/* Return 1 if the end of the generated RTX is not a barrier.
2320 This means code already compiled can drop through. */
2321
2322int
2323drop_through_at_end_p ()
2324{
2325 rtx insn = get_last_insn ();
2326 while (insn && GET_CODE (insn) == NOTE)
2327 insn = PREV_INSN (insn);
2328 return insn && GET_CODE (insn) != BARRIER;
2329}
2330\f
2331/* Emit code to alter this function's formal parms for a tail-recursive call.
2332 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2333 FORMALS is the chain of decls of formals.
2334 Return 1 if this can be done;
2335 otherwise return 0 and do not emit any code. */
2336
2337static int
2338tail_recursion_args (actuals, formals)
2339 tree actuals, formals;
2340{
2341 register tree a = actuals, f = formals;
2342 register int i;
2343 register rtx *argvec;
2344
2345 /* Check that number and types of actuals are compatible
2346 with the formals. This is not always true in valid C code.
2347 Also check that no formal needs to be addressable
2348 and that all formals are scalars. */
2349
2350 /* Also count the args. */
2351
2352 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2353 {
2354 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2355 return 0;
2356 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2357 return 0;
2358 }
2359 if (a != 0 || f != 0)
2360 return 0;
2361
2362 /* Compute all the actuals. */
2363
2364 argvec = (rtx *) alloca (i * sizeof (rtx));
2365
2366 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2367    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2368
2369 /* Find which actual values refer to current values of previous formals.
2370 Copy each of them now, before any formal is changed. */
2371
2372 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2373 {
2374 int copy = 0;
2375 register int j;
2376 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2377 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2378 { copy = 1; break; }
2379 if (copy)
2380 argvec[i] = copy_to_reg (argvec[i]);
2381 }
2382
2383 /* Store the values of the actuals into the formals. */
2384
2385 for (f = formals, a = actuals, i = 0; f;
2386 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2387 {
2388 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2389 emit_move_insn (DECL_RTL (f), argvec[i]);
2390 else
2391 convert_move (DECL_RTL (f), argvec[i],
2392 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2393 }
2394
2395 free_temp_slots ();
2396 return 1;
2397}
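
/* Example (illustrative only) of why the copying pass above is needed:
   for a tail call that permutes the function's own arguments,

       int f (a, b) int a, b; { ... return f (b, a); ... }

   the actual for the second formal is `a' itself.  Storing `b' into
   `a''s register first would make the second store read a clobbered
   value; copying such actuals to fresh pseudos beforehand keeps the
   parallel assignment correct.  */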
2398\f
2399/* Generate the RTL code for entering a binding contour.
2400 The variables are declared one by one, by calls to `expand_decl'.
2401
2402 EXIT_FLAG is nonzero if this construct should be visible to
2403 `exit_something'. */
2404
2405void
2406expand_start_bindings (exit_flag)
2407 int exit_flag;
2408{
2409 struct nesting *thisblock = ALLOC_NESTING ();
2410
2411  rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2412
2413 /* Make an entry on block_stack for the block we are entering. */
2414
2415 thisblock->next = block_stack;
2416 thisblock->all = nesting_stack;
2417 thisblock->depth = ++nesting_depth;
2418 thisblock->data.block.stack_level = 0;
2419 thisblock->data.block.cleanups = 0;
2420 thisblock->data.block.function_call_count = 0;
2421#if 0
2422 if (block_stack)
2423 {
2424 if (block_stack->data.block.cleanups == NULL_TREE
2425 && (block_stack->data.block.outer_cleanups == NULL_TREE
2426 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2427 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2428 else
2429 thisblock->data.block.outer_cleanups
2430 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2431 block_stack->data.block.outer_cleanups);
2432 }
2433 else
2434 thisblock->data.block.outer_cleanups = 0;
2435#endif
2436#if 1
2437 if (block_stack
2438 && !(block_stack->data.block.cleanups == NULL_TREE
2439 && block_stack->data.block.outer_cleanups == NULL_TREE))
2440 thisblock->data.block.outer_cleanups
2441 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2442 block_stack->data.block.outer_cleanups);
2443 else
2444 thisblock->data.block.outer_cleanups = 0;
2445#endif
2446 thisblock->data.block.label_chain = 0;
2447 thisblock->data.block.innermost_stack_block = stack_block_stack;
2448 thisblock->data.block.first_insn = note;
2449 thisblock->data.block.block_start_count = ++block_start_count;
2450 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2451 block_stack = thisblock;
2452 nesting_stack = thisblock;
2453
2454 /* Make a new level for allocating stack slots. */
2455 push_temp_slots ();
2456}
2457
2458/* Generate RTL code to terminate a binding contour.
2459 VARS is the chain of VAR_DECL nodes
2460 for the variables bound in this contour.
2461 MARK_ENDS is nonzero if we should put a note at the beginning
2462 and end of this binding contour.
2463
2464 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2465 (That is true automatically if the contour has a saved stack level.) */
2466
2467void
2468expand_end_bindings (vars, mark_ends, dont_jump_in)
2469 tree vars;
2470 int mark_ends;
2471 int dont_jump_in;
2472{
2473 register struct nesting *thisblock = block_stack;
2474 register tree decl;
2475
2476 if (warn_unused)
2477 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2478 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2479 warning_with_decl (decl, "unused variable `%s'");
2480
2481 /* Mark the beginning and end of the scope if requested. */
2482
2483 if (mark_ends)
2484    emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
2485 else
2486 /* Get rid of the beginning-mark if we don't make an end-mark. */
2487 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2488
2489 if (thisblock->exit_label)
2490 {
2491 do_pending_stack_adjust ();
2492 emit_label (thisblock->exit_label);
2493 }
2494
2495 /* If necessary, make a handler for nonlocal gotos taking
2496 place in the function calls in this block. */
2497 if (function_call_count != thisblock->data.block.function_call_count
2498 && nonlocal_labels
2499 /* Make handler for outermost block
2500 if there were any nonlocal gotos to this function. */
2501 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2502 /* Make handler for inner block if it has something
2503 special to do when you jump out of it. */
2504 : (thisblock->data.block.cleanups != 0
2505 || thisblock->data.block.stack_level != 0)))
2506 {
2507 tree link;
2508 rtx afterward = gen_label_rtx ();
2509 rtx handler_label = gen_label_rtx ();
2510 rtx save_receiver = gen_reg_rtx (Pmode);
2511
2512 /* Don't let jump_optimize delete the handler. */
2513 LABEL_PRESERVE_P (handler_label) = 1;
2514
2515 /* Record the handler address in the stack slot for that purpose,
2516 during this block, saving and restoring the outer value. */
2517 if (thisblock->next != 0)
2518 {
2519 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2520 emit_insn_before (gen_move_insn (save_receiver,
2521 nonlocal_goto_handler_slot),
2522 thisblock->data.block.first_insn);
2523 }
2524 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2525 gen_rtx (LABEL_REF, Pmode,
2526 handler_label)),
2527 thisblock->data.block.first_insn);
2528
2529 /* Jump around the handler; it runs only when specially invoked. */
2530 emit_jump (afterward);
2531 emit_label (handler_label);
2532
2533#ifdef HAVE_nonlocal_goto
2534 if (! HAVE_nonlocal_goto)
2535#endif
2536 /* First adjust our frame pointer to its actual value. It was
2537 previously set to the start of the virtual area corresponding to
2538 the stacked variables when we branched here and now needs to be
2539 adjusted to the actual hardware fp value.
2540
2541	 Assignments to virtual registers are converted by
2542 instantiate_virtual_regs into the corresponding assignment
2543 to the underlying register (fp in this case) that makes
2544 the original assignment true.
2545 So the following insn will actually be
2546 decrementing fp by STARTING_FRAME_OFFSET. */
2547 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2548
2549#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2550 if (fixed_regs[ARG_POINTER_REGNUM])
2551 {
2552#ifdef ELIMINABLE_REGS
2553 /* If the argument pointer can be eliminated in favor of the
2554 frame pointer, we don't need to restore it. We assume here
2555 that if such an elimination is present, it can always be used.
2556 This is the case on all known machines; if we don't make this
2557 assumption, we do unnecessary saving on many machines. */
2558 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2559 int i;
2560
2561 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2562 if (elim_regs[i].from == ARG_POINTER_REGNUM
2563 && elim_regs[i].to == FRAME_POINTER_REGNUM)
2564 break;
2565
2566 if (i == sizeof elim_regs / sizeof elim_regs [0])
2567#endif
2568 {
2569 /* Now restore our arg pointer from the address at which it
2570 was saved in our stack frame.
2571	     If there hasn't been space allocated for it yet, make
2572 some now. */
2573 if (arg_pointer_save_area == 0)
2574 arg_pointer_save_area
2575 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2576 emit_move_insn (virtual_incoming_args_rtx,
2577 /* We need a pseudo here, or else
2578 instantiate_virtual_regs_1 complains. */
2579 copy_to_reg (arg_pointer_save_area));
2580 }
2581 }
2582#endif
2583
2584 /* The handler expects the desired label address in the static chain
2585 register. It tests the address and does an appropriate jump
2586 to whatever label is desired. */
2587 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2588 /* Skip any labels we shouldn't be able to jump to from here. */
2589 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2590 {
2591 rtx not_this = gen_label_rtx ();
2592 rtx this = gen_label_rtx ();
2593 do_jump_if_equal (static_chain_rtx,
2594 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2595 this, 0);
2596 emit_jump (not_this);
2597 emit_label (this);
2598 expand_goto (TREE_VALUE (link));
2599 emit_label (not_this);
2600 }
2601 /* If label is not recognized, abort. */
2602 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2603 VOIDmode, 0);
2604 emit_label (afterward);
2605 }
2606
2607 /* Don't allow jumping into a block that has cleanups or a stack level. */
2608 if (dont_jump_in
2609 || thisblock->data.block.stack_level != 0
2610 || thisblock->data.block.cleanups != 0)
2611 {
2612 struct label_chain *chain;
2613
2614 /* Any labels in this block are no longer valid to go to.
2615 Mark them to cause an error message. */
2616 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2617 {
2618 DECL_TOO_LATE (chain->label) = 1;
2619 /* If any goto without a fixup came to this label,
2620 that must be an error, because gotos without fixups
2621 come from outside all saved stack-levels and all cleanups. */
2622 if (TREE_ADDRESSABLE (chain->label))
2623 error_with_decl (chain->label,
2624 "label `%s' used before containing binding contour");
2625 }
2626 }
2627
2628 /* Restore stack level in effect before the block
2629 (only if variable-size objects allocated). */
2630 /* Perform any cleanups associated with the block. */
2631
2632 if (thisblock->data.block.stack_level != 0
2633 || thisblock->data.block.cleanups != 0)
2634 {
2635 /* Don't let cleanups affect ({...}) constructs. */
2636 int old_expr_stmts_for_value = expr_stmts_for_value;
2637 rtx old_last_expr_value = last_expr_value;
2638 tree old_last_expr_type = last_expr_type;
2639 expr_stmts_for_value = 0;
2640
2641 /* Do the cleanups. */
2642      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2643 do_pending_stack_adjust ();
2644
2645 expr_stmts_for_value = old_expr_stmts_for_value;
2646 last_expr_value = old_last_expr_value;
2647 last_expr_type = old_last_expr_type;
2648
2649 /* Restore the stack level. */
2650
2651 if (thisblock->data.block.stack_level != 0)
2652 {
2653	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2654			      thisblock->data.block.stack_level, NULL_RTX);
2655	  if (nonlocal_goto_handler_slot != 0)
2656	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
2657			     NULL_RTX);
2658 }
2659
2660 /* Any gotos out of this block must also do these things.
2661 Also report any gotos with fixups that came to labels in this
2662 level. */
2663 fixup_gotos (thisblock,
2664 thisblock->data.block.stack_level,
2665 thisblock->data.block.cleanups,
2666 thisblock->data.block.first_insn,
2667 dont_jump_in);
2668 }
2669
2670 /* If doing stupid register allocation, make sure lives of all
2671 register variables declared here extend thru end of scope. */
2672
2673 if (obey_regdecls)
2674 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2675 {
2676 rtx rtl = DECL_RTL (decl);
2677 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2678 use_variable (rtl);
2679 }
2680
2681 /* Restore block_stack level for containing block. */
2682
2683 stack_block_stack = thisblock->data.block.innermost_stack_block;
2684 POPSTACK (block_stack);
2685
2686 /* Pop the stack slot nesting and free any slots at this level. */
2687 pop_temp_slots ();
2688}
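
/* Illustrative call sequence (a sketch, not part of the compiler):
   how a front end expands a braced block `{ T x = INIT; ... }' with a
   cleanup, using the routines defined below.  X_DECL and CLEANUP_TREE
   are placeholders.  */
#if 0
      expand_start_bindings (0);
      expand_decl (x_decl);                       /* allocate RTL for `x' */
      expand_decl_init (x_decl);                  /* store INIT into it */
      expand_decl_cleanup (x_decl, cleanup_tree); /* e.g. destructor call */
      /* ... expand the statements of the block ... */
      expand_end_bindings (x_decl, 1, 0);         /* runs the cleanups */
#endif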
2689\f
2690/* Generate RTL for the automatic variable declaration DECL.
2691 (Other kinds of declarations are simply ignored if seen here.)
2692 CLEANUP is an expression to be executed at exit from this binding contour;
2693 for example, in C++, it might call the destructor for this variable.
2694
2695 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2696 either before or after calling `expand_decl' but before compiling
2697 any subsequent expressions. This is because CLEANUP may be expanded
2698 more than once, on different branches of execution.
2699 For the same reason, CLEANUP may not contain a CALL_EXPR
2700 except as its topmost node--else `preexpand_calls' would get confused.
2701
2702 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2703 that is not associated with any particular variable.
2704
2705 There is no special support here for C++ constructors.
2706 They should be handled by the proper code in DECL_INITIAL. */
2707
2708void
2709expand_decl (decl)
2710 register tree decl;
2711{
2712 struct nesting *thisblock = block_stack;
2713 tree type = TREE_TYPE (decl);
2714
2715 /* Only automatic variables need any expansion done.
2716 Static and external variables, and external functions,
2717 will be handled by `assemble_variable' (called from finish_decl).
2718 TYPE_DECL and CONST_DECL require nothing.
2719 PARM_DECLs are handled in `assign_parms'. */
2720
2721 if (TREE_CODE (decl) != VAR_DECL)
2722 return;
2723 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2724 return;
2725
2726 /* Create the RTL representation for the variable. */
2727
2728 if (type == error_mark_node)
2729 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2730 else if (DECL_SIZE (decl) == 0)
2731 /* Variable with incomplete type. */
2732 {
2733 if (DECL_INITIAL (decl) == 0)
2734 /* Error message was already done; now avoid a crash. */
2735 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2736 else
2737 /* An initializer is going to decide the size of this array.
2738 Until we know the size, represent its address with a reg. */
2739 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2740 }
2741 else if (DECL_MODE (decl) != BLKmode
2742 /* If -ffloat-store, don't put explicit float vars
2743 into regs. */
2744 && !(flag_float_store
2745 && TREE_CODE (type) == REAL_TYPE)
2746 && ! TREE_THIS_VOLATILE (decl)
2747 && ! TREE_ADDRESSABLE (decl)
2748 && (TREE_REGDECL (decl) || ! obey_regdecls))
2749 {
2750 /* Automatic variable that can go in a register. */
2751 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2752 if (TREE_CODE (type) == POINTER_TYPE)
2753 mark_reg_pointer (DECL_RTL (decl));
2754 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2755 }
2756 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2757 {
2758 /* Variable of fixed size that goes on the stack. */
2759 rtx oldaddr = 0;
2760 rtx addr;
2761
2762 /* If we previously made RTL for this decl, it must be an array
2763 whose size was determined by the initializer.
2764 The old address was a register; set that register now
2765 to the proper address. */
2766 if (DECL_RTL (decl) != 0)
2767 {
2768 if (GET_CODE (DECL_RTL (decl)) != MEM
2769 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2770 abort ();
2771 oldaddr = XEXP (DECL_RTL (decl), 0);
2772 }
2773
2774 DECL_RTL (decl)
2775 = assign_stack_temp (DECL_MODE (decl),
2776 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2777 + BITS_PER_UNIT - 1)
2778 / BITS_PER_UNIT),
2779 1);
2780
2781 /* Set alignment we actually gave this decl. */
2782 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2783 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2784
2785 if (oldaddr)
2786 {
2787 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2788 if (addr != oldaddr)
2789 emit_move_insn (oldaddr, addr);
2790 }
2791
2792 /* If this is a memory ref that contains aggregate components,
2793 mark it as such for cse and loop optimize. */
2794 MEM_IN_STRUCT_P (DECL_RTL (decl))
2795 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2796 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2797 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2798#if 0
2799 /* If this is in memory because of -ffloat-store,
2800 set the volatile bit, to prevent optimizations from
2801 undoing the effects. */
2802 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2803 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2804#endif
2805 }
2806 else
2807 /* Dynamic-size object: must push space on the stack. */
2808 {
2809 rtx address, size;
2810
2811      /* Record the stack pointer on entry to block, if we have
2812	 not already done so.  */
2813 if (thisblock->data.block.stack_level == 0)
2814 {
2815 do_pending_stack_adjust ();
2816 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2817 &thisblock->data.block.stack_level,
2818 thisblock->data.block.first_insn);
2819 stack_block_stack = thisblock;
2820 }
2821
2822 /* Compute the variable's size, in bytes. */
2823 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2824 DECL_SIZE (decl),
2825 size_int (BITS_PER_UNIT)),
2826			  NULL_RTX, VOIDmode, 0);
2827 free_temp_slots ();
2828
2829 /* This is equivalent to calling alloca. */
2830 current_function_calls_alloca = 1;
2831
2832      /* Allocate space on the stack for the variable. */
2833 address = allocate_dynamic_stack_space (size, NULL_RTX,
2834 DECL_ALIGN (decl));
2835
2836      if (nonlocal_goto_handler_slot != 0)
2837	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2838
2839      /* Reference the variable indirectly through that rtx. */
2840 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2841
2842 /* If this is a memory ref that contains aggregate components,
2843 mark it as such for cse and loop optimize. */
2844 MEM_IN_STRUCT_P (DECL_RTL (decl))
2845 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2846 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2847 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2848
2849 /* Indicate the alignment we actually gave this variable. */
2850#ifdef STACK_BOUNDARY
2851 DECL_ALIGN (decl) = STACK_BOUNDARY;
2852#else
2853 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2854#endif
2855 }
2856
2857 if (TREE_THIS_VOLATILE (decl))
2858 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2859 if (TREE_READONLY (decl))
2860 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2861
2862 /* If doing stupid register allocation, make sure life of any
2863 register variable starts here, at the start of its scope. */
2864
2865 if (obey_regdecls)
2866 use_variable (DECL_RTL (decl));
2867}
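
/* For example (illustrative): within a function,
     `register int i;'  normally becomes a pseudo register,
     `char buf[100];'   becomes a stack slot of fixed size, and
     `char buf[n];'     takes the dynamic-size path above, which saves
   the entry stack level and allocates space much as alloca would.  */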
2868\f
2869/* Emit code to perform the initialization of a declaration DECL. */
2870
2871void
2872expand_decl_init (decl)
2873 tree decl;
2874{
2875 int was_used = TREE_USED (decl);
2876
2877 if (TREE_STATIC (decl))
2878 return;
2879
2880 /* Compute and store the initial value now. */
2881
2882 if (DECL_INITIAL (decl) == error_mark_node)
2883 {
2884 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2885 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2886 || code == POINTER_TYPE)
2887 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2888 0, 0);
2889 emit_queue ();
2890 }
2891 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2892 {
2893 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2894 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2895 emit_queue ();
2896 }
2897
2898 /* Don't let the initialization count as "using" the variable. */
2899 TREE_USED (decl) = was_used;
2900
28d81abb
RK
2901 /* Free any temporaries we made while initializing the decl. */
2902 free_temp_slots ();
2903}
2904
2905/* CLEANUP is an expression to be executed at exit from this binding contour;
2906 for example, in C++, it might call the destructor for this variable.
2907
2908 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2909 either before or after calling `expand_decl' but before compiling
2910 any subsequent expressions. This is because CLEANUP may be expanded
2911 more than once, on different branches of execution.
2912 For the same reason, CLEANUP may not contain a CALL_EXPR
2913 except as its topmost node--else `preexpand_calls' would get confused.
2914
2915 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2916 that is not associated with any particular variable. */
2917
2918int
2919expand_decl_cleanup (decl, cleanup)
2920 tree decl, cleanup;
2921{
2922 struct nesting *thisblock = block_stack;
2923
2924 /* Error if we are not in any block. */
2925 if (thisblock == 0)
2926 return 0;
2927
2928 /* Record the cleanup if there is one. */
2929
2930 if (cleanup != 0)
2931 {
2932 thisblock->data.block.cleanups
2933 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2934 /* If this block has a cleanup, it belongs in stack_block_stack. */
2935 stack_block_stack = thisblock;
2936 }
2937 return 1;
2938}
2939\f
2940/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2941 DECL_ELTS is the list of elements that belong to DECL's type.
2942 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2943
2944void
2945expand_anon_union_decl (decl, cleanup, decl_elts)
2946 tree decl, cleanup, decl_elts;
2947{
2948 struct nesting *thisblock = block_stack;
2949 rtx x;
2950
2951  expand_decl (decl);
      /* expand_decl takes only the decl; record the cleanup separately. */
      expand_decl_cleanup (decl, cleanup);
2952 x = DECL_RTL (decl);
2953
2954 while (decl_elts)
2955 {
2956 tree decl_elt = TREE_VALUE (decl_elts);
2957 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2958 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2959
2960 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2961 instead create a new MEM rtx with the proper mode. */
2962 if (GET_CODE (x) == MEM)
2963 {
2964 if (mode == GET_MODE (x))
2965 DECL_RTL (decl_elt) = x;
2966 else
2967 {
2968 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2969 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2970 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2971 }
2972 }
2973 else if (GET_CODE (x) == REG)
2974 {
2975 if (mode == GET_MODE (x))
2976 DECL_RTL (decl_elt) = x;
2977 else
2978 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2979 }
2980 else
2981 abort ();
2982
2983 /* Record the cleanup if there is one. */
2984
2985 if (cleanup != 0)
2986 thisblock->data.block.cleanups
2987 = temp_tree_cons (decl_elt, cleanup_elt,
2988 thisblock->data.block.cleanups);
2989
2990 decl_elts = TREE_CHAIN (decl_elts);
2991 }
2992}
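
/* Example (illustrative): for a C++ anonymous union

       union { int i; double d; };

   DECL is the compiler-generated variable holding the union's storage
   and DECL_ELTS lists `i' and `d'.  Each element gets RTL aliasing
   that storage: a MEM in the member's own mode, or a SUBREG when the
   union lives in a register.  */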
2993\f
2994/* Expand a list of cleanups LIST.
2995 Elements may be expressions or may be nested lists.
2996
2997 If DONT_DO is nonnull, then any list-element
2998 whose TREE_PURPOSE matches DONT_DO is omitted.
2999 This is sometimes used to avoid a cleanup associated with
3000 a value that is being returned out of the scope. */
3001
3002static void
3003expand_cleanups (list, dont_do)
3004 tree list;
3005 tree dont_do;
3006{
3007 tree tail;
3008 for (tail = list; tail; tail = TREE_CHAIN (tail))
3009 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3010 {
3011 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3012 expand_cleanups (TREE_VALUE (tail), dont_do);
3013 else
3014 {
3015 /* Cleanups may be run multiple times. For example,
3016 when exiting a binding contour, we expand the
3017 cleanups associated with that contour. When a goto
3018 within that binding contour has a target outside that
3019 contour, it will expand all cleanups from its scope to
3020 the target. Though the cleanups are expanded multiple
3021 times, the control paths are non-overlapping so the
3022 cleanups will not be executed twice. */
3023 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3024 free_temp_slots ();
3025 }
3026 }
3027}
3028
3029/* Expand a list of cleanups for a goto fixup.
3030 The expansion is put into the insn chain after the insn *BEFORE_JUMP
3031 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
3032
3033static void
3034fixup_cleanups (list, before_jump)
3035 tree list;
3036 rtx *before_jump;
3037{
3038 rtx beyond_jump = get_last_insn ();
3039 rtx new_before_jump;
3040
3041  expand_cleanups (list, NULL_TREE);
3042 /* Pop any pushes done in the cleanups,
3043 in case function is about to return. */
3044 do_pending_stack_adjust ();
3045
3046 new_before_jump = get_last_insn ();
3047
3048 if (beyond_jump != new_before_jump)
3049 {
3050 /* If cleanups expand to nothing, don't reorder. */
3051 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
3052 *before_jump = new_before_jump;
3053 }
3054}
3055
3056/* Move all cleanups from the current block_stack
3057 to the containing block_stack, where they are assumed to
3058 have been created. If anything can cause a temporary to
3059 be created, but not expanded for more than one level of
3060 block_stacks, then this code will have to change. */
3061
3062void
3063move_cleanups_up ()
3064{
3065 struct nesting *block = block_stack;
3066 struct nesting *outer = block->next;
3067
3068 outer->data.block.cleanups
3069 = chainon (block->data.block.cleanups,
3070 outer->data.block.cleanups);
3071 block->data.block.cleanups = 0;
3072}
3073
3074tree
3075last_cleanup_this_contour ()
3076{
3077 if (block_stack == 0)
3078 return 0;
3079
3080 return block_stack->data.block.cleanups;
3081}
3082
3083/* Return 1 if there are any pending cleanups at this point.
3084 If THIS_CONTOUR is nonzero, check the current contour as well.
3085 Otherwise, look only at the contours that enclose this one. */
3086
3087int
3088any_pending_cleanups (this_contour)
3089 int this_contour;
3090{
3091 struct nesting *block;
3092
3093 if (block_stack == 0)
3094 return 0;
3095
3096 if (this_contour && block_stack->data.block.cleanups != NULL)
3097 return 1;
3098 if (block_stack->data.block.cleanups == 0
3099 && (block_stack->data.block.outer_cleanups == 0
3100#if 0
3101 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3102#endif
3103 ))
3104 return 0;
3105
3106 for (block = block_stack->next; block; block = block->next)
3107 if (block->data.block.cleanups != 0)
3108 return 1;
3109
3110 return 0;
3111}
3112\f
3113/* Enter a case (Pascal) or switch (C) statement.
3114 Push a block onto case_stack and nesting_stack
3115 to accumulate the case-labels that are seen
3116 and to record the labels generated for the statement.
3117
3118 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3119 Otherwise, this construct is transparent for `exit_something'.
3120
3121 EXPR is the index-expression to be dispatched on.
3122 TYPE is its nominal type. We could simply convert EXPR to this type,
3123 but instead we take short cuts. */
3124
3125void
3126expand_start_case (exit_flag, expr, type, printname)
3127 int exit_flag;
3128 tree expr;
3129 tree type;
3130 char *printname;
3131{
3132 register struct nesting *thiscase = ALLOC_NESTING ();
3133
3134 /* Make an entry on case_stack for the case we are entering. */
3135
3136 thiscase->next = case_stack;
3137 thiscase->all = nesting_stack;
3138 thiscase->depth = ++nesting_depth;
3139 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3140 thiscase->data.case_stmt.case_list = 0;
3141 thiscase->data.case_stmt.index_expr = expr;
3142 thiscase->data.case_stmt.nominal_type = type;
3143 thiscase->data.case_stmt.default_label = 0;
3144 thiscase->data.case_stmt.num_ranges = 0;
3145 thiscase->data.case_stmt.printname = printname;
3146 thiscase->data.case_stmt.seenlabel = 0;
3147 case_stack = thiscase;
3148 nesting_stack = thiscase;
3149
3150 do_pending_stack_adjust ();
3151
3152 /* Make sure case_stmt.start points to something that won't
3153 need any transformation before expand_end_case. */
3154 if (GET_CODE (get_last_insn ()) != NOTE)
3155    emit_note (NULL_PTR, NOTE_INSN_DELETED);
3156
3157 thiscase->data.case_stmt.start = get_last_insn ();
3158}
3159
3160/* Start a "dummy case statement" within which case labels are invalid
3161 and are not connected to any larger real case statement.
3162 This can be used if you don't want to let a case statement jump
3163 into the middle of certain kinds of constructs. */
3164
3165void
3166expand_start_case_dummy ()
3167{
3168 register struct nesting *thiscase = ALLOC_NESTING ();
3169
3170 /* Make an entry on case_stack for the dummy. */
3171
3172 thiscase->next = case_stack;
3173 thiscase->all = nesting_stack;
3174 thiscase->depth = ++nesting_depth;
3175 thiscase->exit_label = 0;
3176 thiscase->data.case_stmt.case_list = 0;
3177 thiscase->data.case_stmt.start = 0;
3178 thiscase->data.case_stmt.nominal_type = 0;
3179 thiscase->data.case_stmt.default_label = 0;
3180 thiscase->data.case_stmt.num_ranges = 0;
3181 case_stack = thiscase;
3182 nesting_stack = thiscase;
3183}
3184
3185/* End a dummy case statement. */
3186
3187void
3188expand_end_case_dummy ()
3189{
3190 POPSTACK (case_stack);
3191}
3192
3193/* Return the data type of the index-expression
3194 of the innermost case statement, or null if none. */
3195
3196tree
3197case_index_expr_type ()
3198{
3199 if (case_stack)
3200 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3201 return 0;
3202}
3203\f
3204/* Accumulate one case or default label inside a case or switch statement.
3205 VALUE is the value of the case (a null pointer, for a default label).
3206
3207 If not currently inside a case or switch statement, return 1 and do
3208 nothing. The caller will print a language-specific error message.
3209 If VALUE is a duplicate or overlaps, return 2 and do nothing
3210 except store the (first) duplicate node in *DUPLICATE.
3211 If VALUE is out of range, return 3 and do nothing.
3212 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3213 Return 0 on success.
3214
3215 Extended to handle range statements. */
3216
3217int
3218pushcase (value, label, duplicate)
3219 register tree value;
3220 register tree label;
3221 tree *duplicate;
3222{
3223 register struct case_node **l;
3224 register struct case_node *n;
3225 tree index_type;
3226 tree nominal_type;
3227
3228 /* Fail if not inside a real case statement. */
3229 if (! (case_stack && case_stack->data.case_stmt.start))
3230 return 1;
3231
3232 if (stack_block_stack
3233 && stack_block_stack->depth > case_stack->depth)
3234 return 5;
3235
3236 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3237 nominal_type = case_stack->data.case_stmt.nominal_type;
3238
3239 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3240 if (index_type == error_mark_node)
3241 return 0;
3242
3243 /* Convert VALUE to the type in which the comparisons are nominally done. */
3244 if (value != 0)
3245 value = convert (nominal_type, value);
3246
3247 /* If this is the first label, warn if any insns have been emitted. */
3248 if (case_stack->data.case_stmt.seenlabel == 0)
3249 {
3250 rtx insn;
3251 for (insn = case_stack->data.case_stmt.start;
3252 insn;
3253 insn = NEXT_INSN (insn))
3254 {
3255 if (GET_CODE (insn) == CODE_LABEL)
3256 break;
3257 if (GET_CODE (insn) != NOTE
3258 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3259 {
3260 warning ("unreachable code at beginning of %s",
3261 case_stack->data.case_stmt.printname);
3262 break;
3263 }
3264 }
3265 }
3266 case_stack->data.case_stmt.seenlabel = 1;
3267
3268 /* Fail if this value is out of range for the actual type of the index
3269 (which may be narrower than NOMINAL_TYPE). */
3270 if (value != 0 && ! int_fits_type_p (value, index_type))
3271 return 3;
3272
3273 /* Fail if this is a duplicate or overlaps another entry. */
3274 if (value == 0)
3275 {
3276 if (case_stack->data.case_stmt.default_label != 0)
3277 {
3278 *duplicate = case_stack->data.case_stmt.default_label;
3279 return 2;
3280 }
3281 case_stack->data.case_stmt.default_label = label;
3282 }
3283 else
3284 {
3285 /* Find the elt in the chain before which to insert the new value,
3286 to keep the chain sorted in increasing order.
3287 But report an error if this element is a duplicate. */
3288 for (l = &case_stack->data.case_stmt.case_list;
3289 /* Keep going past elements distinctly less than VALUE. */
3290 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3291 l = &(*l)->right)
3292 ;
3293 if (*l)
3294 {
3295 /* Element we will insert before must be distinctly greater;
3296 overlap means error. */
3297 if (! tree_int_cst_lt (value, (*l)->low))
3298 {
3299 *duplicate = (*l)->code_label;
3300 return 2;
3301 }
3302 }
3303
3304 /* Add this label to the chain, and succeed.
3305 Copy VALUE so it is on temporary rather than momentary
3306 obstack and will thus survive till the end of the case statement. */
3307 n = (struct case_node *) oballoc (sizeof (struct case_node));
3308 n->left = 0;
3309 n->right = *l;
3310 n->high = n->low = copy_node (value);
3311 n->code_label = label;
3312 *l = n;
3313 }
3314
3315 expand_label (label);
3316 return 0;
3317}
3318
3319/* Like pushcase but this case applies to all values
3320 between VALUE1 and VALUE2 (inclusive).
3321 The return value is the same as that of pushcase
3322 but there is one additional error code:
3323 4 means the specified range was empty. */
3324
3325int
3326pushcase_range (value1, value2, label, duplicate)
3327 register tree value1, value2;
3328 register tree label;
3329 tree *duplicate;
3330{
3331 register struct case_node **l;
3332 register struct case_node *n;
3333 tree index_type;
3334 tree nominal_type;
3335
3336 /* Fail if not inside a real case statement. */
3337 if (! (case_stack && case_stack->data.case_stmt.start))
3338 return 1;
3339
3340 if (stack_block_stack
3341 && stack_block_stack->depth > case_stack->depth)
3342 return 5;
3343
3344 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3345 nominal_type = case_stack->data.case_stmt.nominal_type;
3346
3347 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3348 if (index_type == error_mark_node)
3349 return 0;
3350
3351 /* If this is the first label, warn if any insns have been emitted. */
3352 if (case_stack->data.case_stmt.seenlabel == 0)
3353 {
3354 rtx insn;
3355 for (insn = case_stack->data.case_stmt.start;
3356 insn;
3357 insn = NEXT_INSN (insn))
3358 {
3359 if (GET_CODE (insn) == CODE_LABEL)
3360 break;
3361 if (GET_CODE (insn) != NOTE
3362 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3363 {
3364 warning ("unreachable code at beginning of %s",
3365 case_stack->data.case_stmt.printname);
3366 break;
3367 }
3368 }
3369 }
3370 case_stack->data.case_stmt.seenlabel = 1;
3371
3372 /* Convert VALUEs to type in which the comparisons are nominally done. */
3373 if (value1 == 0) /* Negative infinity. */
3374    value1 = TYPE_MIN_VALUE (index_type);
3375 value1 = convert (nominal_type, value1);
3376
3377 if (value2 == 0) /* Positive infinity. */
3378    value2 = TYPE_MAX_VALUE (index_type);
3379 value2 = convert (nominal_type, value2);
3380
3381 /* Fail if these values are out of range. */
3382 if (! int_fits_type_p (value1, index_type))
3383 return 3;
3384
3385 if (! int_fits_type_p (value2, index_type))
3386 return 3;
3387
3388 /* Fail if the range is empty. */
3389 if (tree_int_cst_lt (value2, value1))
3390 return 4;
3391
3392 /* If the bounds are equal, turn this into the one-value case. */
3393 if (tree_int_cst_equal (value1, value2))
3394 return pushcase (value1, label, duplicate);
3395
3396 /* Find the elt in the chain before which to insert the new value,
3397 to keep the chain sorted in increasing order.
3398 But report an error if this element is a duplicate. */
3399 for (l = &case_stack->data.case_stmt.case_list;
3400 /* Keep going past elements distinctly less than this range. */
3401 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3402 l = &(*l)->right)
3403 ;
3404 if (*l)
3405 {
3406 /* Element we will insert before must be distinctly greater;
3407 overlap means error. */
3408 if (! tree_int_cst_lt (value2, (*l)->low))
3409 {
3410 *duplicate = (*l)->code_label;
3411 return 2;
3412 }
3413 }
3414
3415 /* Add this label to the chain, and succeed.
3416 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3417 obstack and will thus survive till the end of the case statement. */
3418
3419 n = (struct case_node *) oballoc (sizeof (struct case_node));
3420 n->left = 0;
3421 n->right = *l;
3422 n->low = copy_node (value1);
3423 n->high = copy_node (value2);
3424 n->code_label = label;
3425 *l = n;
3426
3427 expand_label (label);
3428
3429 case_stack->data.case_stmt.num_ranges++;
3430
3431 return 0;
3432}
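
A front end is expected to turn these return codes into user diagnostics. A hypothetical mapping, standalone and with invented message wording; only the codes themselves come from pushcase and pushcase_range above (code 5 corresponds to the stack_block_stack depth check):

#include <stdio.h>

/* Illustrative only: report a pushcase/pushcase_range failure. */
static void
report_pushcase (int code)
{
  static const char *msg[] = {
    "ok",
    "case label not within a switch statement",
    "duplicate (or overlapping) case value",
    "case value out of range for the switch index type",
    "empty case range",
    "case label within the scope of a nested binding contour",
  };
  if (code > 0 && code <= 5)
    fprintf (stderr, "error: %s\n", msg[code]);
}

int
main (void)
{
  report_pushcase (4);
  return 0;
}
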
3433\f
3434/* Called when the index of a switch statement is an enumerated type
3435 and there is no default label.
3436
3437 Checks that all enumeration literals are covered by the case
3438 expressions of a switch. Also, warn if there are any extra
3439 switch cases that are *not* elements of the enumerated type.
3440
3441 If all enumeration literals were covered by the case expressions,
3442 turn one of the expressions into the default expression since it should
3443 not be possible to fall through such a switch. */
3444
3445void
3446check_for_full_enumeration_handling (type)
3447 tree type;
3448{
3449 register struct case_node *n;
3450 register struct case_node **l;
3451 register tree chain;
3452 int all_values = 1;
3453
3454 /* The time complexity of this loop is currently O(N * M), with
3455 N being the number of enumerals in the enumerated type, and
3456 M being the number of case expressions in the switch. */
3457
3458 for (chain = TYPE_VALUES (type);
3459 chain;
3460 chain = TREE_CHAIN (chain))
3461 {
3462 /* Find a match between enumeral and case expression, if possible.
3463 Quit looking when we've gone too far (since case expressions
3464 are kept sorted in ascending order). Warn about enumerals not
3465 handled in the switch statement case expression list. */
3466
3467 for (n = case_stack->data.case_stmt.case_list;
3468 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3469 n = n->right)
3470 ;
3471
1ddde1cd 3472 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
3473 {
3474 if (warn_switch)
1ddde1cd 3475 warning ("enumeration value `%s' not handled in switch",
3476 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3477 all_values = 0;
3478 }
3479 }
3480
3481 /* Now we go the other way around; we warn if there are case
3482 expressions that don't correspond to enumerals. This can
3483 occur since C and C++ don't enforce type-checking of
3484 assignments to enumeration variables. */
3485
3486 if (warn_switch)
3487 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3488 {
3489 for (chain = TYPE_VALUES (type);
3490 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3491 chain = TREE_CHAIN (chain))
3492 ;
3493
3494 if (!chain)
3495 warning ("case value `%d' not in enumerated type `%s'",
3496 TREE_INT_CST_LOW (n->low),
3497 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3498 == IDENTIFIER_NODE)
3499 ? TYPE_NAME (type)
3500 : DECL_NAME (TYPE_NAME (type))));
3501 if (!tree_int_cst_equal (n->low, n->high))
3502 {
3503 for (chain = TYPE_VALUES (type);
3504 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
3505 chain = TREE_CHAIN (chain))
3506 ;
3507
3508 if (!chain)
3509 warning ("case value `%d' not in enumerated type `%s'",
3510 TREE_INT_CST_LOW (n->high),
3511 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3512 == IDENTIFIER_NODE)
3513 ? TYPE_NAME (type)
3514 : DECL_NAME (TYPE_NAME (type))));
3515 }
3516 }
3517
3518 /* If all values were found as case labels, make one of them the default
3519 label. Thus, this switch will never fall through. We arbitrarily pick
3520 the last one to make the default since this is likely the most
3521 efficient choice. */
3522
3523 if (all_values)
3524 {
3525 for (l = &case_stack->data.case_stmt.case_list;
3526 (*l)->right != 0;
3527 l = &(*l)->right)
3528 ;
3529
3530 case_stack->data.case_stmt.default_label = (*l)->code_label;
3531 *l = 0;
3532 }
3533}
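
Seen from the source level, the two directions of checking correspond to the example below, compiled with -Wswitch; the quoted messages are the ones this function emits:

/* foo.c -- compile with -Wswitch. */
enum color { RED, GREEN, BLUE };

int
classify (int c)
{
  switch ((enum color) c)
    {
    case RED:                   /* fine: an enumeral of `color' */
      return 0;
    case GREEN:
      return 1;
    case 7:     /* "case value `7' not in enumerated type `color'" */
      return 2;
    }           /* "enumeration value `BLUE' not handled in switch" */
  return -1;
}
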
3534\f
3535/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 3536 in which ORIG_INDEX is the expression to be tested.
3537 Generate the code to test it and jump to the right place. */
3538
3539void
3540expand_end_case (orig_index)
3541 tree orig_index;
3542{
3543 tree minval, maxval, range;
3544 rtx default_label = 0;
3545 register struct case_node *n;
3546 int count;
3547 rtx index;
3548 rtx table_label = gen_label_rtx ();
3549 int ncases;
3550 rtx *labelvec;
3551 register int i;
3552 rtx before_case;
3553 register struct nesting *thiscase = case_stack;
3554 tree index_expr = thiscase->data.case_stmt.index_expr;
3555 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3556
3557 do_pending_stack_adjust ();
3558
3559 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3560 if (TREE_TYPE (index_expr) != error_mark_node)
3561 {
3562 /* If switch expression was an enumerated type, check that all
3563 enumeration literals are covered by the cases.
3564 No sense trying this if there's a default case, however. */
3565
3566 if (!thiscase->data.case_stmt.default_label
3567 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3568 && TREE_CODE (index_expr) != INTEGER_CST)
3569 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3570
3571 /* If this is the first label, warn if any insns have been emitted. */
3572 if (thiscase->data.case_stmt.seenlabel == 0)
3573 {
3574 rtx insn;
3575 for (insn = get_last_insn ();
3576 insn != case_stack->data.case_stmt.start;
3577 insn = PREV_INSN (insn))
3578 if (GET_CODE (insn) != NOTE
3579 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
3580 {
3581 warning ("unreachable code at beginning of %s",
3582 case_stack->data.case_stmt.printname);
3583 break;
3584 }
3585 }
3586
3587 /* If we don't have a default-label, create one here,
3588 after the body of the switch. */
3589 if (thiscase->data.case_stmt.default_label == 0)
3590 {
3591 thiscase->data.case_stmt.default_label
3592 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3593 expand_label (thiscase->data.case_stmt.default_label);
3594 }
3595 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3596
3597 before_case = get_last_insn ();
3598
3599 /* Simplify the case-list before we count it. */
3600 group_case_nodes (thiscase->data.case_stmt.case_list);
3601
3602 /* Get upper and lower bounds of case values.
3603 Also convert all the case values to the index expr's data type. */
3604
3605 count = 0;
3606 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3607 {
3608 /* Check low and high label values are integers. */
3609 if (TREE_CODE (n->low) != INTEGER_CST)
3610 abort ();
3611 if (TREE_CODE (n->high) != INTEGER_CST)
3612 abort ();
3613
3614 n->low = convert (TREE_TYPE (index_expr), n->low);
3615 n->high = convert (TREE_TYPE (index_expr), n->high);
3616
3617 /* Count the elements and track the largest and smallest
3618 of them (treating them as signed even if they are not). */
3619 if (count++ == 0)
3620 {
3621 minval = n->low;
3622 maxval = n->high;
3623 }
3624 else
3625 {
3626 if (INT_CST_LT (n->low, minval))
3627 minval = n->low;
3628 if (INT_CST_LT (maxval, n->high))
3629 maxval = n->high;
3630 }
3631 /* A range counts double, since it requires two compares. */
3632 if (! tree_int_cst_equal (n->low, n->high))
3633 count++;
3634 }
3635
3636 /* Compute span of values. */
3637 if (count != 0)
3638 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3639 maxval, minval));
3640
3641 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3642 {
3643 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3644 emit_queue ();
3645 emit_jump (default_label);
3646 }
3647 /* If range of values is much bigger than number of values,
3648 make a sequence of conditional branches instead of a dispatch.
3649 If the switch-index is a constant, do it this way
3650 because we can optimize it. */
3651
3652#ifndef CASE_VALUES_THRESHOLD
28d81abb 3653#ifdef HAVE_casesi
4f73c5dd 3654#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
28d81abb 3655#else
3656 /* If machine does not have a case insn that compares the
3657 bounds, this means extra overhead for dispatch tables
3658 which raises the threshold for using them. */
3659#define CASE_VALUES_THRESHOLD 5
3660#endif /* HAVE_casesi */
3661#endif /* CASE_VALUES_THRESHOLD */
3662
3663 else if (TREE_INT_CST_HIGH (range) != 0
3664 || count < CASE_VALUES_THRESHOLD
3665 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
3666 > 10 * count)
28d81abb 3667 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 3668 /* These will reduce to a constant. */
28d81abb 3669 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 3670 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 3671 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3672 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3673 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3674 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 3675 {
37366632 3676 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3677
3678 /* If the index is a short or char that we do not have
3679 an insn to handle comparisons directly, convert it to
3680 a full integer now, rather than letting each comparison
3681 generate the conversion. */
3682
3683 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3684 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
3685 == CODE_FOR_nothing))
3686 {
3687 enum machine_mode wider_mode;
3688 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3689 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3690 if (cmp_optab->handlers[(int) wider_mode].insn_code
3691 != CODE_FOR_nothing)
3692 {
3693 index = convert_to_mode (wider_mode, index, unsignedp);
3694 break;
3695 }
3696 }
3697
3698 emit_queue ();
3699 do_pending_stack_adjust ();
3700
3701 index = protect_from_queue (index, 0);
3702 if (GET_CODE (index) == MEM)
3703 index = copy_to_reg (index);
3704 if (GET_CODE (index) == CONST_INT
3705 || TREE_CODE (index_expr) == INTEGER_CST)
3706 {
3707 /* Make a tree node with the proper constant value
3708 if we don't already have one. */
3709 if (TREE_CODE (index_expr) != INTEGER_CST)
3710 {
3711 index_expr
3712 = build_int_2 (INTVAL (index),
3713 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3714 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3715 }
3716
3717 /* For constant index expressions we need only
3718 issue an unconditional branch to the appropriate
3719 target code. The job of removing any unreachable
3720 code is left to the optimization phase if the
3721 "-O" option is specified. */
3722 for (n = thiscase->data.case_stmt.case_list;
3723 n;
3724 n = n->right)
3725 {
3726 if (! tree_int_cst_lt (index_expr, n->low)
3727 && ! tree_int_cst_lt (n->high, index_expr))
3728 break;
3729 }
3730 if (n)
3731 emit_jump (label_rtx (n->code_label));
3732 else
3733 emit_jump (default_label);
3734 }
3735 else
3736 {
3737 /* If the index expression is not constant we generate
3738 a binary decision tree to select the appropriate
3739 target code. This is done as follows:
3740
3741 The list of cases is rearranged into a binary tree,
3742 nearly optimal assuming equal probability for each case.
3743
3744 The tree is transformed into RTL, eliminating
3745 redundant test conditions at the same time.
3746
3747 If program flow could reach the end of the
3748 decision tree an unconditional jump to the
3749 default code is emitted. */
3750
3751 use_cost_table
3752 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
28d81abb 3753 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3754 balance_case_nodes (&thiscase->data.case_stmt.case_list,
3755 NULL_PTR);
3756 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3757 default_label, TREE_TYPE (index_expr));
3758 emit_jump_if_reachable (default_label);
3759 }
3760 }
3761 else
3762 {
3763 int win = 0;
3764#ifdef HAVE_casesi
3765 if (HAVE_casesi)
3766 {
c4fcf531 3767 enum machine_mode index_mode = SImode;
5130a5cc 3768 int index_bits = GET_MODE_BITSIZE (index_mode);
c4fcf531 3769
28d81abb 3770 /* Convert the index to SImode. */
3771 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3772 > GET_MODE_BITSIZE (index_mode))
28d81abb 3773 {
af2682ef 3774 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
37366632 3775 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
3776
3777 /* We must handle the endpoints in the original mode. */
3778 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3779 index_expr, minval);
3780 minval = integer_zero_node;
3781 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3782 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
3783 emit_jump_insn (gen_bltu (default_label));
3784 /* Now we can safely truncate. */
3785 index = convert_to_mode (index_mode, index, 0);
3786 }
3787 else
3788 {
3789 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3790 index_expr = convert (type_for_size (index_bits, 0),
3791 index_expr);
37366632 3792 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 3793 }
3794 emit_queue ();
3795 index = protect_from_queue (index, 0);
3796 do_pending_stack_adjust ();
3797
3798 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
3799 VOIDmode, 0),
3800 expand_expr (range, NULL_RTX,
3801 VOIDmode, 0),
3802 table_label, default_label));
3803 win = 1;
3804 }
3805#endif
3806#ifdef HAVE_tablejump
3807 if (! win && HAVE_tablejump)
3808 {
3809 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3810 fold (build (MINUS_EXPR,
3811 TREE_TYPE (index_expr),
3812 index_expr, minval)));
37366632 3813 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 3814 emit_queue ();
af2682ef 3815 index = protect_from_queue (index, 0);
3816 do_pending_stack_adjust ();
3817
af2682ef 3818 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
37366632 3819 expand_expr (range, NULL_RTX, VOIDmode, 0),
3820 table_label, default_label);
3821 win = 1;
3822 }
3823#endif
3824 if (! win)
3825 abort ();
3826
3827 /* Get table of labels to jump to, in order of case index. */
3828
3829 ncases = TREE_INT_CST_LOW (range) + 1;
3830 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3831 bzero (labelvec, ncases * sizeof (rtx));
3832
3833 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3834 {
37366632 3835 register HOST_WIDE_INT i
3836 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3837
3838 while (1)
3839 {
3840 labelvec[i]
3841 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3842 if (i + TREE_INT_CST_LOW (minval)
3843 == TREE_INT_CST_LOW (n->high))
3844 break;
3845 i++;
3846 }
3847 }
3848
3849 /* Fill in the gaps with the default. */
3850 for (i = 0; i < ncases; i++)
3851 if (labelvec[i] == 0)
3852 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3853
3854 /* Output the table */
3855 emit_label (table_label);
3856
3857 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3858 were an expression, instead of an #ifdef/#ifndef. */
3859 if (
3860#ifdef CASE_VECTOR_PC_RELATIVE
3861 1 ||
3862#endif
3863 flag_pic)
3864 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3865 gen_rtx (LABEL_REF, Pmode, table_label),
3866 gen_rtvec_v (ncases, labelvec)));
3867 else
3868 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3869 gen_rtvec_v (ncases, labelvec)));
3870
3871 /* If the case insn drops through the table,
3872 after the table we must jump to the default-label.
3873 Otherwise record no drop-through after the table. */
3874#ifdef CASE_DROPS_THROUGH
3875 emit_jump (default_label);
3876#else
3877 emit_barrier ();
3878#endif
3879 }
3880
3881 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3882 reorder_insns (before_case, get_last_insn (),
3883 thiscase->data.case_stmt.start);
3884 }
3885 if (thiscase->exit_label)
3886 emit_label (thiscase->exit_label);
3887
3888 POPSTACK (case_stack);
3889
3890 free_temp_slots ();
3891}
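
Stripped of the tree accessors, the choice above between a comparison tree and a dispatch table is a density test: enough case values, and a span of at most ten times the case count (besides requiring a non-constant index and a span that fits in a HOST_WIDE_INT). A rough standalone restatement; the threshold of 4 assumes a machine with a casesi insn, per CASE_VALUES_THRESHOLD above:

#include <stdio.h>

/* A table pays off only when there are at least THRESHOLD cases and
   the span of values is no more than ten times the case count. */
static int
use_dispatch_table (unsigned long range, int count, int threshold)
{
  return count >= threshold && range <= 10UL * (unsigned long) count;
}

int
main (void)
{
  /* 4 cases spread over a span of 1000: comparison tree (prints 0). */
  printf ("%d\n", use_dispatch_table (1000, 4, 4));
  /* 8 cases packed into a span of 9: jump table (prints 1). */
  printf ("%d\n", use_dispatch_table (9, 8, 4));
  return 0;
}
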
3892
3893/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3894
3895static void
3896do_jump_if_equal (op1, op2, label, unsignedp)
3897 rtx op1, op2, label;
3898 int unsignedp;
3899{
3900 if (GET_CODE (op1) == CONST_INT
3901 && GET_CODE (op2) == CONST_INT)
3902 {
3903 if (INTVAL (op1) == INTVAL (op2))
3904 emit_jump (label);
3905 }
3906 else
3907 {
3908 enum machine_mode mode = GET_MODE (op1);
3909 if (mode == VOIDmode)
3910 mode = GET_MODE (op2);
37366632 3911 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
3912 emit_jump_insn (gen_beq (label));
3913 }
3914}
3915\f
3916/* Not all case values are encountered equally. This function
3917 uses a heuristic to weight case labels, in cases where that
3918 looks like a reasonable thing to do.
3919
3920 Right now, all we try to guess is text, and we establish the
3921 following weights:
3922
3923 chars above space: 16
3924 digits: 16
3925 default: 12
3926 space, punct: 8
3927 tab: 4
3928 newline: 2
3929 other "\" chars: 1
3930 remaining chars: 0
3931
3932 If we find any cases in the switch that are not either -1 or in the range
3933 of valid ASCII characters, or are control characters other than those
3934 commonly used with "\", don't treat this switch as scanning text.
3935
3936 Return 1 if these nodes are suitable for cost estimation, otherwise
3937 return 0. */
3938
3939static int
3940estimate_case_costs (node)
3941 case_node_ptr node;
3942{
3943 tree min_ascii = build_int_2 (-1, -1);
3944 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3945 case_node_ptr n;
3946 int i;
3947
3948 /* If we haven't already made the cost table, make it now. Note that the
3949 lower bound of the table is -1, not zero. */
3950
3951 if (cost_table == NULL)
3952 {
3953 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3954 bzero (cost_table - 1, 129 * sizeof (short));
3955
3956 for (i = 0; i < 128; i++)
3957 {
3958 if (isalnum (i))
3959 cost_table[i] = 16;
3960 else if (ispunct (i))
3961 cost_table[i] = 8;
3962 else if (iscntrl (i))
3963 cost_table[i] = -1;
3964 }
3965
3966 cost_table[' '] = 8;
3967 cost_table['\t'] = 4;
3968 cost_table['\0'] = 4;
3969 cost_table['\n'] = 2;
3970 cost_table['\f'] = 1;
3971 cost_table['\v'] = 1;
3972 cost_table['\b'] = 1;
3973 }
3974
3975 /* See if all the case expressions look like text. It is text if the
3976 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
3977 as signed arithmetic since we don't want to ever access cost_table with a
3978 value less than -1. Also check that none of the constants in a range
3979 are strange control characters. */
3980
3981 for (n = node; n; n = n->right)
3982 {
3983 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3984 return 0;
3985
3986 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3987 if (cost_table[i] < 0)
3988 return 0;
3989 }
3990
3991 /* All interesting values are within the range of interesting
3992 ASCII characters. */
3993 return 1;
3994}
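
The `+ 1' in the allocation above is what makes cost_table[-1] a legal subscript. The same offset-pointer trick in isolation, as a standalone sketch with illustrative names:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int
main (void)
{
  /* Allocate 129 slots covering indices -1..127, then advance the
     pointer so table[-1] addresses the first slot legally. */
  short *table = (short *) malloc (129 * sizeof (short)) + 1;

  memset (table - 1, 0, 129 * sizeof (short));
  table[-1] = 1;                /* the EOF-like slot */
  table[127] = 2;               /* the last ASCII slot */
  printf ("%d %d\n", table[-1], table[127]);

  free (table - 1);             /* free the pointer malloc returned */
  return 0;
}
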
3995
3996/* Scan an ordered list of case nodes
3997 combining those with consecutive values or ranges.
3998
3999 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4000
4001static void
4002group_case_nodes (head)
4003 case_node_ptr head;
4004{
4005 case_node_ptr node = head;
4006
4007 while (node)
4008 {
4009 rtx lb = next_real_insn (label_rtx (node->code_label));
4010 case_node_ptr np = node;
4011
4012 /* Try to group the successors of NODE with NODE. */
4013 while (((np = np->right) != 0)
4014 /* Do they jump to the same place? */
4015 && next_real_insn (label_rtx (np->code_label)) == lb
4016 /* Are their ranges consecutive? */
4017 && tree_int_cst_equal (np->low,
4018 fold (build (PLUS_EXPR,
4019 TREE_TYPE (node->high),
4020 node->high,
4021 integer_one_node)))
4022 /* An overflow is not consecutive. */
4023 && tree_int_cst_lt (node->high,
4024 fold (build (PLUS_EXPR,
4025 TREE_TYPE (node->high),
4026 node->high,
4027 integer_one_node))))
4028 {
4029 node->high = np->high;
4030 }
4031 /* NP is the first node after NODE which can't be grouped with it.
4032 Delete the nodes in between, and move on to that node. */
4033 node->right = np;
4034 node = np;
4035 }
4036}
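
A standalone sketch of the same merge on a plain array, leaving out the same-jump-target test that group_case_nodes also applies; the names are illustrative:

#include <stdio.h>

struct range { int low, high; };

/* Merge sorted ranges in place when they are consecutive;
   returns the new count. */
static int
merge_consecutive (struct range *r, int n)
{
  int out = 0, i;

  for (i = 1; i < n; i++)
    {
      if (r[i].low == r[out].high + 1)
        r[out].high = r[i].high;        /* extend the current range */
      else
        r[++out] = r[i];                /* start a new range */
    }
  return n ? out + 1 : 0;
}

int
main (void)
{
  struct range r[] = { {1, 1}, {2, 2}, {3, 3}, {7, 9} };
  int n = merge_consecutive (r, 4), i;

  for (i = 0; i < n; i++)
    printf ("%d..%d\n", r[i].low, r[i].high);   /* 1..3 and 7..9 */
  return 0;
}
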
4037
4038/* Take an ordered list of case nodes
4039 and transform them into a near optimal binary tree,
6dc42e49 4040 on the assumption that any target code selection value is as
4041 likely as any other.
4042
4043 The transformation is performed by splitting the ordered
4044 list into two equal sections plus a pivot. The parts are
4045 then attached to the pivot as left and right branches. Each
4046 branch is then transformed recursively. */
4047
4048static void
4049balance_case_nodes (head, parent)
4050 case_node_ptr *head;
4051 case_node_ptr parent;
4052{
4053 register case_node_ptr np;
4054
4055 np = *head;
4056 if (np)
4057 {
4058 int cost = 0;
4059 int i = 0;
4060 int ranges = 0;
4061 register case_node_ptr *npp;
4062 case_node_ptr left;
4063
4064 /* Count the number of entries on branch. Also count the ranges. */
4065
4066 while (np)
4067 {
4068 if (!tree_int_cst_equal (np->low, np->high))
4069 {
4070 ranges++;
4071 if (use_cost_table)
4072 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4073 }
4074
4075 if (use_cost_table)
4076 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4077
4078 i++;
4079 np = np->right;
4080 }
4081
4082 if (i > 2)
4083 {
4084 /* Split this list if it is long enough for that to help. */
4085 npp = head;
4086 left = *npp;
4087 if (use_cost_table)
4088 {
4089 /* Find the place in the list that bisects the list's total cost.
4090 Here I gets half the total cost. */
4091 int n_moved = 0;
4092 i = (cost + 1) / 2;
4093 while (1)
4094 {
4095 /* Skip nodes while their cost does not reach that amount. */
4096 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4097 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4098 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4099 if (i <= 0)
4100 break;
4101 npp = &(*npp)->right;
4102 n_moved += 1;
4103 }
4104 if (n_moved == 0)
4105 {
4106 /* Leave this branch lopsided, but optimize left-hand
4107 side and fill in `parent' fields for right-hand side. */
4108 np = *head;
4109 np->parent = parent;
4110 balance_case_nodes (&np->left, np);
4111 for (; np->right; np = np->right)
4112 np->right->parent = np;
4113 return;
4114 }
4115 }
4116 /* If there are just three nodes, split at the middle one. */
4117 else if (i == 3)
4118 npp = &(*npp)->right;
4119 else
4120 {
4121 /* Find the place in the list that bisects the list's total cost,
4122 where ranges count as 2.
4123 Here I gets half the total cost. */
4124 i = (i + ranges + 1) / 2;
4125 while (1)
4126 {
4127 /* Skip nodes while their cost does not reach that amount. */
4128 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4129 i--;
4130 i--;
4131 if (i <= 0)
4132 break;
4133 npp = &(*npp)->right;
4134 }
4135 }
4136 *head = np = *npp;
4137 *npp = 0;
4138 np->parent = parent;
4139 np->left = left;
4140
4141 /* Optimize each of the two split parts. */
4142 balance_case_nodes (&np->left, np);
4143 balance_case_nodes (&np->right, np);
4144 }
4145 else
4146 {
4147 /* Else leave this branch as one level,
4148 but fill in `parent' fields. */
4149 np = *head;
4150 np->parent = parent;
4151 for (; np->right; np = np->right)
4152 np->right->parent = np;
4153 }
4154 }
4155}
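
Without the cost table the split point is simply the middle of the list. A standalone sketch of that equal-weight case (every node counted once, ranges ignored); the names are illustrative:

#include <stdio.h>

struct node { int value; struct node *left, *right, *next; };

/* Turn a sorted singly linked list of N nodes into a balanced binary
   tree by detaching the left half and recursing on both halves. */
static struct node *
balance (struct node *head, int n)
{
  struct node **npp = &head, *mid;
  int i;

  if (n == 0)
    return 0;
  for (i = 0; i < n / 2; i++)
    npp = &(*npp)->next;
  mid = *npp;
  *npp = 0;                     /* detach the left half */
  mid->left = balance (head, n / 2);
  mid->right = balance (mid->next, n - n / 2 - 1);
  return mid;
}

static void
preorder (struct node *t, int depth)
{
  if (t == 0)
    return;
  printf ("%*s%d\n", depth * 2, "", t->value);
  preorder (t->left, depth + 1);
  preorder (t->right, depth + 1);
}

int
main (void)
{
  struct node pool[7], *head = 0, *t;
  int i;

  for (i = 6; i >= 0; i--)
    {
      pool[i].value = i + 1;
      pool[i].left = pool[i].right = 0;
      pool[i].next = head;
      head = &pool[i];
    }
  t = balance (head, 7);        /* root 4, children 2 and 6 */
  preorder (t, 0);
  return 0;
}
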
4156\f
4157/* Search the parent sections of the case node tree
4158 to see if a test for the lower bound of NODE would be redundant.
4159 INDEX_TYPE is the type of the index expression.
4160
4161 The instructions to generate the case decision tree are
4162 output in the same order as nodes are processed so it is
4163 known that if a parent node checks the range of the current
4164 node minus one, then the current node is bounded at its lower
4165 span. Thus the test would be redundant. */
4166
4167static int
4168node_has_low_bound (node, index_type)
4169 case_node_ptr node;
4170 tree index_type;
4171{
4172 tree low_minus_one;
4173 case_node_ptr pnode;
4174
4175 /* If the lower bound of this node is the lowest value in the index type,
4176 we need not test it. */
4177
4178 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4179 return 1;
4180
4181 /* If this node has a left branch, the value at the left must be less
4182 than that at this node, so it cannot be bounded at the bottom and
4183 we need not bother testing any further. */
4184
4185 if (node->left)
4186 return 0;
4187
4188 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4189 node->low, integer_one_node));
4190
4191 /* If the subtraction above overflowed, we can't verify anything.
4192 Otherwise, look for a parent that tests our value - 1. */
4193
4194 if (! tree_int_cst_lt (low_minus_one, node->low))
4195 return 0;
4196
4197 for (pnode = node->parent; pnode; pnode = pnode->parent)
4198 if (tree_int_cst_equal (low_minus_one, pnode->high))
4199 return 1;
4200
4201 return 0;
4202}
4203
4204/* Search the parent sections of the case node tree
4205 to see if a test for the upper bound of NODE would be redundant.
4206 INDEX_TYPE is the type of the index expression.
4207
4208 The instructions to generate the case decision tree are
4209 output in the same order as nodes are processed so it is
4210 known that if a parent node checks the range of the current
4211 node plus one, then the current node is bounded at its upper
4212 span. Thus the test would be redundant. */
4213
4214static int
4215node_has_high_bound (node, index_type)
4216 case_node_ptr node;
4217 tree index_type;
4218{
4219 tree high_plus_one;
4220 case_node_ptr pnode;
4221
4222 /* If the upper bound of this node is the highest value in the type
4223 of the index expression, we need not test against it. */
4224
4225 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4226 return 1;
4227
4228 /* If this node has a right branch, the value at the right must be greater
4229 than that at this node, so it cannot be bounded at the top and
4230 we need not bother testing any further. */
4231
4232 if (node->right)
4233 return 0;
4234
4235 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4236 node->high, integer_one_node));
4237
4238 /* If the addition above overflowed, we can't verify anything.
4239 Otherwise, look for a parent that tests our value + 1. */
4240
4241 if (! tree_int_cst_lt (node->high, high_plus_one))
4242 return 0;
4243
4244 for (pnode = node->parent; pnode; pnode = pnode->parent)
4245 if (tree_int_cst_equal (high_plus_one, pnode->low))
4246 return 1;
4247
4248 return 0;
4249}
4250
4251/* Search the parent sections of the
4252 case node tree to see if both tests for the upper and lower
4253 bounds of NODE would be redundant. */
4254
4255static int
4256node_is_bounded (node, index_type)
4257 case_node_ptr node;
4258 tree index_type;
4259{
4260 return (node_has_low_bound (node, index_type)
4261 && node_has_high_bound (node, index_type));
4262}
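
Concretely, for two adjacent case ranges the pruning these predicates permit looks like the hand-written dispatch below; an analogue for illustration, not compiler output:

#include <stdio.h>

/* For ranges 1..10 and 11..20: once `x > 10' has routed control to
   the right-hand side, x >= 11 is already proven, so the lower-bound
   test there is redundant; node_has_low_bound detects exactly this. */
static int
dispatch (int x)
{
  if (x > 10)
    {
      if (x <= 20)              /* only the upper bound is tested */
        return 2;               /* case 11 ... 20 */
    }
  else if (x >= 1)
    return 1;                   /* case 1 ... 10 */
  return 0;                     /* default */
}

int
main (void)
{
  printf ("%d %d %d\n", dispatch (5), dispatch (15), dispatch (25));
  return 0;
}
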
4263
4264/* Emit an unconditional jump to LABEL unless it would be dead code. */
4265
4266static void
4267emit_jump_if_reachable (label)
4268 rtx label;
4269{
4270 if (GET_CODE (get_last_insn ()) != BARRIER)
4271 emit_jump (label);
4272}
4273\f
4274/* Emit step-by-step code to select a case for the value of INDEX.
4275 The thus generated decision tree follows the form of the
4276 case-node binary tree NODE, whose nodes represent test conditions.
4277 INDEX_TYPE is the type of the index of the switch.
4278
4279 Care is taken to prune redundant tests from the decision tree
4280 by detecting any boundary conditions already checked by
4281 emitted rtx. (See node_has_high_bound, node_has_low_bound
4282 and node_is_bounded, above.)
4283
4284 Where the test conditions can be shown to be redundant we emit
4285 an unconditional jump to the target code. As a further
4286 optimization, the subordinates of a tree node are examined to
4287 check for bounded nodes. In this case conditional and/or
4288 unconditional jumps as a result of the boundary check for the
4289 current node are arranged to target the subordinates associated
4290 code for out of bound conditions on the current node node.
4291
f72aed24 4292 We can assume that when control reaches the code generated here,
4293 the index value has already been compared with the parents
4294 of this node, and determined to be on the same side of each parent
4295 as this node is. Thus, if this node tests for the value 51,
4296 and a parent tested for 52, we don't need to consider
4297 the possibility of a value greater than 51. If another parent
4298 tests for the value 50, then this node need not test anything. */
4299
4300static void
4301emit_case_nodes (index, node, default_label, index_type)
4302 rtx index;
4303 case_node_ptr node;
4304 rtx default_label;
4305 tree index_type;
4306{
4307 /* If INDEX has an unsigned type, we must make unsigned branches. */
4308 int unsignedp = TREE_UNSIGNED (index_type);
4309 typedef rtx rtx_function ();
4310 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4311 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4312 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4313 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4314 enum machine_mode mode = GET_MODE (index);
4315
4316 /* See if our parents have already tested everything for us.
4317 If they have, emit an unconditional jump for this node. */
4318 if (node_is_bounded (node, index_type))
4319 emit_jump (label_rtx (node->code_label));
4320
4321 else if (tree_int_cst_equal (node->low, node->high))
4322 {
4323 /* Node is single valued. First see if the index expression matches
4324 this node and then check our children, if any. */
4325
37366632 4326 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4327 label_rtx (node->code_label), unsignedp);
4328
4329 if (node->right != 0 && node->left != 0)
4330 {
4331 /* This node has children on both sides.
4332 Dispatch to one side or the other
4333 by comparing the index value with this node's value.
4334 If one subtree is bounded, check that one first,
4335 so we can avoid real branches in the tree. */
4336
4337 if (node_is_bounded (node->right, index_type))
4338 {
4339 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4340 VOIDmode, 0),
4341 GT, NULL_RTX, mode, unsignedp, 0);
4342
4343 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4344 emit_case_nodes (index, node->left, default_label, index_type);
4345 }
4346
4347 else if (node_is_bounded (node->left, index_type))
4348 {
37366632 4349 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
28d81abb 4350 VOIDmode, 0),
37366632 4351 LT, NULL_RTX, mode, unsignedp, 0);
4352 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4353 emit_case_nodes (index, node->right, default_label, index_type);
4354 }
4355
4356 else
4357 {
4358 /* Neither node is bounded. First distinguish the two sides;
4359 then emit the code for one side at a time. */
4360
4361 tree test_label
4362 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4363
4364 /* See if the value is on the right. */
37366632 4365 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
28d81abb 4366 VOIDmode, 0),
37366632 4367 GT, NULL_RTX, mode, unsignedp, 0);
4368 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4369
4370 /* Value must be on the left.
4371 Handle the left-hand subtree. */
4372 emit_case_nodes (index, node->left, default_label, index_type);
4373 /* If left-hand subtree does nothing,
4374 go to default. */
4375 emit_jump_if_reachable (default_label);
4376
4377 /* Code branches here for the right-hand subtree. */
4378 expand_label (test_label);
4379 emit_case_nodes (index, node->right, default_label, index_type);
4380 }
4381 }
4382
4383 else if (node->right != 0 && node->left == 0)
4384 {
4385 /* Here we have a right child but no left so we issue a conditional
4386 branch to default and process the right child.
4387
4388 Omit the conditional branch to default if it avoids only one
4389 right child; it costs too much space to save so little time. */
4390
de14fd73 4391 if (node->right->right || node->right->left
4392 || !tree_int_cst_equal (node->right->low, node->right->high))
4393 {
4394 if (!node_has_low_bound (node, index_type))
4395 {
4396 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4397 VOIDmode, 0),
4398 LT, NULL_RTX, mode, unsignedp, 0);
4399 emit_jump_insn ((*gen_blt_pat) (default_label));
4400 }
4401
4402 emit_case_nodes (index, node->right, default_label, index_type);
4403 }
4404 else
4405 /* We cannot process node->right normally
4406 since we haven't ruled out the numbers less than
4407 this node's value. So handle node->right explicitly. */
4408 do_jump_if_equal (index,
4409 expand_expr (node->right->low, NULL_RTX,
4410 VOIDmode, 0),
4411 label_rtx (node->right->code_label), unsignedp);
4412 }
4413
4414 else if (node->right == 0 && node->left != 0)
4415 {
4416 /* Just one subtree, on the left. */
4417
4418#if 0 /* The following code and comment were formerly part
4419 of the condition here, but they didn't work
4420 and I don't understand what the idea was. -- rms. */
4421 /* If our "most probable entry" is less probable
4422 than the default label, emit a jump to
4423 the default label using condition codes
4424 already lying around. With no right branch,
4425 a branch-greater-than will get us to the default
4426 label correctly. */
4427 if (use_cost_table
4428 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4429 ;
4430#endif /* 0 */
4431 if (node->left->left || node->left->right
4432 || !tree_int_cst_equal (node->left->low, node->left->high))
4433 {
4434 if (!node_has_high_bound (node, index_type))
4435 {
4436 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4437 VOIDmode, 0),
4438 GT, NULL_RTX, mode, unsignedp, 0);
4439 emit_jump_insn ((*gen_bgt_pat) (default_label));
4440 }
4441
4442 emit_case_nodes (index, node->left, default_label, index_type);
4443 }
4444 else
4445 /* We cannot process node->left normally
4446 since we haven't ruled out the numbers greater than
4447 this node's value. So handle node->left explicitly. */
4448 do_jump_if_equal (index,
4449 expand_expr (node->left->low, NULL_RTX,
4450 VOIDmode, 0),
4451 label_rtx (node->left->code_label), unsignedp);
4452 }
4453 }
4454 else
4455 {
4456 /* Node is a range. These cases are very similar to those for a single
4457 value, except that we do not start by testing whether this node
4458 is the one to branch to. */
4459
4460 if (node->right != 0 && node->left != 0)
4461 {
4462 /* Node has subtrees on both sides.
4463 If the right-hand subtree is bounded,
4464 test for it first, since we can go straight there.
4465 Otherwise, we need to make a branch in the control structure,
4466 then handle the two subtrees. */
4467 tree test_label = 0;
4468
4469 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4470 VOIDmode, 0),
4471 GT, NULL_RTX, mode, unsignedp, 0);
4472
4473 if (node_is_bounded (node->right, index_type))
4474 /* Right hand node is fully bounded so we can eliminate any
4475 testing and branch directly to the target code. */
4476 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4477 else
4478 {
4479 /* Right hand node requires testing.
4480 Branch to a label where we will handle it later. */
4481
4482 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4483 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4484 }
4485
4486 /* Value belongs to this node or to the left-hand subtree. */
4487
4488 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4489 GE, NULL_RTX, mode, unsignedp, 0);
4490 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4491
4492 /* Handle the left-hand subtree. */
4493 emit_case_nodes (index, node->left, default_label, index_type);
4494
4495 /* If right node had to be handled later, do that now. */
4496
4497 if (test_label)
4498 {
4499 /* If the left-hand subtree fell through,
4500 don't let it fall into the right-hand subtree. */
4501 emit_jump_if_reachable (default_label);
4502
4503 expand_label (test_label);
4504 emit_case_nodes (index, node->right, default_label, index_type);
4505 }
4506 }
4507
4508 else if (node->right != 0 && node->left == 0)
4509 {
4510 /* Deal with values to the left of this node,
4511 if they are possible. */
4512 if (!node_has_low_bound (node, index_type))
4513 {
4514 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
4515 VOIDmode, 0),
4516 LT, NULL_RTX, mode, unsignedp, 0);
4517 emit_jump_insn ((*gen_blt_pat) (default_label));
4518 }
4519
4520 /* Value belongs to this node or to the right-hand subtree. */
4521
4522 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4523 VOIDmode, 0),
4524 LE, NULL_RTX, mode, unsignedp, 0);
4525 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4526
4527 emit_case_nodes (index, node->right, default_label, index_type);
4528 }
4529
4530 else if (node->right == 0 && node->left != 0)
4531 {
4532 /* Deal with values to the right of this node,
4533 if they are possible. */
4534 if (!node_has_high_bound (node, index_type))
4535 {
4536 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4537 VOIDmode, 0),
4538 GT, NULL_RTX, mode, unsignedp, 0);
4539 emit_jump_insn ((*gen_bgt_pat) (default_label));
4540 }
4541
4542 /* Value belongs to this node or to the left-hand subtree. */
4543
4544 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4545 GE, NULL_RTX, mode, unsignedp, 0);
4546 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4547
4548 emit_case_nodes (index, node->left, default_label, index_type);
4549 }
4550
4551 else
4552 {
4553 /* Node has no children so we check low and high bounds to remove
4554 redundant tests. At most one of the two bound tests can be redundant,
4555 since otherwise this node would be fully bounded--a case handled already. */
4556
4557 if (!node_has_high_bound (node, index_type))
4558 {
4559 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4560 VOIDmode, 0),
4561 GT, NULL_RTX, mode, unsignedp, 0);
4562 emit_jump_insn ((*gen_bgt_pat) (default_label));
4563 }
4564
4565 if (!node_has_low_bound (node, index_type))
4566 {
4567 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
4568 VOIDmode, 0),
4569 LT, NULL_RTX, mode, unsignedp, 0);
4570 emit_jump_insn ((*gen_blt_pat) (default_label));
4571 }
4572
4573 emit_jump (label_rtx (node->code_label));
4574 }
4575 }
4576}
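
Putting the pieces together, a sparse three-valued switch comes out as nested comparisons. An illustrative C rendering of the decision tree for case values 1, 5 and 9, with 5 as the balanced root; a sketch of the generated control flow, not actual compiler output:

#include <stdio.h>

static int
dispatch (int x)
{
  if (x == 5)                   /* root: single-valued test first */
    return 5;
  if (x > 5)
    {
      if (x == 9)               /* right subtree */
        return 9;
    }
  else if (x == 1)              /* left subtree */
    return 1;
  return -1;                    /* default label */
}

int
main (void)
{
  printf ("%d %d %d\n", dispatch (1), dispatch (9), dispatch (4));
  return 0;
}
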
4577\f
4578/* These routines are used by the loop unrolling code. They copy BLOCK trees
4579 so that the debugging info will be correct for the unrolled loop. */
4580
4581/* Indexed by loop number, contains pointer to the first block in the loop,
4582 or zero if none. Only valid if doing loop unrolling and outputting debugger
4583 info. */
4584
4585tree *loop_number_first_block;
4586
4587/* Indexed by loop number, contains pointer to the last block in the loop,
4588 only valid if loop_number_first_block is nonzero. */
4589
4590tree *loop_number_last_block;
4591
4592/* Indexed by loop number, contains nesting level of first block in the
4593 loop, if any. Only valid if doing loop unrolling and outputting debugger
4594 info. */
4595
4596int *loop_number_block_level;
4597
4598/* Scan the function looking for loops, and walk the BLOCK tree at the
4599 same time. Record the first and last BLOCK tree corresponding to each
4600 loop. This function is similar to find_and_verify_loops in loop.c. */
4601
4602void
4603find_loop_tree_blocks (f)
4604 rtx f;
4605{
4606 rtx insn;
4607 int current_loop = -1;
4608 int next_loop = -1;
4609 int loop;
4610 int block_level, tree_level;
4611 tree tree_block, parent_tree_block;
4612
4613 tree_block = DECL_INITIAL (current_function_decl);
4614 parent_tree_block = 0;
4615 block_level = 0;
4616 tree_level = -1;
4617
4618 /* Find boundaries of loops, and save the first and last BLOCK tree
4619 corresponding to each loop. */
4620
4621 for (insn = f; insn; insn = NEXT_INSN (insn))
4622 {
4623 if (GET_CODE (insn) == NOTE)
4624 switch (NOTE_LINE_NUMBER (insn))
4625 {
4626 case NOTE_INSN_LOOP_BEG:
4627 loop_number_block_level[++next_loop] = block_level;
4628 loop_number_first_block[next_loop] = 0;
4629 current_loop = next_loop;
4630 break;
4631
4632 case NOTE_INSN_LOOP_END:
4633 if (current_loop == -1)
4634 abort ();
4635
4636 current_loop = loop_outer_loop[current_loop];
4637 break;
4638
4639 case NOTE_INSN_BLOCK_BEG:
4640 if (tree_level < block_level)
4641 {
4642 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4643 we must now visit the subtree of the current block. */
4644 parent_tree_block = tree_block;
4645 tree_block = BLOCK_SUBBLOCKS (tree_block);
4646 tree_level++;
4647 }
4648 else if (tree_level > block_level)
4649 abort ();
4650
4651 /* Save this block tree here for all nested loops for which
4652 this is the topmost block. */
4653 for (loop = current_loop;
4654 loop != -1 && block_level == loop_number_block_level[loop];
4655 loop = loop_outer_loop[loop])
4656 {
4657 if (loop_number_first_block[loop] == 0)
4658 loop_number_first_block[loop] = tree_block;
4659 loop_number_last_block[loop] = tree_block;
4660 }
4661
4662 block_level++;
4663 break;
4664
4665 case NOTE_INSN_BLOCK_END:
4666 block_level--;
4667 if (tree_level > block_level)
4668 {
4669 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4670 we must now visit the parent of the current tree. */
4671 if (tree_block != 0 || parent_tree_block == 0)
4672 abort ();
4673 tree_block = parent_tree_block;
4674 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4675 tree_level--;
4676 }
4677 tree_block = BLOCK_CHAIN (tree_block);
4678 break;
4679 }
4680 }
4681}
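
The bookkeeping above relies on a simple nesting discipline: each LOOP_BEG note opens a loop whose parent is the loop current at that point, and each LOOP_END pops back to the parent. A miniature standalone version of that discipline; in stmt.c the parent links are loop_outer_loop, which loop.c fills in:

#include <stdio.h>

int
main (void)
{
  static const char *notes[] = { "BEG", "BEG", "END", "BEG", "END", "END" };
  int outer[8], current = -1, next = -1;
  size_t i;

  for (i = 0; i < sizeof notes / sizeof *notes; i++)
    if (notes[i][0] == 'B')
      {
        outer[++next] = current;        /* new loop, parent = current */
        current = next;
        printf ("enter loop %d (parent %d)\n", current, outer[current]);
      }
    else
      current = outer[current];         /* pop back to the parent */
  return 0;
}
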
4682
4683/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4684 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4685
4686 Note that we only copy the topmost level of tree nodes; they will share
4687 pointers to the same subblocks. */
4688
4689void
4690unroll_block_trees (loop_number, copies)
4691 int loop_number;
4692 int copies;
4693{
4694 int i;
4695
4696 /* First check whether there are any blocks that need to be copied. */
4697 if (loop_number_first_block[loop_number])
4698 {
4699 tree first_block = loop_number_first_block[loop_number];
4700 tree last_block = loop_number_last_block[loop_number];
4701 tree last_block_created = 0;
4702
4703 for (i = 0; i < copies - 1; i++)
4704 {
4705 tree block = first_block;
4706 tree insert_after = last_block;
4707 tree copied_block;
4708
4709 /* Copy every block between first_block and last_block inclusive,
4710 inserting the new blocks after last_block. */
4711 do
4712 {
4713 tree new_block = make_node (BLOCK);
4714 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4715 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4716 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4717 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4718 TREE_USED (new_block) = TREE_USED (block);
4719
4720 /* Insert the new block after the insertion point, and move
4721 the insertion point to the new block. This ensures that
4722 the copies are inserted in the right order. */
4723 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4724 BLOCK_CHAIN (insert_after) = new_block;
4725 insert_after = new_block;
4726
4727 copied_block = block;
4728 block = BLOCK_CHAIN (block);
4729 }
4730 while (copied_block != last_block);
4731
4732 /* Remember the last block created, so that we can update the
4733 info in the tables. */
4734 if (last_block_created == 0)
4735 last_block_created = insert_after;
4736 }
4737
4738 /* For all nested loops for which LAST_BLOCK was originally the last
4739 block, update the tables to indicate that LAST_BLOCK_CREATED is
4740 now the last block in the loop. */
4741 for (i = loop_number; last_block == loop_number_last_block[i];
4742 i = loop_outer_loop[i])
4743 loop_number_last_block[i] = last_block_created;
4744 }
4745}
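
The inner do-while above is ordinary linked-list splicing. A standalone sketch of the same operation on a plain chain, with the BLOCK fields reduced to an id; the names are illustrative:

#include <stdio.h>
#include <stdlib.h>

struct block { int id; struct block *chain; };

/* Duplicate the nodes FIRST through LAST inclusive COPIES-1 times,
   splicing each run of copies in after the previous insertion point. */
static void
unroll (struct block *first, struct block *last, int copies)
{
  struct block *insert_after = last;
  int i;

  for (i = 0; i < copies - 1; i++)
    {
      struct block *b = first, *copied;
      do
        {
          struct block *nb = (struct block *) malloc (sizeof (struct block));
          nb->id = b->id;
          nb->chain = insert_after->chain;
          insert_after->chain = nb;
          insert_after = nb;
          copied = b;
          b = b->chain;
        }
      while (copied != last);
    }
}

int
main (void)
{
  struct block b3 = { 3, 0 }, b2 = { 2, &b3 }, b1 = { 1, &b2 };
  struct block *p;

  unroll (&b1, &b2, 3);         /* blocks 1..2 appear three times */
  for (p = &b1; p; p = p->chain)
    printf ("%d ", p->id);
  printf ("\n");                /* 1 2 1 2 1 2 3 */
  return 0;
}
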