/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
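
/* An illustrative sketch (not part of the original source): the call
   sequence a front end might use to expand `if (cond) stmt;'.  The
   exact signatures of `expand_start_cond' and `expand_end_cond' are
   assumed here, not defined in this excerpt.

	expand_start_cond (cond, 0);	- before parsing the then-clause
	... expand RTL for stmt ...	- while parsing the then-clause
	expand_end_cond ();		- after parsing the then-clause
*/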

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
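
/* Illustrative sketch (not part of the original source): for C source
   `case 1: ... case 2: ... case 3: ...', the nodes are first chained
   in ascending order through the RIGHT fields:

	{1} -> {2} -> {3}

   If compare/jump insns are used rather than a branch table,
   balance_case_nodes may then rebuild this chain as a tree such as

	   {2}
	  /   \
	{1}   {3}

   with lower values on the LEFT and higher values on the RIGHT,
   as described above.  */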

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};
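
/* Illustrative sketch (not part of the original source): the start/end
   discipline that maintains these stacks, using a loop as the example.
   The exact signatures of `expand_start_loop' and `expand_end_loop'
   are assumed here, not defined in this excerpt.

	expand_start_loop (1);	- pushes a `struct nesting' onto
				  loop_stack and nesting_stack
	... expand the loop body ...
	expand_end_loop ();	- pops it off both stacks again
*/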

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
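
/* Illustrative sketch (not part of the original source): popping the
   innermost loop also unwinds anything nested inside it, because
   POPSTACK walks nesting_stack until it has freed the given element:

	POPSTACK (loop_stack);

   frees every nesting object down to and including the old top of
   loop_stack, keeping all six per-construct stacks consistent.  */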
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
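
/* Illustrative sketch (not part of the original source): C code whose
   forward goto needs a fixup, because it exits a contour that restores
   a stack level (here, one holding a variable-sized array):

	{
	  int v[n];
	  goto out;	- expand_goto runs before `out' is defined,
	}		  so expand_fixup records an entry here
	out: ;		- when the contour is exited, fixup_gotos
			  inserts the stack restore (and any cleanups)
			  just before the jump insn
*/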

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
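
/* Illustrative sketch (not part of the original source): GNU C's
   computed goto is lowered through expand_computed_goto.  For

	void *p = &&lab;
	goto *p;

   the front end passes the pointer expression for `p' as EXP; the
   expression is expanded to an rtx and an indirect jump to it is
   emitted, as above.  */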
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
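
/* Illustrative sketch (not part of the original source): the nonlocal
   branch of expand_goto above is reached for GNU C code such as

	void outer ()
	{
	  void inner () { goto lab; }	- goto leaves `inner'...
	  inner ();
	 lab: ;				- ...for a label in `outer'
	}

   where decl_function_context (label) yields `outer' while
   current_function_decl is `inner'.  */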

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx it jumps to.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
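
/* Illustrative sketch (not part of the original source): a plain
   `asm ("nop");' in C reaches expand_asm with BODY being the
   STRING_CST for "nop", and is emitted as a single ASM_INPUT rtx
   with no operands.  */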

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
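
/* Illustrative sketch (not part of the original source): for the
   extended asm statement

	asm volatile ("foo %0,%1" : "=r" (x) : "g" (y) : "cc");

   (with a made-up mnemonic `foo'), OUTPUTS is a one-element TREE_LIST
   whose TREE_VALUE is the expression for `x' and whose TREE_PURPOSE is
   the STRING_CST "=r"; INPUTS likewise holds `y' with constraint "g";
   CLOBBERS holds the STRING_CST "cc"; and VOL is nonzero because of
   `volatile'.  */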

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
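
/* Illustrative sketch (not part of the original source): the pairing a
   front end uses for a GNU C statement expression such as
   `({ int x = f (); x + 1; })':

	tree t = expand_start_stmt_expr ();
	... expand each statement; the last expr-stmt sets
	    last_expr_type and last_expr_value ...
	t = expand_end_stmt_expr (t);

   after which T carries the type, value and insns of the grouping.  */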

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
    {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

    }
*/

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
1598
1599/* Generate RTL for the start of a try block.
1600
1601 TRY_CLAUSE is the condition to test to enter the try block. */
1602
1603void
1604expand_start_try (try_clause, exitflag, escapeflag)
1605 tree try_clause;
1606 int exitflag;
1607 int escapeflag;
1608{
1609 struct nesting *thishandler = ALLOC_NESTING ();
1610
1611 /* Make an entry on cond_stack for the cond we are entering. */
1612
1613 thishandler->next = except_stack;
1614 thishandler->all = nesting_stack;
1615 thishandler->depth = ++nesting_depth;
1616 thishandler->data.except_stmt.raised = 0;
1617 thishandler->data.except_stmt.handled = 0;
1618 thishandler->data.except_stmt.first_insn = get_insns ();
1619 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1620 thishandler->data.except_stmt.unhandled_label = 0;
1621 thishandler->data.except_stmt.after_label = 0;
1622 thishandler->data.except_stmt.escape_label
1623 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
1624 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
1625 except_stack = thishandler;
1626 nesting_stack = thishandler;
1627
37366632 1628 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
28d81abb
RK
1629}
1630
1631/* End of a TRY block. Nothing to do for now. */
1632
1633void
1634expand_end_try ()
1635{
1636 except_stack->data.except_stmt.after_label = gen_label_rtx ();
37366632
RK
1637 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1638 NULL_RTX);
28d81abb
RK
1639}
1640
1641/* Start an `except' nesting contour.
1642 EXITFLAG says whether this contour should be able to `exit' something.
1643 ESCAPEFLAG says whether this contour should be escapable. */
1644
1645void
1646expand_start_except (exitflag, escapeflag)
1647 int exitflag;
1648 int escapeflag;
1649{
1650 if (exitflag)
1651 {
1652 struct nesting *n;
1653 /* An `exit' from catch clauses goes out to next exit level,
1654 if there is one. Otherwise, it just goes to the end
1655 of the construct. */
1656 for (n = except_stack->next; n; n = n->next)
1657 if (n->exit_label != 0)
1658 {
1659 except_stack->exit_label = n->exit_label;
1660 break;
1661 }
1662 if (n == 0)
1663 except_stack->exit_label = except_stack->data.except_stmt.after_label;
1664 }
1665 if (escapeflag)
1666 {
1667 struct nesting *n;
1668 /* An `escape' from catch clauses goes out to next escape level,
1669 if there is one. Otherwise, it just goes to the end
1670 of the construct. */
1671 for (n = except_stack->next; n; n = n->next)
1672 if (n->data.except_stmt.escape_label != 0)
1673 {
1674 except_stack->data.except_stmt.escape_label
1675 = n->data.except_stmt.escape_label;
1676 break;
1677 }
1678 if (n == 0)
1679 except_stack->data.except_stmt.escape_label
1680 = except_stack->data.except_stmt.after_label;
1681 }
1682 do_pending_stack_adjust ();
1683 emit_label (except_stack->data.except_stmt.except_label);
1684}
1685
1686/* Generate code to `escape' from an exception contour. This
1687 is like `exiting', but does not conflict with constructs which
1688 use `exit_label'.
1689
1690 Return nonzero if this contour is escapable, otherwise
1691 return zero, and language-specific code will emit the
1692 appropriate error message. */
1693int
1694expand_escape_except ()
1695{
1696 struct nesting *n;
1697 last_expr_type = 0;
1698 for (n = except_stack; n; n = n->next)
1699 if (n->data.except_stmt.escape_label != 0)
1700 {
1701 expand_goto_internal (NULL_TREE,
1702 n->data.except_stmt.escape_label, NULL_RTX);
1703 return 1;
1704 }
1705
1706 return 0;
1707}
1708
 1709/* Finish processing an `except' contour.
 1710   Culls out all exceptions which might be raised but not
1711 handled, and returns the list to the caller.
1712 Language-specific code is responsible for dealing with these
1713 exceptions. */
1714
1715tree
1716expand_end_except ()
1717{
1718 struct nesting *n;
1719 tree raised = NULL_TREE;
1720
1721 do_pending_stack_adjust ();
1722 emit_label (except_stack->data.except_stmt.after_label);
1723
1724 n = except_stack->next;
1725 if (n)
1726 {
1727 /* Propagate exceptions raised but not handled to next
1728 highest level. */
1729 tree handled = except_stack->data.except_stmt.raised;
1730 if (handled != void_type_node)
1731 {
1732 tree prev = NULL_TREE;
1733 raised = except_stack->data.except_stmt.raised;
1734 while (handled)
1735 {
1736 tree this_raise;
1737 for (this_raise = raised, prev = 0; this_raise;
1738 this_raise = TREE_CHAIN (this_raise))
1739 {
1740 if (value_member (TREE_VALUE (this_raise), handled))
1741 {
1742 if (prev)
1743 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
1744 else
1745 {
1746 raised = TREE_CHAIN (raised);
1747 if (raised == NULL_TREE)
1748 goto nada;
1749 }
1750 }
1751 else
1752 prev = this_raise;
1753 }
1754 handled = TREE_CHAIN (handled);
1755 }
1756 if (prev == NULL_TREE)
1757 prev = raised;
1758 if (prev)
1759 TREE_CHAIN (prev) = n->data.except_stmt.raised;
1760 nada:
1761 n->data.except_stmt.raised = raised;
1762 }
1763 }
1764
1765 POPSTACK (except_stack);
1766 last_expr_type = 0;
1767 return raised;
1768}
1769
1770/* Record that exception EX is caught by this exception handler.
1771 Return nonzero if in exception handling construct, otherwise return 0. */
1772int
1773expand_catch (ex)
1774 tree ex;
1775{
1776 tree *raises_ptr;
1777
1778 if (except_stack == 0)
1779 return 0;
1780 raises_ptr = &except_stack->data.except_stmt.handled;
1781 if (*raises_ptr != void_type_node
1782 && ex != NULL_TREE
1783 && ! value_member (ex, *raises_ptr))
1784 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1785 return 1;
1786}
1787
1788/* Record that this exception handler catches all exceptions.
1789 Return nonzero if in exception handling construct, otherwise return 0. */
1790
1791int
1792expand_catch_default ()
1793{
1794 if (except_stack == 0)
1795 return 0;
1796 except_stack->data.except_stmt.handled = void_type_node;
1797 return 1;
1798}
1799
1800int
1801expand_end_catch ()
1802{
1803 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
1804 return 0;
1805 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1806 NULL_RTX);
1807 return 1;
1808}
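/* Illustrative sketch (not part of the original source): roughly how a
   front end might drive the routines above to expand a construct such as
   `try { body } except (ex) { handler }'.  TRY_COND, EX and the statement
   expansions are hypothetical placeholders supplied by the front end.  */
#if 0
  expand_start_try (try_cond, 0, 1);	/* jump to the handler if TRY_COND is false */
  /* ... expand the statements of the try body ... */
  expand_end_try ();			/* jump past the handler code */
  expand_start_except (0, 1);		/* emit the handler (except) label */
  expand_catch (ex);			/* record that EX is handled here */
  /* ... expand the statements of the handler ... */
  expand_end_catch ();			/* jump to after_label */
  expand_end_except ();			/* emit after_label; returns unhandled raises */
#endif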
1809\f
1810/* Generate RTL for the start of an if-then. COND is the expression
1811 whose truth should be tested.
1812
1813 If EXITFLAG is nonzero, this conditional is visible to
1814 `exit_something'. */
1815
1816void
1817expand_start_cond (cond, exitflag)
1818 tree cond;
1819 int exitflag;
1820{
1821 struct nesting *thiscond = ALLOC_NESTING ();
1822
1823 /* Make an entry on cond_stack for the cond we are entering. */
1824
1825 thiscond->next = cond_stack;
1826 thiscond->all = nesting_stack;
1827 thiscond->depth = ++nesting_depth;
1828 thiscond->data.cond.next_label = gen_label_rtx ();
1829 /* Before we encounter an `else', we don't need a separate exit label
1830 unless there are supposed to be exit statements
1831 to exit this conditional. */
1832 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1833 thiscond->data.cond.endif_label = thiscond->exit_label;
1834 cond_stack = thiscond;
1835 nesting_stack = thiscond;
1836
 1837  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1838}
1839
 1840/* Generate RTL between the then-clause and the elseif-clause
1841 of an if-then-elseif-.... */
1842
1843void
1844expand_start_elseif (cond)
1845 tree cond;
1846{
1847 if (cond_stack->data.cond.endif_label == 0)
1848 cond_stack->data.cond.endif_label = gen_label_rtx ();
1849 emit_jump (cond_stack->data.cond.endif_label);
1850 emit_label (cond_stack->data.cond.next_label);
1851 cond_stack->data.cond.next_label = gen_label_rtx ();
 1852  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1853}
1854
1855/* Generate RTL between the then-clause and the else-clause
1856 of an if-then-else. */
1857
1858void
1859expand_start_else ()
1860{
1861 if (cond_stack->data.cond.endif_label == 0)
1862 cond_stack->data.cond.endif_label = gen_label_rtx ();
1863 emit_jump (cond_stack->data.cond.endif_label);
1864 emit_label (cond_stack->data.cond.next_label);
1865 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1866}
1867
1868/* Generate RTL for the end of an if-then.
1869 Pop the record for it off of cond_stack. */
1870
1871void
1872expand_end_cond ()
1873{
1874 struct nesting *thiscond = cond_stack;
1875
1876 do_pending_stack_adjust ();
1877 if (thiscond->data.cond.next_label)
1878 emit_label (thiscond->data.cond.next_label);
1879 if (thiscond->data.cond.endif_label)
1880 emit_label (thiscond->data.cond.endif_label);
1881
1882 POPSTACK (cond_stack);
1883 last_expr_type = 0;
1884}
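/* Illustrative sketch (not part of the original source): the call sequence
   a parser uses for `if (a) s1; else if (b) s2; else s3;'.  A, B and the
   statement expansions are hypothetical placeholders.  */
#if 0
  expand_start_cond (a, 0);	/* test A; jump to next_label when false */
  /* ... expand S1 ... */
  expand_start_elseif (b);	/* jump to endif_label, emit next_label, test B */
  /* ... expand S2 ... */
  expand_start_else ();		/* jump to endif_label, emit next_label */
  /* ... expand S3 ... */
  expand_end_cond ();		/* emit the remaining labels, pop cond_stack */
#endif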
1885\f
1886/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1887 loop should be exited by `exit_something'. This is a loop for which
1888 `expand_continue' will jump to the top of the loop.
1889
1890 Make an entry on loop_stack to record the labels associated with
1891 this loop. */
1892
1893struct nesting *
1894expand_start_loop (exit_flag)
1895 int exit_flag;
1896{
1897 register struct nesting *thisloop = ALLOC_NESTING ();
1898
1899 /* Make an entry on loop_stack for the loop we are entering. */
1900
1901 thisloop->next = loop_stack;
1902 thisloop->all = nesting_stack;
1903 thisloop->depth = ++nesting_depth;
1904 thisloop->data.loop.start_label = gen_label_rtx ();
1905 thisloop->data.loop.end_label = gen_label_rtx ();
1906 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1907 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1908 loop_stack = thisloop;
1909 nesting_stack = thisloop;
1910
1911 do_pending_stack_adjust ();
1912 emit_queue ();
 1913  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1914 emit_label (thisloop->data.loop.start_label);
1915
1916 return thisloop;
1917}
1918
1919/* Like expand_start_loop but for a loop where the continuation point
1920 (for expand_continue_loop) will be specified explicitly. */
1921
1922struct nesting *
1923expand_start_loop_continue_elsewhere (exit_flag)
1924 int exit_flag;
1925{
1926 struct nesting *thisloop = expand_start_loop (exit_flag);
1927 loop_stack->data.loop.continue_label = gen_label_rtx ();
1928 return thisloop;
1929}
1930
1931/* Specify the continuation point for a loop started with
1932 expand_start_loop_continue_elsewhere.
1933 Use this at the point in the code to which a continue statement
1934 should jump. */
1935
1936void
1937expand_loop_continue_here ()
1938{
1939 do_pending_stack_adjust ();
 1940  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1941 emit_label (loop_stack->data.loop.continue_label);
1942}
1943
1944/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1945 Pop the block off of loop_stack. */
1946
1947void
1948expand_end_loop ()
1949{
1950 register rtx insn = get_last_insn ();
1951 register rtx start_label = loop_stack->data.loop.start_label;
1952 rtx last_test_insn = 0;
1953 int num_insns = 0;
1954
1955 /* Mark the continue-point at the top of the loop if none elsewhere. */
1956 if (start_label == loop_stack->data.loop.continue_label)
1957 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1958
1959 do_pending_stack_adjust ();
1960
1961 /* If optimizing, perhaps reorder the loop. If the loop
1962 starts with a conditional exit, roll that to the end
1963 where it will optimize together with the jump back.
1964
1965 We look for the last conditional branch to the exit that we encounter
1966 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1967 branch to the exit first, use it.
1968
1969 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1970 because moving them is not valid. */
1971
1972 if (optimize
1973 &&
1974 ! (GET_CODE (insn) == JUMP_INSN
1975 && GET_CODE (PATTERN (insn)) == SET
1976 && SET_DEST (PATTERN (insn)) == pc_rtx
1977 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1978 {
1979 /* Scan insns from the top of the loop looking for a qualified
1980 conditional exit. */
1981 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1982 insn = NEXT_INSN (insn))
1983 {
1984 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1985 break;
1986
1987 if (GET_CODE (insn) == NOTE
1988 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1989 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1990 break;
1991
1992 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1993 num_insns++;
1994
1995 if (last_test_insn && num_insns > 30)
1996 break;
1997
1998 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1999 && SET_DEST (PATTERN (insn)) == pc_rtx
2000 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2001 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2002 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2003 == loop_stack->data.loop.end_label))
2004 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2005 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2006 == loop_stack->data.loop.end_label))))
2007 last_test_insn = insn;
2008
2009 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2010 && GET_CODE (PATTERN (insn)) == SET
2011 && SET_DEST (PATTERN (insn)) == pc_rtx
2012 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2013 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2014 == loop_stack->data.loop.end_label))
2015 /* Include BARRIER. */
2016 last_test_insn = NEXT_INSN (insn);
2017 }
2018
2019 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2020 {
2021 /* We found one. Move everything from there up
2022 to the end of the loop, and add a jump into the loop
2023 to jump to there. */
2024 register rtx newstart_label = gen_label_rtx ();
2025 register rtx start_move = start_label;
2026
 2027      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2028 then we want to move this note also. */
2029 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2030 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2031 == NOTE_INSN_LOOP_CONT))
2032 start_move = PREV_INSN (start_move);
2033
2034 emit_label_after (newstart_label, PREV_INSN (start_move));
2035 reorder_insns (start_move, last_test_insn, get_last_insn ());
2036 emit_jump_insn_after (gen_jump (start_label),
2037 PREV_INSN (newstart_label));
2038 emit_barrier_after (PREV_INSN (newstart_label));
2039 start_label = newstart_label;
2040 }
2041 }
2042
2043 emit_jump (start_label);
 2044  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2045 emit_label (loop_stack->data.loop.end_label);
2046
2047 POPSTACK (loop_stack);
2048
2049 last_expr_type = 0;
2050}
2051
2052/* Generate a jump to the current loop's continue-point.
2053 This is usually the top of the loop, but may be specified
2054 explicitly elsewhere. If not currently inside a loop,
2055 return 0 and do nothing; caller will print an error message. */
2056
2057int
2058expand_continue_loop (whichloop)
2059 struct nesting *whichloop;
2060{
2061 last_expr_type = 0;
2062 if (whichloop == 0)
2063 whichloop = loop_stack;
2064 if (whichloop == 0)
2065 return 0;
2066 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2067 NULL_RTX);
2068 return 1;
2069}
2070
2071/* Generate a jump to exit the current loop. If not currently inside a loop,
2072 return 0 and do nothing; caller will print an error message. */
2073
2074int
2075expand_exit_loop (whichloop)
2076 struct nesting *whichloop;
2077{
2078 last_expr_type = 0;
2079 if (whichloop == 0)
2080 whichloop = loop_stack;
2081 if (whichloop == 0)
2082 return 0;
 2083  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2084 return 1;
2085}
2086
2087/* Generate a conditional jump to exit the current loop if COND
2088 evaluates to zero. If not currently inside a loop,
2089 return 0 and do nothing; caller will print an error message. */
2090
2091int
2092expand_exit_loop_if_false (whichloop, cond)
2093 struct nesting *whichloop;
2094 tree cond;
2095{
2096 last_expr_type = 0;
2097 if (whichloop == 0)
2098 whichloop = loop_stack;
2099 if (whichloop == 0)
2100 return 0;
 2101  do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2102 return 1;
2103}
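/* Illustrative sketch (not part of the original source): expanding
   `while (c) body;' with the routines above.  C and the body expansion
   are hypothetical placeholders.  */
#if 0
  {
    struct nesting *loop = expand_start_loop (1);  /* LOOP_BEG note, start_label */
    expand_exit_loop_if_false (loop, c);	   /* exit the loop when C is false */
    /* ... expand BODY; a `continue' calls expand_continue_loop ... */
    expand_end_loop ();				   /* jump back, LOOP_END, end_label */
  }
#endif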
2104
2105/* Return non-zero if we should preserve sub-expressions as separate
2106 pseudos. We never do so if we aren't optimizing. We always do so
2107 if -fexpensive-optimizations.
2108
2109 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2110 the loop may still be a small one. */
2111
2112int
2113preserve_subexpressions_p ()
2114{
2115 rtx insn;
2116
2117 if (flag_expensive_optimizations)
2118 return 1;
2119
2120 if (optimize == 0 || loop_stack == 0)
2121 return 0;
2122
2123 insn = get_last_insn_anywhere ();
2124
2125 return (insn
2126 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2127 < n_non_fixed_regs * 3));
2128
2129}
2130
2131/* Generate a jump to exit the current loop, conditional, binding contour
2132 or case statement. Not all such constructs are visible to this function,
2133 only those started with EXIT_FLAG nonzero. Individual languages use
2134 the EXIT_FLAG parameter to control which kinds of constructs you can
2135 exit this way.
2136
2137 If not currently inside anything that can be exited,
2138 return 0 and do nothing; caller will print an error message. */
2139
2140int
2141expand_exit_something ()
2142{
2143 struct nesting *n;
2144 last_expr_type = 0;
2145 for (n = nesting_stack; n; n = n->all)
2146 if (n->exit_label != 0)
2147 {
 2148	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2149 return 1;
2150 }
2151
2152 return 0;
2153}
2154\f
2155/* Generate RTL to return from the current function, with no value.
2156 (That is, we do not do anything about returning any value.) */
2157
2158void
2159expand_null_return ()
2160{
2161 struct nesting *block = block_stack;
2162 rtx last_insn = 0;
2163
2164 /* Does any pending block have cleanups? */
2165
2166 while (block && block->data.block.cleanups == 0)
2167 block = block->next;
2168
2169 /* If yes, use a goto to return, since that runs cleanups. */
2170
2171 expand_null_return_1 (last_insn, block != 0);
2172}
2173
2174/* Generate RTL to return from the current function, with value VAL. */
2175
2176void
2177expand_value_return (val)
2178 rtx val;
2179{
2180 struct nesting *block = block_stack;
2181 rtx last_insn = get_last_insn ();
2182 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2183
2184 /* Copy the value to the return location
2185 unless it's already there. */
2186
2187 if (return_reg != val)
2188 {
2189#ifdef PROMOTE_FUNCTION_RETURN
2190 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2191 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2192 int unsignedp = TREE_UNSIGNED (type);
2193
2194 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2195 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2196 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2197 || TREE_CODE (type) == OFFSET_TYPE)
2198 {
2199 PROMOTE_MODE (mode, unsignedp, type);
2200 }
2201
2202 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
 2203	convert_move (return_reg, val, unsignedp);
2204 else
2205#endif
2206 emit_move_insn (return_reg, val);
2207 }
2208 if (GET_CODE (return_reg) == REG
2209 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2210 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2211
2212 /* Does any pending block have cleanups? */
2213
2214 while (block && block->data.block.cleanups == 0)
2215 block = block->next;
2216
2217 /* If yes, use a goto to return, since that runs cleanups.
2218 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2219
2220 expand_null_return_1 (last_insn, block != 0);
2221}
2222
2223/* Output a return with no value. If LAST_INSN is nonzero,
2224 pretend that the return takes place after LAST_INSN.
2225 If USE_GOTO is nonzero then don't use a return instruction;
2226 go to the return label instead. This causes any cleanups
2227 of pending blocks to be executed normally. */
2228
2229static void
2230expand_null_return_1 (last_insn, use_goto)
2231 rtx last_insn;
2232 int use_goto;
2233{
2234 rtx end_label = cleanup_label ? cleanup_label : return_label;
2235
2236 clear_pending_stack_adjust ();
2237 do_pending_stack_adjust ();
2238 last_expr_type = 0;
2239
2240 /* PCC-struct return always uses an epilogue. */
2241 if (current_function_returns_pcc_struct || use_goto)
2242 {
2243 if (end_label == 0)
2244 end_label = return_label = gen_label_rtx ();
 2245      expand_goto_internal (NULL_TREE, end_label, last_insn);
2246 return;
2247 }
2248
2249 /* Otherwise output a simple return-insn if one is available,
2250 unless it won't do the job. */
2251#ifdef HAVE_return
2252 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2253 {
2254 emit_jump_insn (gen_return ());
2255 emit_barrier ();
2256 return;
2257 }
2258#endif
2259
2260 /* Otherwise jump to the epilogue. */
 2261  expand_goto_internal (NULL_TREE, end_label, last_insn);
2262}
2263\f
2264/* Generate RTL to evaluate the expression RETVAL and return it
2265 from the current function. */
2266
2267void
2268expand_return (retval)
2269 tree retval;
2270{
2271 /* If there are any cleanups to be performed, then they will
2272 be inserted following LAST_INSN. It is desirable
2273 that the last_insn, for such purposes, should be the
2274 last insn before computing the return value. Otherwise, cleanups
2275 which call functions can clobber the return value. */
2276 /* ??? rms: I think that is erroneous, because in C++ it would
2277 run destructors on variables that might be used in the subsequent
2278 computation of the return value. */
2279 rtx last_insn = 0;
2280 register rtx val = 0;
2281 register rtx op0;
2282 tree retval_rhs;
2283 int cleanups;
2284 struct nesting *block;
2285
2286 /* If function wants no value, give it none. */
2287 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2288 {
 2289      expand_expr (retval, NULL_RTX, VOIDmode, 0);
 2290      emit_queue ();
2291 expand_null_return ();
2292 return;
2293 }
2294
2295 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2296 cleanups = any_pending_cleanups (1);
2297
2298 if (TREE_CODE (retval) == RESULT_DECL)
2299 retval_rhs = retval;
2300 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2301 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2302 retval_rhs = TREE_OPERAND (retval, 1);
2303 else if (TREE_TYPE (retval) == void_type_node)
2304 /* Recognize tail-recursive call to void function. */
2305 retval_rhs = retval;
2306 else
2307 retval_rhs = NULL_TREE;
2308
2309 /* Only use `last_insn' if there are cleanups which must be run. */
2310 if (cleanups || cleanup_label != 0)
2311 last_insn = get_last_insn ();
2312
2313 /* Distribute return down conditional expr if either of the sides
2314 may involve tail recursion (see test below). This enhances the number
2315 of tail recursions we see. Don't do this always since it can produce
2316 sub-optimal code in some cases and we distribute assignments into
2317 conditional expressions when it would help. */
2318
2319 if (optimize && retval_rhs != 0
2320 && frame_offset == 0
2321 && TREE_CODE (retval_rhs) == COND_EXPR
2322 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2323 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2324 {
2325 rtx label = gen_label_rtx ();
 2326      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2327 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2328 DECL_RESULT (current_function_decl),
2329 TREE_OPERAND (retval_rhs, 1)));
2330 emit_label (label);
2331 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2332 DECL_RESULT (current_function_decl),
2333 TREE_OPERAND (retval_rhs, 2)));
2334 return;
2335 }
2336
2337 /* For tail-recursive call to current function,
2338 just jump back to the beginning.
2339 It's unsafe if any auto variable in this function
2340 has its address taken; for simplicity,
2341 require stack frame to be empty. */
2342 if (optimize && retval_rhs != 0
2343 && frame_offset == 0
2344 && TREE_CODE (retval_rhs) == CALL_EXPR
2345 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2346 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2347 /* Finish checking validity, and if valid emit code
2348 to set the argument variables for the new call. */
2349 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2350 DECL_ARGUMENTS (current_function_decl)))
2351 {
2352 if (tail_recursion_label == 0)
2353 {
2354 tail_recursion_label = gen_label_rtx ();
2355 emit_label_after (tail_recursion_label,
2356 tail_recursion_reentry);
2357 }
 2358      emit_queue ();
 2359      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2360 emit_barrier ();
2361 return;
2362 }
2363#ifdef HAVE_return
2364 /* This optimization is safe if there are local cleanups
2365 because expand_null_return takes care of them.
2366 ??? I think it should also be safe when there is a cleanup label,
2367 because expand_null_return takes care of them, too.
2368 Any reason why not? */
2369 if (HAVE_return && cleanup_label == 0
2370 && ! current_function_returns_pcc_struct)
2371 {
2372 /* If this is return x == y; then generate
2373 if (x == y) return 1; else return 0;
2374 if we can do it with explicit return insns. */
2375 if (retval_rhs)
2376 switch (TREE_CODE (retval_rhs))
2377 {
2378 case EQ_EXPR:
2379 case NE_EXPR:
2380 case GT_EXPR:
2381 case GE_EXPR:
2382 case LT_EXPR:
2383 case LE_EXPR:
2384 case TRUTH_ANDIF_EXPR:
2385 case TRUTH_ORIF_EXPR:
2386 case TRUTH_AND_EXPR:
2387 case TRUTH_OR_EXPR:
2388 case TRUTH_NOT_EXPR:
2389 op0 = gen_label_rtx ();
2390 jumpifnot (retval_rhs, op0);
2391 expand_value_return (const1_rtx);
2392 emit_label (op0);
2393 expand_value_return (const0_rtx);
2394 return;
2395 }
2396 }
2397#endif /* HAVE_return */
2398
2399 if (cleanups
2400 && retval_rhs != 0
2401 && TREE_TYPE (retval_rhs) != void_type_node
2402 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2403 {
2404 /* Calculate the return value into a pseudo reg. */
 2405      val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2406 emit_queue ();
2407 /* All temporaries have now been used. */
2408 free_temp_slots ();
2409 /* Return the calculated value, doing cleanups first. */
2410 expand_value_return (val);
2411 }
2412 else
2413 {
2414 /* No cleanups or no hard reg used;
2415 calculate value into hard return reg. */
 2416      expand_expr (retval, NULL_RTX, VOIDmode, 0);
2417 emit_queue ();
2418 free_temp_slots ();
2419 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2420 }
2421}
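/* Illustrative sketch (not part of the original source): the COND_EXPR case
   in expand_return above distributes `return p ? f (x) : g (x);' into the
   equivalent of

	if (! p) goto L;
	return f (x);
     L:	return g (x);

   so that each arm can be recognized separately as a tail call.  */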
2422
2423/* Return 1 if the end of the generated RTX is not a barrier.
2424 This means code already compiled can drop through. */
2425
2426int
2427drop_through_at_end_p ()
2428{
2429 rtx insn = get_last_insn ();
2430 while (insn && GET_CODE (insn) == NOTE)
2431 insn = PREV_INSN (insn);
2432 return insn && GET_CODE (insn) != BARRIER;
2433}
2434\f
2435/* Emit code to alter this function's formal parms for a tail-recursive call.
2436 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2437 FORMALS is the chain of decls of formals.
2438 Return 1 if this can be done;
2439 otherwise return 0 and do not emit any code. */
2440
2441static int
2442tail_recursion_args (actuals, formals)
2443 tree actuals, formals;
2444{
2445 register tree a = actuals, f = formals;
2446 register int i;
2447 register rtx *argvec;
2448
2449 /* Check that number and types of actuals are compatible
2450 with the formals. This is not always true in valid C code.
2451 Also check that no formal needs to be addressable
2452 and that all formals are scalars. */
2453
2454 /* Also count the args. */
2455
2456 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2457 {
2458 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2459 return 0;
2460 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2461 return 0;
2462 }
2463 if (a != 0 || f != 0)
2464 return 0;
2465
2466 /* Compute all the actuals. */
2467
2468 argvec = (rtx *) alloca (i * sizeof (rtx));
2469
2470 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
 2471    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2472
2473 /* Find which actual values refer to current values of previous formals.
2474 Copy each of them now, before any formal is changed. */
2475
2476 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2477 {
2478 int copy = 0;
2479 register int j;
2480 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2481 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2482 { copy = 1; break; }
2483 if (copy)
2484 argvec[i] = copy_to_reg (argvec[i]);
2485 }
2486
2487 /* Store the values of the actuals into the formals. */
2488
2489 for (f = formals, a = actuals, i = 0; f;
2490 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2491 {
 2492      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2493 emit_move_insn (DECL_RTL (f), argvec[i]);
2494 else
2495 convert_move (DECL_RTL (f), argvec[i],
2496 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2497 }
2498
2499 free_temp_slots ();
2500 return 1;
2501}
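/* Illustrative sketch (not part of the original source): for
   `int f (int a, int b) { ... return f (b, a); }' the code above first
   computes both actuals, then copies the actual `a' into a fresh pseudo
   (it mentions formal A, which is stored into first), and finally emits
   the equivalent of

	temp = a;  a = b;  b = temp;

   before expand_return jumps back to tail_recursion_label.  */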
2502\f
2503/* Generate the RTL code for entering a binding contour.
2504 The variables are declared one by one, by calls to `expand_decl'.
2505
2506 EXIT_FLAG is nonzero if this construct should be visible to
2507 `exit_something'. */
2508
2509void
2510expand_start_bindings (exit_flag)
2511 int exit_flag;
2512{
2513 struct nesting *thisblock = ALLOC_NESTING ();
2514
 2515  rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2516
2517 /* Make an entry on block_stack for the block we are entering. */
2518
2519 thisblock->next = block_stack;
2520 thisblock->all = nesting_stack;
2521 thisblock->depth = ++nesting_depth;
2522 thisblock->data.block.stack_level = 0;
2523 thisblock->data.block.cleanups = 0;
2524 thisblock->data.block.function_call_count = 0;
2525#if 0
2526 if (block_stack)
2527 {
2528 if (block_stack->data.block.cleanups == NULL_TREE
2529 && (block_stack->data.block.outer_cleanups == NULL_TREE
2530 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2531 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2532 else
2533 thisblock->data.block.outer_cleanups
2534 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2535 block_stack->data.block.outer_cleanups);
2536 }
2537 else
2538 thisblock->data.block.outer_cleanups = 0;
2539#endif
2540#if 1
2541 if (block_stack
2542 && !(block_stack->data.block.cleanups == NULL_TREE
2543 && block_stack->data.block.outer_cleanups == NULL_TREE))
2544 thisblock->data.block.outer_cleanups
2545 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2546 block_stack->data.block.outer_cleanups);
2547 else
2548 thisblock->data.block.outer_cleanups = 0;
2549#endif
2550 thisblock->data.block.label_chain = 0;
2551 thisblock->data.block.innermost_stack_block = stack_block_stack;
2552 thisblock->data.block.first_insn = note;
2553 thisblock->data.block.block_start_count = ++block_start_count;
2554 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2555 block_stack = thisblock;
2556 nesting_stack = thisblock;
2557
2558 /* Make a new level for allocating stack slots. */
2559 push_temp_slots ();
2560}
2561
2562/* Given a pointer to a BLOCK node, save a pointer to the most recently
2563 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2564 BLOCK node. */
2565
2566void
2567remember_end_note (block)
2568 register tree block;
2569{
2570 BLOCK_END_NOTE (block) = last_block_end_note;
2571 last_block_end_note = NULL_RTX;
2572}
2573
2574/* Generate RTL code to terminate a binding contour.
2575 VARS is the chain of VAR_DECL nodes
2576 for the variables bound in this contour.
2577 MARK_ENDS is nonzero if we should put a note at the beginning
2578 and end of this binding contour.
2579
2580 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2581 (That is true automatically if the contour has a saved stack level.) */
2582
2583void
2584expand_end_bindings (vars, mark_ends, dont_jump_in)
2585 tree vars;
2586 int mark_ends;
2587 int dont_jump_in;
2588{
2589 register struct nesting *thisblock = block_stack;
2590 register tree decl;
2591
2592 if (warn_unused)
2593 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2594 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2595 && ! DECL_IN_SYSTEM_HEADER (decl))
2596 warning_with_decl (decl, "unused variable `%s'");
2597
2598 if (thisblock->exit_label)
2599 {
2600 do_pending_stack_adjust ();
2601 emit_label (thisblock->exit_label);
2602 }
2603
2604 /* If necessary, make a handler for nonlocal gotos taking
2605 place in the function calls in this block. */
2606 if (function_call_count != thisblock->data.block.function_call_count
2607 && nonlocal_labels
2608 /* Make handler for outermost block
2609 if there were any nonlocal gotos to this function. */
2610 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2611 /* Make handler for inner block if it has something
2612 special to do when you jump out of it. */
2613 : (thisblock->data.block.cleanups != 0
2614 || thisblock->data.block.stack_level != 0)))
2615 {
2616 tree link;
2617 rtx afterward = gen_label_rtx ();
2618 rtx handler_label = gen_label_rtx ();
2619 rtx save_receiver = gen_reg_rtx (Pmode);
2620
2621 /* Don't let jump_optimize delete the handler. */
2622 LABEL_PRESERVE_P (handler_label) = 1;
2623
2624 /* Record the handler address in the stack slot for that purpose,
2625 during this block, saving and restoring the outer value. */
2626 if (thisblock->next != 0)
2627 {
2628 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2629 emit_insn_before (gen_move_insn (save_receiver,
2630 nonlocal_goto_handler_slot),
2631 thisblock->data.block.first_insn);
2632 }
2633 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2634 gen_rtx (LABEL_REF, Pmode,
2635 handler_label)),
2636 thisblock->data.block.first_insn);
2637
2638 /* Jump around the handler; it runs only when specially invoked. */
2639 emit_jump (afterward);
2640 emit_label (handler_label);
2641
2642#ifdef HAVE_nonlocal_goto
2643 if (! HAVE_nonlocal_goto)
2644#endif
2645 /* First adjust our frame pointer to its actual value. It was
2646 previously set to the start of the virtual area corresponding to
2647 the stacked variables when we branched here and now needs to be
2648 adjusted to the actual hardware fp value.
2649
 2650	 Assignments to virtual registers are converted by
2651 instantiate_virtual_regs into the corresponding assignment
2652 to the underlying register (fp in this case) that makes
2653 the original assignment true.
2654 So the following insn will actually be
2655 decrementing fp by STARTING_FRAME_OFFSET. */
2656 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2657
2658#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2659 if (fixed_regs[ARG_POINTER_REGNUM])
2660 {
2661#ifdef ELIMINABLE_REGS
2662 /* If the argument pointer can be eliminated in favor of the
2663 frame pointer, we don't need to restore it. We assume here
2664 that if such an elimination is present, it can always be used.
2665 This is the case on all known machines; if we don't make this
2666 assumption, we do unnecessary saving on many machines. */
2667 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2668 int i;
2669
2670 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2671 if (elim_regs[i].from == ARG_POINTER_REGNUM
2672 && elim_regs[i].to == FRAME_POINTER_REGNUM)
2673 break;
2674
2675 if (i == sizeof elim_regs / sizeof elim_regs [0])
2676#endif
2677 {
2678 /* Now restore our arg pointer from the address at which it
2679 was saved in our stack frame.
 2680	     If there hasn't been space allocated for it yet, make
2681 some now. */
2682 if (arg_pointer_save_area == 0)
2683 arg_pointer_save_area
2684 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2685 emit_move_insn (virtual_incoming_args_rtx,
2686 /* We need a pseudo here, or else
2687 instantiate_virtual_regs_1 complains. */
2688 copy_to_reg (arg_pointer_save_area));
2689 }
2690 }
2691#endif
2692
2693 /* The handler expects the desired label address in the static chain
2694 register. It tests the address and does an appropriate jump
2695 to whatever label is desired. */
2696 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2697 /* Skip any labels we shouldn't be able to jump to from here. */
2698 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2699 {
2700 rtx not_this = gen_label_rtx ();
2701 rtx this = gen_label_rtx ();
2702 do_jump_if_equal (static_chain_rtx,
2703 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2704 this, 0);
2705 emit_jump (not_this);
2706 emit_label (this);
2707 expand_goto (TREE_VALUE (link));
2708 emit_label (not_this);
2709 }
2710 /* If label is not recognized, abort. */
2711 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2712 VOIDmode, 0);
2713 emit_label (afterward);
2714 }
2715
2716 /* Don't allow jumping into a block that has cleanups or a stack level. */
2717 if (dont_jump_in
2718 || thisblock->data.block.stack_level != 0
2719 || thisblock->data.block.cleanups != 0)
2720 {
2721 struct label_chain *chain;
2722
2723 /* Any labels in this block are no longer valid to go to.
2724 Mark them to cause an error message. */
2725 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2726 {
2727 DECL_TOO_LATE (chain->label) = 1;
2728 /* If any goto without a fixup came to this label,
2729 that must be an error, because gotos without fixups
2730 come from outside all saved stack-levels and all cleanups. */
2731 if (TREE_ADDRESSABLE (chain->label))
2732 error_with_decl (chain->label,
2733 "label `%s' used before containing binding contour");
2734 }
2735 }
2736
2737 /* Restore stack level in effect before the block
2738 (only if variable-size objects allocated). */
2739 /* Perform any cleanups associated with the block. */
2740
2741 if (thisblock->data.block.stack_level != 0
2742 || thisblock->data.block.cleanups != 0)
2743 {
2744 /* Don't let cleanups affect ({...}) constructs. */
2745 int old_expr_stmts_for_value = expr_stmts_for_value;
2746 rtx old_last_expr_value = last_expr_value;
2747 tree old_last_expr_type = last_expr_type;
2748 expr_stmts_for_value = 0;
2749
2750 /* Do the cleanups. */
 2751      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2752 do_pending_stack_adjust ();
2753
2754 expr_stmts_for_value = old_expr_stmts_for_value;
2755 last_expr_value = old_last_expr_value;
2756 last_expr_type = old_last_expr_type;
2757
2758 /* Restore the stack level. */
2759
2760 if (thisblock->data.block.stack_level != 0)
2761 {
 2762	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
 2763			      thisblock->data.block.stack_level, NULL_RTX);
 2764	  if (nonlocal_goto_handler_slot != 0)
2765 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
2766 NULL_RTX);
2767 }
2768
2769 /* Any gotos out of this block must also do these things.
2770 Also report any gotos with fixups that came to labels in this
2771 level. */
2772 fixup_gotos (thisblock,
2773 thisblock->data.block.stack_level,
2774 thisblock->data.block.cleanups,
2775 thisblock->data.block.first_insn,
2776 dont_jump_in);
2777 }
2778
2779 /* Mark the beginning and end of the scope if requested.
2780 We do this now, after running cleanups on the variables
2781 just going out of scope, so they are in scope for their cleanups. */
2782
2783 if (mark_ends)
 2784    last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
2785 else
2786 /* Get rid of the beginning-mark if we don't make an end-mark. */
2787 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2788
2789 /* If doing stupid register allocation, make sure lives of all
2790 register variables declared here extend thru end of scope. */
2791
2792 if (obey_regdecls)
2793 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2794 {
2795 rtx rtl = DECL_RTL (decl);
2796 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2797 use_variable (rtl);
2798 }
2799
2800 /* Restore block_stack level for containing block. */
2801
2802 stack_block_stack = thisblock->data.block.innermost_stack_block;
2803 POPSTACK (block_stack);
2804
2805 /* Pop the stack slot nesting and free any slots at this level. */
2806 pop_temp_slots ();
2807}
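/* Illustrative sketch (not part of the original source): expanding a
   compound statement `{ int v = 0; ... }'.  V_DECL is a hypothetical
   VAR_DECL built by the front end.  */
#if 0
  expand_start_bindings (0);		/* emit NOTE_INSN_BLOCK_BEG */
  expand_decl (v_decl);			/* give V a pseudo reg or stack slot */
  expand_decl_init (v_decl);		/* store DECL_INITIAL into it */
  /* ... expand the statements of the block ... */
  expand_end_bindings (v_decl, 1, 0);	/* run cleanups, emit BLOCK_END note */
#endif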
2808\f
2809/* Generate RTL for the automatic variable declaration DECL.
2810 (Other kinds of declarations are simply ignored if seen here.)
2811 CLEANUP is an expression to be executed at exit from this binding contour;
2812 for example, in C++, it might call the destructor for this variable.
2813
2814 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2815 either before or after calling `expand_decl' but before compiling
2816 any subsequent expressions. This is because CLEANUP may be expanded
2817 more than once, on different branches of execution.
2818 For the same reason, CLEANUP may not contain a CALL_EXPR
2819 except as its topmost node--else `preexpand_calls' would get confused.
2820
2821 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2822 that is not associated with any particular variable.
2823
2824 There is no special support here for C++ constructors.
2825 They should be handled by the proper code in DECL_INITIAL. */
2826
2827void
2828expand_decl (decl)
2829 register tree decl;
2830{
2831 struct nesting *thisblock = block_stack;
2832 tree type = TREE_TYPE (decl);
2833
2834 /* Only automatic variables need any expansion done.
2835 Static and external variables, and external functions,
2836 will be handled by `assemble_variable' (called from finish_decl).
2837 TYPE_DECL and CONST_DECL require nothing.
2838 PARM_DECLs are handled in `assign_parms'. */
2839
2840 if (TREE_CODE (decl) != VAR_DECL)
2841 return;
 2842  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2843 return;
2844
2845 /* Create the RTL representation for the variable. */
2846
2847 if (type == error_mark_node)
2848 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2849 else if (DECL_SIZE (decl) == 0)
2850 /* Variable with incomplete type. */
2851 {
2852 if (DECL_INITIAL (decl) == 0)
2853 /* Error message was already done; now avoid a crash. */
2854 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2855 else
2856 /* An initializer is going to decide the size of this array.
2857 Until we know the size, represent its address with a reg. */
2858 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2859 }
2860 else if (DECL_MODE (decl) != BLKmode
2861 /* If -ffloat-store, don't put explicit float vars
2862 into regs. */
2863 && !(flag_float_store
2864 && TREE_CODE (type) == REAL_TYPE)
2865 && ! TREE_THIS_VOLATILE (decl)
2866 && ! TREE_ADDRESSABLE (decl)
 2867	   && (DECL_REGISTER (decl) || ! obey_regdecls))
2868 {
2869 /* Automatic variable that can go in a register. */
2870 enum machine_mode reg_mode = DECL_MODE (decl);
2871 int unsignedp = TREE_UNSIGNED (type);
2872
2873 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2874 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2875 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2876 || TREE_CODE (type) == OFFSET_TYPE)
2877 {
2878 PROMOTE_MODE (reg_mode, unsignedp, type);
2879 }
2880
2881 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
2882 if (TREE_CODE (type) == POINTER_TYPE)
2883 mark_reg_pointer (DECL_RTL (decl));
2884 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2885 }
2886 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2887 {
2888 /* Variable of fixed size that goes on the stack. */
2889 rtx oldaddr = 0;
2890 rtx addr;
2891
2892 /* If we previously made RTL for this decl, it must be an array
2893 whose size was determined by the initializer.
2894 The old address was a register; set that register now
2895 to the proper address. */
2896 if (DECL_RTL (decl) != 0)
2897 {
2898 if (GET_CODE (DECL_RTL (decl)) != MEM
2899 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2900 abort ();
2901 oldaddr = XEXP (DECL_RTL (decl), 0);
2902 }
2903
2904 DECL_RTL (decl)
2905 = assign_stack_temp (DECL_MODE (decl),
2906 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2907 + BITS_PER_UNIT - 1)
2908 / BITS_PER_UNIT),
2909 1);
2910
2911 /* Set alignment we actually gave this decl. */
2912 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2913 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2914
2915 if (oldaddr)
2916 {
2917 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2918 if (addr != oldaddr)
2919 emit_move_insn (oldaddr, addr);
2920 }
2921
2922 /* If this is a memory ref that contains aggregate components,
2923 mark it as such for cse and loop optimize. */
2924 MEM_IN_STRUCT_P (DECL_RTL (decl))
2925 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2926 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2927 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2928#if 0
2929 /* If this is in memory because of -ffloat-store,
2930 set the volatile bit, to prevent optimizations from
2931 undoing the effects. */
2932 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2933 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2934#endif
2935 }
2936 else
2937 /* Dynamic-size object: must push space on the stack. */
2938 {
2939 rtx address, size;
2940
2941 /* Record the stack pointer on entry to block, if have
2942 not already done so. */
2943 if (thisblock->data.block.stack_level == 0)
2944 {
2945 do_pending_stack_adjust ();
2946 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2947 &thisblock->data.block.stack_level,
2948 thisblock->data.block.first_insn);
2949 stack_block_stack = thisblock;
2950 }
2951
2952 /* Compute the variable's size, in bytes. */
2953 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2954 DECL_SIZE (decl),
2955 size_int (BITS_PER_UNIT)),
 2956			  NULL_RTX, VOIDmode, 0);
2957 free_temp_slots ();
2958
2959 /* This is equivalent to calling alloca. */
2960 current_function_calls_alloca = 1;
2961
 2962      /* Allocate space on the stack for the variable. */
 2963      address = allocate_dynamic_stack_space (size, NULL_RTX,
 2964					      DECL_ALIGN (decl));
 2965
 2966      if (nonlocal_goto_handler_slot != 0)
 2967	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2968
2969 /* Reference the variable indirect through that rtx. */
2970 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2971
2972 /* If this is a memory ref that contains aggregate components,
2973 mark it as such for cse and loop optimize. */
2974 MEM_IN_STRUCT_P (DECL_RTL (decl))
2975 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2976 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2977 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2978
2979 /* Indicate the alignment we actually gave this variable. */
2980#ifdef STACK_BOUNDARY
2981 DECL_ALIGN (decl) = STACK_BOUNDARY;
2982#else
2983 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2984#endif
2985 }
2986
2987 if (TREE_THIS_VOLATILE (decl))
2988 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2989 if (TREE_READONLY (decl))
2990 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2991
2992 /* If doing stupid register allocation, make sure life of any
2993 register variable starts here, at the start of its scope. */
2994
2995 if (obey_regdecls)
2996 use_variable (DECL_RTL (decl));
2997}
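/* Illustrative sketch (not part of the original source): on a target whose
   PROMOTE_MODE widens small integer modes, a local `char c' that passes
   the register tests above receives a pseudo in the promoted mode, e.g.

	DECL_RTL (c) == (reg:SI n)	with REG_USERVAR_P set,

   rather than a QImode register.  */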
2998\f
2999/* Emit code to perform the initialization of a declaration DECL. */
3000
3001void
3002expand_decl_init (decl)
3003 tree decl;
3004{
3005 int was_used = TREE_USED (decl);
3006
3007 if (TREE_STATIC (decl))
3008 return;
3009
3010 /* Compute and store the initial value now. */
3011
3012 if (DECL_INITIAL (decl) == error_mark_node)
3013 {
3014 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3015 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3016 || code == POINTER_TYPE)
3017 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3018 0, 0);
3019 emit_queue ();
3020 }
3021 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3022 {
3023 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3024 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3025 emit_queue ();
3026 }
3027
3028 /* Don't let the initialization count as "using" the variable. */
3029 TREE_USED (decl) = was_used;
3030
3031 /* Free any temporaries we made while initializing the decl. */
3032 free_temp_slots ();
3033}
3034
3035/* CLEANUP is an expression to be executed at exit from this binding contour;
3036 for example, in C++, it might call the destructor for this variable.
3037
3038 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3039 either before or after calling `expand_decl' but before compiling
3040 any subsequent expressions. This is because CLEANUP may be expanded
3041 more than once, on different branches of execution.
3042 For the same reason, CLEANUP may not contain a CALL_EXPR
3043 except as its topmost node--else `preexpand_calls' would get confused.
3044
3045 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3046 that is not associated with any particular variable. */
3047
3048int
3049expand_decl_cleanup (decl, cleanup)
3050 tree decl, cleanup;
3051{
3052 struct nesting *thisblock = block_stack;
3053
3054 /* Error if we are not in any block. */
3055 if (thisblock == 0)
3056 return 0;
3057
3058 /* Record the cleanup if there is one. */
3059
3060 if (cleanup != 0)
3061 {
3062 thisblock->data.block.cleanups
3063 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3064 /* If this block has a cleanup, it belongs in stack_block_stack. */
3065 stack_block_stack = thisblock;
3066 }
3067 return 1;
3068}
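/* Illustrative sketch (not part of the original source): a C++ front end
   registering a destructor call as a cleanup.  OBJECT_DECL and DTOR_CALL
   are hypothetical placeholders; any SAVE_EXPRs in DTOR_CALL must be
   preevaluated as described above.  */
#if 0
  expand_decl (object_decl);
  expand_decl_init (object_decl);
  if (! expand_decl_cleanup (object_decl, dtor_call))
    /* not inside any binding contour; the front end reports the error */ ;
#endif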
3069\f
3070/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3071 DECL_ELTS is the list of elements that belong to DECL's type.
3072 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3073
3074void
3075expand_anon_union_decl (decl, cleanup, decl_elts)
3076 tree decl, cleanup, decl_elts;
3077{
3078 struct nesting *thisblock = block_stack;
3079 rtx x;
3080
 3081  expand_decl (decl); expand_decl_cleanup (decl, cleanup);
3082 x = DECL_RTL (decl);
3083
3084 while (decl_elts)
3085 {
3086 tree decl_elt = TREE_VALUE (decl_elts);
3087 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3089
3090 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3091 instead create a new MEM rtx with the proper mode. */
3092 if (GET_CODE (x) == MEM)
3093 {
3094 if (mode == GET_MODE (x))
3095 DECL_RTL (decl_elt) = x;
3096 else
3097 {
3098 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3099 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3100 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3101 }
3102 }
3103 else if (GET_CODE (x) == REG)
3104 {
3105 if (mode == GET_MODE (x))
3106 DECL_RTL (decl_elt) = x;
3107 else
3108 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3109 }
3110 else
3111 abort ();
3112
3113 /* Record the cleanup if there is one. */
3114
 3115      if (cleanup_elt != 0)
3116 thisblock->data.block.cleanups
3117 = temp_tree_cons (decl_elt, cleanup_elt,
3118 thisblock->data.block.cleanups);
3119
3120 decl_elts = TREE_CHAIN (decl_elts);
3121 }
3122}
3123\f
3124/* Expand a list of cleanups LIST.
3125 Elements may be expressions or may be nested lists.
3126
3127 If DONT_DO is nonnull, then any list-element
3128 whose TREE_PURPOSE matches DONT_DO is omitted.
3129 This is sometimes used to avoid a cleanup associated with
3130 a value that is being returned out of the scope. */
3131
3132static void
3133expand_cleanups (list, dont_do)
3134 tree list;
3135 tree dont_do;
3136{
3137 tree tail;
3138 for (tail = list; tail; tail = TREE_CHAIN (tail))
3139 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3140 {
3141 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3142 expand_cleanups (TREE_VALUE (tail), dont_do);
3143 else
3144 {
3145 /* Cleanups may be run multiple times. For example,
3146 when exiting a binding contour, we expand the
3147 cleanups associated with that contour. When a goto
3148 within that binding contour has a target outside that
3149 contour, it will expand all cleanups from its scope to
3150 the target. Though the cleanups are expanded multiple
3151 times, the control paths are non-overlapping so the
3152 cleanups will not be executed twice. */
3153 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3154 free_temp_slots ();
3155 }
3156 }
3157}
3158
3159/* Move all cleanups from the current block_stack
3160 to the containing block_stack, where they are assumed to
3161 have been created. If anything can cause a temporary to
3162 be created, but not expanded for more than one level of
3163 block_stacks, then this code will have to change. */
3164
3165void
3166move_cleanups_up ()
3167{
3168 struct nesting *block = block_stack;
3169 struct nesting *outer = block->next;
3170
3171 outer->data.block.cleanups
3172 = chainon (block->data.block.cleanups,
3173 outer->data.block.cleanups);
3174 block->data.block.cleanups = 0;
3175}
3176
3177tree
3178last_cleanup_this_contour ()
3179{
3180 if (block_stack == 0)
3181 return 0;
3182
3183 return block_stack->data.block.cleanups;
3184}
3185
3186/* Return 1 if there are any pending cleanups at this point.
3187 If THIS_CONTOUR is nonzero, check the current contour as well.
3188 Otherwise, look only at the contours that enclose this one. */
3189
3190int
3191any_pending_cleanups (this_contour)
3192 int this_contour;
3193{
3194 struct nesting *block;
3195
3196 if (block_stack == 0)
3197 return 0;
3198
3199 if (this_contour && block_stack->data.block.cleanups != NULL)
3200 return 1;
3201 if (block_stack->data.block.cleanups == 0
3202 && (block_stack->data.block.outer_cleanups == 0
3203#if 0
3204 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3205#endif
3206 ))
3207 return 0;
3208
3209 for (block = block_stack->next; block; block = block->next)
3210 if (block->data.block.cleanups != 0)
3211 return 1;
3212
3213 return 0;
3214}
3215\f
3216/* Enter a case (Pascal) or switch (C) statement.
3217 Push a block onto case_stack and nesting_stack
3218 to accumulate the case-labels that are seen
3219 and to record the labels generated for the statement.
3220
3221 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3222 Otherwise, this construct is transparent for `exit_something'.
3223
3224 EXPR is the index-expression to be dispatched on.
3225 TYPE is its nominal type. We could simply convert EXPR to this type,
3226 but instead we take short cuts. */
3227
3228void
3229expand_start_case (exit_flag, expr, type, printname)
3230 int exit_flag;
3231 tree expr;
3232 tree type;
3233 char *printname;
3234{
3235 register struct nesting *thiscase = ALLOC_NESTING ();
3236
3237 /* Make an entry on case_stack for the case we are entering. */
3238
3239 thiscase->next = case_stack;
3240 thiscase->all = nesting_stack;
3241 thiscase->depth = ++nesting_depth;
3242 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3243 thiscase->data.case_stmt.case_list = 0;
3244 thiscase->data.case_stmt.index_expr = expr;
3245 thiscase->data.case_stmt.nominal_type = type;
3246 thiscase->data.case_stmt.default_label = 0;
3247 thiscase->data.case_stmt.num_ranges = 0;
3248 thiscase->data.case_stmt.printname = printname;
3249 thiscase->data.case_stmt.seenlabel = 0;
3250 case_stack = thiscase;
3251 nesting_stack = thiscase;
3252
3253 do_pending_stack_adjust ();
3254
3255 /* Make sure case_stmt.start points to something that won't
3256 need any transformation before expand_end_case. */
3257 if (GET_CODE (get_last_insn ()) != NOTE)
 3258    emit_note (NULL_PTR, NOTE_INSN_DELETED);
3259
3260 thiscase->data.case_stmt.start = get_last_insn ();
3261}
3262
3263/* Start a "dummy case statement" within which case labels are invalid
3264 and are not connected to any larger real case statement.
3265 This can be used if you don't want to let a case statement jump
3266 into the middle of certain kinds of constructs. */
3267
3268void
3269expand_start_case_dummy ()
3270{
3271 register struct nesting *thiscase = ALLOC_NESTING ();
3272
3273 /* Make an entry on case_stack for the dummy. */
3274
3275 thiscase->next = case_stack;
3276 thiscase->all = nesting_stack;
3277 thiscase->depth = ++nesting_depth;
3278 thiscase->exit_label = 0;
3279 thiscase->data.case_stmt.case_list = 0;
3280 thiscase->data.case_stmt.start = 0;
3281 thiscase->data.case_stmt.nominal_type = 0;
3282 thiscase->data.case_stmt.default_label = 0;
3283 thiscase->data.case_stmt.num_ranges = 0;
3284 case_stack = thiscase;
3285 nesting_stack = thiscase;
3286}
3287
3288/* End a dummy case statement. */
3289
3290void
3291expand_end_case_dummy ()
3292{
3293 POPSTACK (case_stack);
3294}
3295
3296/* Return the data type of the index-expression
3297 of the innermost case statement, or null if none. */
3298
3299tree
3300case_index_expr_type ()
3301{
3302 if (case_stack)
3303 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3304 return 0;
3305}
3306\f
3307/* Accumulate one case or default label inside a case or switch statement.
3308 VALUE is the value of the case (a null pointer, for a default label).
3309
3310 If not currently inside a case or switch statement, return 1 and do
3311 nothing. The caller will print a language-specific error message.
3312 If VALUE is a duplicate or overlaps, return 2 and do nothing
3313 except store the (first) duplicate node in *DUPLICATE.
3314 If VALUE is out of range, return 3 and do nothing.
 3315   If we are jumping into the scope of a cleanup or var-sized array, return 5.
3316 Return 0 on success.
3317
3318 Extended to handle range statements. */
3319
3320int
3321pushcase (value, label, duplicate)
3322 register tree value;
3323 register tree label;
3324 tree *duplicate;
3325{
3326 register struct case_node **l;
3327 register struct case_node *n;
3328 tree index_type;
3329 tree nominal_type;
3330
3331 /* Fail if not inside a real case statement. */
3332 if (! (case_stack && case_stack->data.case_stmt.start))
3333 return 1;
3334
3335 if (stack_block_stack
3336 && stack_block_stack->depth > case_stack->depth)
3337 return 5;
3338
3339 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3340 nominal_type = case_stack->data.case_stmt.nominal_type;
3341
3342 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3343 if (index_type == error_mark_node)
3344 return 0;
3345
3346 /* Convert VALUE to the type in which the comparisons are nominally done. */
3347 if (value != 0)
3348 value = convert (nominal_type, value);
3349
3350 /* If this is the first label, warn if any insns have been emitted. */
3351 if (case_stack->data.case_stmt.seenlabel == 0)
3352 {
3353 rtx insn;
3354 for (insn = case_stack->data.case_stmt.start;
3355 insn;
3356 insn = NEXT_INSN (insn))
3357 {
3358 if (GET_CODE (insn) == CODE_LABEL)
3359 break;
3360 if (GET_CODE (insn) != NOTE
3361 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3362 {
3363 warning ("unreachable code at beginning of %s",
3364 case_stack->data.case_stmt.printname);
3365 break;
3366 }
3367 }
3368 }
3369 case_stack->data.case_stmt.seenlabel = 1;
3370
3371 /* Fail if this value is out of range for the actual type of the index
3372 (which may be narrower than NOMINAL_TYPE). */
3373 if (value != 0 && ! int_fits_type_p (value, index_type))
3374 return 3;
3375
3376 /* Fail if this is a duplicate or overlaps another entry. */
3377 if (value == 0)
3378 {
3379 if (case_stack->data.case_stmt.default_label != 0)
3380 {
3381 *duplicate = case_stack->data.case_stmt.default_label;
3382 return 2;
3383 }
3384 case_stack->data.case_stmt.default_label = label;
3385 }
3386 else
3387 {
3388 /* Find the elt in the chain before which to insert the new value,
3389 to keep the chain sorted in increasing order.
3390 But report an error if this element is a duplicate. */
3391 for (l = &case_stack->data.case_stmt.case_list;
3392 /* Keep going past elements distinctly less than VALUE. */
3393 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3394 l = &(*l)->right)
3395 ;
3396 if (*l)
3397 {
3398 /* Element we will insert before must be distinctly greater;
3399 overlap means error. */
3400 if (! tree_int_cst_lt (value, (*l)->low))
3401 {
3402 *duplicate = (*l)->code_label;
3403 return 2;
3404 }
3405 }
3406
3407 /* Add this label to the chain, and succeed.
3408 Copy VALUE so it is on temporary rather than momentary
3409 obstack and will thus survive till the end of the case statement. */
3410 n = (struct case_node *) oballoc (sizeof (struct case_node));
3411 n->left = 0;
3412 n->right = *l;
3413 n->high = n->low = copy_node (value);
3414 n->code_label = label;
3415 *l = n;
3416 }
3417
3418 expand_label (label);
3419 return 0;
3420}
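/* Illustrative sketch (hypothetical front-end caller; the error
   message wording is invented here) of the return codes above:

     tree duplicate;
     int failure = pushcase (value, label, &duplicate);

     if (failure == 1)
       error ("case label not within a switch statement");
     else if (failure == 2)
       error ("duplicate case value");
     else if (failure == 3)
       error ("case value out of range");
     else if (failure == 5)
       error ("case label within scope of cleanup or variable array");
*/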
3421
3422/* Like pushcase but this case applies to all values
3423 between VALUE1 and VALUE2 (inclusive).
3424 The return value is the same as that of pushcase
3425 but there is one additional error code:
3426 4 means the specified range was empty. */
3427
3428int
3429pushcase_range (value1, value2, label, duplicate)
3430 register tree value1, value2;
3431 register tree label;
3432 tree *duplicate;
3433{
3434 register struct case_node **l;
3435 register struct case_node *n;
3436 tree index_type;
3437 tree nominal_type;
3438
3439 /* Fail if not inside a real case statement. */
3440 if (! (case_stack && case_stack->data.case_stmt.start))
3441 return 1;
3442
3443 if (stack_block_stack
3444 && stack_block_stack->depth > case_stack->depth)
3445 return 5;
3446
3447 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3448 nominal_type = case_stack->data.case_stmt.nominal_type;
3449
3450 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3451 if (index_type == error_mark_node)
3452 return 0;
3453
3454 /* If this is the first label, warn if any insns have been emitted. */
3455 if (case_stack->data.case_stmt.seenlabel == 0)
3456 {
3457 rtx insn;
3458 for (insn = case_stack->data.case_stmt.start;
3459 insn;
3460 insn = NEXT_INSN (insn))
3461 {
3462 if (GET_CODE (insn) == CODE_LABEL)
3463 break;
3464 if (GET_CODE (insn) != NOTE
3465 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3466 {
3467 warning ("unreachable code at beginning of %s",
3468 case_stack->data.case_stmt.printname);
3469 break;
3470 }
3471 }
3472 }
3473 case_stack->data.case_stmt.seenlabel = 1;
3474
3475 /* Convert VALUEs to type in which the comparisons are nominally done. */
3476 if (value1 == 0) /* Negative infinity. */
3477 value1 = TYPE_MIN_VALUE (index_type);
3478 value1 = convert (nominal_type, value1);
3479
3480 if (value2 == 0) /* Positive infinity. */
3481 value2 = TYPE_MAX_VALUE (index_type);
3482 value2 = convert (nominal_type, value2);
3483
3484 /* Fail if these values are out of range. */
3485 if (! int_fits_type_p (value1, index_type))
3486 return 3;
3487
3488 if (! int_fits_type_p (value2, index_type))
3489 return 3;
3490
3491 /* Fail if the range is empty. */
3492 if (tree_int_cst_lt (value2, value1))
3493 return 4;
3494
3495 /* If the bounds are equal, turn this into the one-value case. */
3496 if (tree_int_cst_equal (value1, value2))
3497 return pushcase (value1, label, duplicate);
3498
3499 /* Find the elt in the chain before which to insert the new value,
3500 to keep the chain sorted in increasing order.
3501 But report an error if this element is a duplicate. */
3502 for (l = &case_stack->data.case_stmt.case_list;
3503 /* Keep going past elements distinctly less than this range. */
3504 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3505 l = &(*l)->right)
3506 ;
3507 if (*l)
3508 {
3509 /* Element we will insert before must be distinctly greater;
3510 overlap means error. */
3511 if (! tree_int_cst_lt (value2, (*l)->low))
3512 {
3513 *duplicate = (*l)->code_label;
3514 return 2;
3515 }
3516 }
3517
3518 /* Add this label to the chain, and succeed.
3519 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3520 obstack and will thus survive till the end of the case statement. */
3521
3522 n = (struct case_node *) oballoc (sizeof (struct case_node));
3523 n->left = 0;
3524 n->right = *l;
3525 n->low = copy_node (value1);
3526 n->high = copy_node (value2);
3527 n->code_label = label;
3528 *l = n;
3529
3530 expand_label (label);
3531
3532 case_stack->data.case_stmt.num_ranges++;
3533
3534 return 0;
3535}
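/* Illustrative sketch: a GNU C range label such as `case 1 ... 9:'
   would reach this routine as, hypothetically,

     failure = pushcase_range (build_int_2 (1, 0), build_int_2 (9, 0),
                               label, &duplicate);

   with the extra return code 4 reporting an empty range.  */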
3536\f
3537/* Called when the index of a switch statement is an enumerated type
3538 and there is no default label.
3539
3540 Checks that all enumeration literals are covered by the case
3541 expressions of a switch. Also, warn if there are any extra
3542 switch cases that are *not* elements of the enumerated type.
3543
3544 If all enumeration literals were covered by the case expressions,
3545 turn one of the expressions into the default expression since it should
3546 not be possible to fall through such a switch. */
3547
3548void
3549check_for_full_enumeration_handling (type)
3550 tree type;
3551{
3552 register struct case_node *n;
3553 register struct case_node **l;
3554 register tree chain;
3555 int all_values = 1;
3556
3557 /* The time complexity of this loop is currently O(N * M), with
3558 N being the number of enumerals in the enumerated type, and
3559 M being the number of case expressions in the switch. */
3560
3561 for (chain = TYPE_VALUES (type);
3562 chain;
3563 chain = TREE_CHAIN (chain))
3564 {
3565 /* Find a match between enumeral and case expression, if possible.
3566 Quit looking when we've gone too far (since case expressions
3567 are kept sorted in ascending order). Warn about enumerals not
3568 handled in the switch statement case expression list. */
3569
3570 for (n = case_stack->data.case_stmt.case_list;
3571 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3572 n = n->right)
3573 ;
3574
3575 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
3576 {
3577 if (warn_switch)
3578 warning ("enumeration value `%s' not handled in switch",
3579 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3580 all_values = 0;
3581 }
3582 }
3583
3584 /* Now we go the other way around; we warn if there are case
3585 expressions that don't correspond to enumerals. This can
3586 occur since C and C++ don't enforce type-checking of
3587 assignments to enumeration variables. */
3588
3589 if (warn_switch)
3590 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3591 {
3592 for (chain = TYPE_VALUES (type);
3593 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3594 chain = TREE_CHAIN (chain))
3595 ;
3596
3597 if (!chain)
3598 warning ("case value `%d' not in enumerated type `%s'",
3599 TREE_INT_CST_LOW (n->low),
3600 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3601 == IDENTIFIER_NODE)
3602 ? TYPE_NAME (type)
3603 : DECL_NAME (TYPE_NAME (type))));
3604 if (!tree_int_cst_equal (n->low, n->high))
3605 {
3606 for (chain = TYPE_VALUES (type);
3607 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
3608 chain = TREE_CHAIN (chain))
3609 ;
3610
3611 if (!chain)
3612 warning ("case value `%d' not in enumerated type `%s'",
3613 TREE_INT_CST_LOW (n->high),
3614 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3615 == IDENTIFIER_NODE)
3616 ? TYPE_NAME (type)
3617 : DECL_NAME (TYPE_NAME (type))));
3618 }
3619 }
3620
3621 /* If all values were found as case labels, make one of them the default
3622 label. Thus, this switch will never fall through. We arbitrarily pick
3623 the last one to make the default since this is likely the most
3624 efficient choice. */
3625
3626 if (all_values)
3627 {
3628 for (l = &case_stack->data.case_stmt.case_list;
3629 (*l)->right != 0;
3630 l = &(*l)->right)
3631 ;
3632
3633 case_stack->data.case_stmt.default_label = (*l)->code_label;
3634 *l = 0;
3635 }
3636}
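/* Illustrative sketch of the checks above, assuming user code like

     enum color { RED, GREEN, BLUE } c;
     switch (c) { case RED: ... case BLUE: ... }

   With -Wswitch and no default label, GREEN draws the `not handled in
   switch' warning; a `case 7:' would instead draw the `not in
   enumerated type' warning.  */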
3637\f
3638/* Terminate a case (Pascal) or switch (C) statement
3639 in which ORIG_INDEX is the expression to be tested.
3640 Generate the code to test it and jump to the right place. */
3641
3642void
3643expand_end_case (orig_index)
3644 tree orig_index;
3645{
3646 tree minval, maxval, range;
3647 rtx default_label = 0;
3648 register struct case_node *n;
3649 int count;
3650 rtx index;
3651 rtx table_label = gen_label_rtx ();
3652 int ncases;
3653 rtx *labelvec;
3654 register int i;
3655 rtx before_case;
3656 register struct nesting *thiscase = case_stack;
3657 tree index_expr = thiscase->data.case_stmt.index_expr;
3658 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3659
3660 do_pending_stack_adjust ();
3661
3662 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3663 if (TREE_TYPE (index_expr) != error_mark_node)
3664 {
3665 /* If switch expression was an enumerated type, check that all
3666 enumeration literals are covered by the cases.
3667 No sense trying this if there's a default case, however. */
3668
3669 if (!thiscase->data.case_stmt.default_label
3670 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3671 && TREE_CODE (index_expr) != INTEGER_CST)
3672 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3673
3674 /* If this is the first label, warn if any insns have been emitted. */
3675 if (thiscase->data.case_stmt.seenlabel == 0)
3676 {
3677 rtx insn;
3678 for (insn = get_last_insn ();
3679 insn != case_stack->data.case_stmt.start;
3680 insn = PREV_INSN (insn))
3681 if (GET_CODE (insn) != NOTE
3682 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
3683 {
3684 warning ("unreachable code at beginning of %s",
3685 case_stack->data.case_stmt.printname);
3686 break;
3687 }
3688 }
3689
3690 /* If we don't have a default-label, create one here,
3691 after the body of the switch. */
3692 if (thiscase->data.case_stmt.default_label == 0)
3693 {
3694 thiscase->data.case_stmt.default_label
3695 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3696 expand_label (thiscase->data.case_stmt.default_label);
3697 }
3698 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3699
3700 before_case = get_last_insn ();
3701
3702 /* Simplify the case-list before we count it. */
3703 group_case_nodes (thiscase->data.case_stmt.case_list);
3704
3705 /* Get upper and lower bounds of case values.
3706 Also convert all the case values to the index expr's data type. */
3707
3708 count = 0;
3709 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3710 {
3711 /* Check low and high label values are integers. */
3712 if (TREE_CODE (n->low) != INTEGER_CST)
3713 abort ();
3714 if (TREE_CODE (n->high) != INTEGER_CST)
3715 abort ();
3716
3717 n->low = convert (TREE_TYPE (index_expr), n->low);
3718 n->high = convert (TREE_TYPE (index_expr), n->high);
3719
3720 /* Count the elements and track the largest and smallest
3721 of them (treating them as signed even if they are not). */
3722 if (count++ == 0)
3723 {
3724 minval = n->low;
3725 maxval = n->high;
3726 }
3727 else
3728 {
3729 if (INT_CST_LT (n->low, minval))
3730 minval = n->low;
3731 if (INT_CST_LT (maxval, n->high))
3732 maxval = n->high;
3733 }
3734 /* A range counts double, since it requires two compares. */
3735 if (! tree_int_cst_equal (n->low, n->high))
3736 count++;
3737 }
3738
3739 /* Compute span of values. */
3740 if (count != 0)
3741 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3742 maxval, minval));
3743
3744 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3745 {
3746 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3747 emit_queue ();
3748 emit_jump (default_label);
3749 }
3750 /* If range of values is much bigger than number of values,
3751 make a sequence of conditional branches instead of a dispatch.
3752 If the switch-index is a constant, do it this way
3753 because we can optimize it. */
3754
3755#ifndef CASE_VALUES_THRESHOLD
3756#ifdef HAVE_casesi
3757#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
3758#else
3759 /* If machine does not have a case insn that compares the
3760 bounds, this means extra overhead for dispatch tables
3761 which raises the threshold for using them. */
3762#define CASE_VALUES_THRESHOLD 5
3763#endif /* HAVE_casesi */
3764#endif /* CASE_VALUES_THRESHOLD */
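/* Illustrative note: a target that wants a different trade-off could,
   as a hypothetical example, put `#define CASE_VALUES_THRESHOLD 8' in
   its tm.h, requiring at least 8 case values before a dispatch table
   is considered worthwhile.  */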
3765
3766 else if (TREE_INT_CST_HIGH (range) != 0
3767 || count < CASE_VALUES_THRESHOLD
3768 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
3769 > 10 * count)
3770 || TREE_CODE (index_expr) == INTEGER_CST
3771 /* These will reduce to a constant. */
3772 || (TREE_CODE (index_expr) == CALL_EXPR
3773 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3774 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3775 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3776 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3777 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3778 {
3779 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3780
3781 /* If the index is a short or char that we do not have
3782 an insn to handle comparisons directly, convert it to
3783 a full integer now, rather than letting each comparison
3784 generate the conversion. */
3785
3786 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3787 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3788 == CODE_FOR_nothing))
3789 {
3790 enum machine_mode wider_mode;
3791 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3792 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3793 if (cmp_optab->handlers[(int) wider_mode].insn_code
3794 != CODE_FOR_nothing)
3795 {
3796 index = convert_to_mode (wider_mode, index, unsignedp);
3797 break;
3798 }
3799 }
3800
3801 emit_queue ();
3802 do_pending_stack_adjust ();
3803
3804 index = protect_from_queue (index, 0);
3805 if (GET_CODE (index) == MEM)
3806 index = copy_to_reg (index);
3807 if (GET_CODE (index) == CONST_INT
3808 || TREE_CODE (index_expr) == INTEGER_CST)
3809 {
3810 /* Make a tree node with the proper constant value
3811 if we don't already have one. */
3812 if (TREE_CODE (index_expr) != INTEGER_CST)
3813 {
3814 index_expr
3815 = build_int_2 (INTVAL (index),
3816 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3817 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3818 }
3819
3820 /* For constant index expressions we need only
3821 issue an unconditional branch to the appropriate
3822 target code. The job of removing any unreachable
3823 code is left to the optimization phase if the
3824 "-O" option is specified. */
3825 for (n = thiscase->data.case_stmt.case_list;
3826 n;
3827 n = n->right)
3828 {
3829 if (! tree_int_cst_lt (index_expr, n->low)
3830 && ! tree_int_cst_lt (n->high, index_expr))
3831 break;
3832 }
3833 if (n)
3834 emit_jump (label_rtx (n->code_label));
3835 else
3836 emit_jump (default_label);
3837 }
3838 else
3839 {
3840 /* If the index expression is not constant we generate
3841 a binary decision tree to select the appropriate
3842 target code. This is done as follows:
3843
3844 The list of cases is rearranged into a binary tree,
3845 nearly optimal assuming equal probability for each case.
3846
3847 The tree is transformed into RTL, eliminating
3848 redundant test conditions at the same time.
3849
3850 If program flow could reach the end of the
3851 decision tree an unconditional jump to the
3852 default code is emitted. */
3853
3854 use_cost_table
3855 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3856 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3857 balance_case_nodes (&thiscase->data.case_stmt.case_list,
3858 NULL_PTR);
3859 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3860 default_label, TREE_TYPE (index_expr));
3861 emit_jump_if_reachable (default_label);
3862 }
3863 }
3864 else
3865 {
3866 int win = 0;
3867#ifdef HAVE_casesi
3868 if (HAVE_casesi)
3869 {
3870 enum machine_mode index_mode = SImode;
3871 int index_bits = GET_MODE_BITSIZE (index_mode);
3872
3873 /* Convert the index to SImode. */
3874 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3875 > GET_MODE_BITSIZE (index_mode))
3876 {
3877 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3878 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
3879
3880 /* We must handle the endpoints in the original mode. */
3881 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3882 index_expr, minval);
3883 minval = integer_zero_node;
3884 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3885 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
3886 emit_jump_insn (gen_bltu (default_label));
3887 /* Now we can safely truncate. */
3888 index = convert_to_mode (index_mode, index, 0);
3889 }
3890 else
3891 {
3892 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3893 index_expr = convert (type_for_size (index_bits, 0),
3894 index_expr);
3895 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3896 }
3897 emit_queue ();
3898 index = protect_from_queue (index, 0);
3899 do_pending_stack_adjust ();
3900
3901 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
3902 VOIDmode, 0),
3903 expand_expr (range, NULL_RTX,
3904 VOIDmode, 0),
3905 table_label, default_label));
3906 win = 1;
3907 }
3908#endif
3909#ifdef HAVE_tablejump
3910 if (! win && HAVE_tablejump)
3911 {
3912 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3913 fold (build (MINUS_EXPR,
3914 TREE_TYPE (index_expr),
3915 index_expr, minval)));
3916 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3917 emit_queue ();
3918 index = protect_from_queue (index, 0);
3919 do_pending_stack_adjust ();
3920
3921 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3922 expand_expr (range, NULL_RTX, VOIDmode, 0),
3923 table_label, default_label);
3924 win = 1;
3925 }
3926#endif
3927 if (! win)
3928 abort ();
3929
3930 /* Get table of labels to jump to, in order of case index. */
3931
3932 ncases = TREE_INT_CST_LOW (range) + 1;
3933 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3934 bzero (labelvec, ncases * sizeof (rtx));
3935
3936 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3937 {
3938 register HOST_WIDE_INT i
3939 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3940
3941 while (1)
3942 {
3943 labelvec[i]
3944 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3945 if (i + TREE_INT_CST_LOW (minval)
3946 == TREE_INT_CST_LOW (n->high))
3947 break;
3948 i++;
3949 }
3950 }
3951
3952 /* Fill in the gaps with the default. */
3953 for (i = 0; i < ncases; i++)
3954 if (labelvec[i] == 0)
3955 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3956
3957 /* Output the table */
3958 emit_label (table_label);
3959
3960 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3961 were an expression, instead of an #ifdef/#ifndef. */
3962 if (
3963#ifdef CASE_VECTOR_PC_RELATIVE
3964 1 ||
3965#endif
3966 flag_pic)
3967 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3968 gen_rtx (LABEL_REF, Pmode, table_label),
3969 gen_rtvec_v (ncases, labelvec)));
3970 else
3971 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3972 gen_rtvec_v (ncases, labelvec)));
3973
3974 /* If the case insn drops through the table,
3975 after the table we must jump to the default-label.
3976 Otherwise record no drop-through after the table. */
3977#ifdef CASE_DROPS_THROUGH
3978 emit_jump (default_label);
3979#else
3980 emit_barrier ();
3981#endif
3982 }
3983
3984 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3985 reorder_insns (before_case, get_last_insn (),
3986 thiscase->data.case_stmt.start);
3987 }
3988 if (thiscase->exit_label)
3989 emit_label (thiscase->exit_label);
3990
3991 POPSTACK (case_stack);
3992
3993 free_temp_slots ();
3994}
3995
3996/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3997
3998static void
3999do_jump_if_equal (op1, op2, label, unsignedp)
4000 rtx op1, op2, label;
4001 int unsignedp;
4002{
4003 if (GET_CODE (op1) == CONST_INT
4004 && GET_CODE (op2) == CONST_INT)
4005 {
4006 if (INTVAL (op1) == INTVAL (op2))
4007 emit_jump (label);
4008 }
4009 else
4010 {
4011 enum machine_mode mode = GET_MODE (op1);
4012 if (mode == VOIDmode)
4013 mode = GET_MODE (op2);
4014 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
4015 emit_jump_insn (gen_beq (label));
4016 }
4017}
4018\f
4019/* Not all case values are encountered equally. This function
4020 uses a heuristic to weight case labels, in cases where that
4021 looks like a reasonable thing to do.
4022
4023 Right now, all we try to guess is text, and we establish the
4024 following weights:
4025
4026 chars above space: 16
4027 digits: 16
4028 default: 12
4029 space, punct: 8
4030 tab: 4
4031 newline: 2
4032 other "\" chars: 1
4033 remaining chars: 0
4034
4035 If we find any cases in the switch that are not either -1 or in the range
4036 of valid ASCII characters, or are control characters other than those
4037 commonly used with "\", don't treat this switch scanning text.
4038
4039 Return 1 if these nodes are suitable for cost estimation, otherwise
4040 return 0. */
4041
4042static int
4043estimate_case_costs (node)
4044 case_node_ptr node;
4045{
4046 tree min_ascii = build_int_2 (-1, -1);
4047 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4048 case_node_ptr n;
4049 int i;
4050
4051 /* If we haven't already made the cost table, make it now. Note that the
4052 lower bound of the table is -1, not zero. */
4053
4054 if (cost_table == NULL)
4055 {
4056 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
4057 bzero (cost_table - 1, 129 * sizeof (short));
4058
4059 for (i = 0; i < 128; i++)
4060 {
4061 if (isalnum (i))
4062 cost_table[i] = 16;
4063 else if (ispunct (i))
4064 cost_table[i] = 8;
4065 else if (iscntrl (i))
4066 cost_table[i] = -1;
4067 }
4068
4069 cost_table[' '] = 8;
4070 cost_table['\t'] = 4;
4071 cost_table['\0'] = 4;
4072 cost_table['\n'] = 2;
4073 cost_table['\f'] = 1;
4074 cost_table['\v'] = 1;
4075 cost_table['\b'] = 1;
4076 }
4077
4078 /* See if all the case expressions look like text. It is text if the
4079 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4080 as signed arithmetic since we don't want to ever access cost_table with a
4081 value less than -1. Also check that none of the constants in a range
4082 are strange control characters. */
4083
4084 for (n = node; n; n = n->right)
4085 {
4086 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4087 return 0;
4088
4089 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
4090 if (cost_table[i] < 0)
4091 return 0;
4092 }
4093
4094 /* All interesting values are within the range of interesting
4095 ASCII characters. */
4096 return 1;
4097}
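/* Illustrative example of the weighting above: in a scanner-style
   `switch (*cp)' with cases for letters, ' ', and '\n', the letter
   cases cost 16 each while '\n' costs 2, so balance_case_nodes will
   place its pivots among the letter cases rather than splitting the
   chain purely by node count.  */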
4098
4099/* Scan an ordered list of case nodes
4100 combining those with consecutive values or ranges.
4101
4102 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4103
4104static void
4105group_case_nodes (head)
4106 case_node_ptr head;
4107{
4108 case_node_ptr node = head;
4109
4110 while (node)
4111 {
4112 rtx lb = next_real_insn (label_rtx (node->code_label));
4113 case_node_ptr np = node;
4114
4115 /* Try to group the successors of NODE with NODE. */
4116 while (((np = np->right) != 0)
4117 /* Do they jump to the same place? */
4118 && next_real_insn (label_rtx (np->code_label)) == lb
4119 /* Are their ranges consecutive? */
4120 && tree_int_cst_equal (np->low,
4121 fold (build (PLUS_EXPR,
4122 TREE_TYPE (node->high),
4123 node->high,
4124 integer_one_node)))
4125 /* An overflow is not consecutive. */
4126 && tree_int_cst_lt (node->high,
4127 fold (build (PLUS_EXPR,
4128 TREE_TYPE (node->high),
4129 node->high,
4130 integer_one_node))))
4131 {
4132 node->high = np->high;
4133 }
4134 /* NP is the first node after NODE which can't be grouped with it.
4135 Delete the nodes in between, and move on to that node. */
4136 node->right = np;
4137 node = np;
4138 }
4139}
4140
4141/* Take an ordered list of case nodes
4142 and transform them into a near optimal binary tree,
4143 on the assumption that any target code selection value is as
4144 likely as any other.
4145
4146 The transformation is performed by splitting the ordered
4147 list into two equal sections plus a pivot. The parts are
4148 then attached to the pivot as left and right branches. Each
4149 branch is then transformed recursively. */
4150
4151static void
4152balance_case_nodes (head, parent)
4153 case_node_ptr *head;
4154 case_node_ptr parent;
4155{
4156 register case_node_ptr np;
4157
4158 np = *head;
4159 if (np)
4160 {
4161 int cost = 0;
4162 int i = 0;
4163 int ranges = 0;
4164 register case_node_ptr *npp;
4165 case_node_ptr left;
4166
4167 /* Count the number of entries on branch. Also count the ranges. */
4168
4169 while (np)
4170 {
4171 if (!tree_int_cst_equal (np->low, np->high))
4172 {
4173 ranges++;
4174 if (use_cost_table)
4175 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4176 }
4177
4178 if (use_cost_table)
4179 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4180
4181 i++;
4182 np = np->right;
4183 }
4184
4185 if (i > 2)
4186 {
4187 /* Split this list if it is long enough for that to help. */
4188 npp = head;
4189 left = *npp;
4190 if (use_cost_table)
4191 {
4192 /* Find the place in the list that bisects the list's total cost;
4193 here I gets half the total cost. */
4194 int n_moved = 0;
4195 i = (cost + 1) / 2;
4196 while (1)
4197 {
4198 /* Skip nodes while their cost does not reach that amount. */
4199 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4200 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4201 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4202 if (i <= 0)
4203 break;
4204 npp = &(*npp)->right;
4205 n_moved += 1;
4206 }
4207 if (n_moved == 0)
4208 {
4209 /* Leave this branch lopsided, but optimize left-hand
4210 side and fill in `parent' fields for right-hand side. */
4211 np = *head;
4212 np->parent = parent;
4213 balance_case_nodes (&np->left, np);
4214 for (; np->right; np = np->right)
4215 np->right->parent = np;
4216 return;
4217 }
4218 }
4219 /* If there are just three nodes, split at the middle one. */
4220 else if (i == 3)
4221 npp = &(*npp)->right;
4222 else
4223 {
4224 /* Find the place in the list that bisects the list's total cost,
4225 where ranges count as 2.
4226 Here I gets half the total cost. */
4227 i = (i + ranges + 1) / 2;
4228 while (1)
4229 {
4230 /* Skip nodes while their cost does not reach that amount. */
4231 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4232 i--;
4233 i--;
4234 if (i <= 0)
4235 break;
4236 npp = &(*npp)->right;
4237 }
4238 }
4239 *head = np = *npp;
4240 *npp = 0;
4241 np->parent = parent;
4242 np->left = left;
4243
4244 /* Optimize each of the two split parts. */
4245 balance_case_nodes (&np->left, np);
4246 balance_case_nodes (&np->right, np);
4247 }
4248 else
4249 {
4250 /* Else leave this branch as one level,
4251 but fill in `parent' fields. */
4252 np = *head;
4253 np->parent = parent;
4254 for (; np->right; np = np->right)
4255 np->right->parent = np;
4256 }
4257 }
4258}
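/* Illustrative example of the splitting above: given the sorted chain
   1 -> 3 -> 5 -> 7 -> 9 with no cost table, the midpoint computation
   picks 5 as the pivot; 1 -> 3 becomes its left branch and 7 -> 9 its
   right branch, each left as a short chain, since lists of two or
   fewer nodes are not split further.  */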
4259\f
4260/* Search the parent sections of the case node tree
4261 to see if a test for the lower bound of NODE would be redundant.
4262 INDEX_TYPE is the type of the index expression.
4263
4264 The instructions to generate the case decision tree are
4265 output in the same order as nodes are processed so it is
4266 known that if a parent node checks the range of the current
4267 node minus one that the current node is bounded at its lower
4268 span. Thus the test would be redundant. */
4269
4270static int
4271node_has_low_bound (node, index_type)
4272 case_node_ptr node;
4273 tree index_type;
4274{
4275 tree low_minus_one;
4276 case_node_ptr pnode;
4277
4278 /* If the lower bound of this node is the lowest value in the index type,
4279 we need not test it. */
4280
4281 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4282 return 1;
4283
4284 /* If this node has a left branch, the value at the left must be less
4285 than that at this node, so it cannot be bounded at the bottom and
4286 we need not bother testing any further. */
4287
4288 if (node->left)
4289 return 0;
4290
4291 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4292 node->low, integer_one_node));
4293
4294 /* If the subtraction above overflowed, we can't verify anything.
4295 Otherwise, look for a parent that tests our value - 1. */
4296
4297 if (! tree_int_cst_lt (low_minus_one, node->low))
4298 return 0;
4299
4300 for (pnode = node->parent; pnode; pnode = pnode->parent)
4301 if (tree_int_cst_equal (low_minus_one, pnode->high))
4302 return 1;
4303
4304 return 0;
4305}
4306
4307/* Search the parent sections of the case node tree
4308 to see if a test for the upper bound of NODE would be redundant.
4309 INDEX_TYPE is the type of the index expression.
4310
4311 The instructions to generate the case decision tree are
4312 output in the same order as nodes are processed so it is
4313 known that if a parent node checks the range of the current
4314 node plus one that the current node is bounded at its upper
4315 span. Thus the test would be redundant. */
4316
4317static int
4318node_has_high_bound (node, index_type)
4319 case_node_ptr node;
4320 tree index_type;
4321{
4322 tree high_plus_one;
4323 case_node_ptr pnode;
4324
4325 /* If the upper bound of this node is the highest value in the type
4326 of the index expression, we need not test against it. */
4327
4328 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4329 return 1;
4330
4331 /* If this node has a right branch, the value at the right must be greater
4332 than that at this node, so it cannot be bounded at the top and
4333 we need not bother testing any further. */
4334
4335 if (node->right)
4336 return 0;
4337
4338 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4339 node->high, integer_one_node));
4340
4341 /* If the addition above overflowed, we can't verify anything.
4342 Otherwise, look for a parent that tests our value + 1. */
4343
4344 if (! tree_int_cst_lt (node->high, high_plus_one))
4345 return 0;
4346
4347 for (pnode = node->parent; pnode; pnode = pnode->parent)
4348 if (tree_int_cst_equal (high_plus_one, pnode->low))
4349 return 1;
4350
4351 return 0;
4352}
4353
4354/* Search the parent sections of the
4355 case node tree to see if both tests for the upper and lower
4356 bounds of NODE would be redundant. */
4357
4358static int
4359node_is_bounded (node, index_type)
4360 case_node_ptr node;
4361 tree index_type;
4362{
4363 return (node_has_low_bound (node, index_type)
4364 && node_has_high_bound (node, index_type));
4365}
4366
4367/* Emit an unconditional jump to LABEL unless it would be dead code. */
4368
4369static void
4370emit_jump_if_reachable (label)
4371 rtx label;
4372{
4373 if (GET_CODE (get_last_insn ()) != BARRIER)
4374 emit_jump (label);
4375}
4376\f
4377/* Emit step-by-step code to select a case for the value of INDEX.
4378 The thus generated decision tree follows the form of the
4379 case-node binary tree NODE, whose nodes represent test conditions.
4380 INDEX_TYPE is the type of the index of the switch.
4381
4382 Care is taken to prune redundant tests from the decision tree
4383 by detecting any boundary conditions already checked by
4384 emitted rtx. (See node_has_high_bound, node_has_low_bound
4385 and node_is_bounded, above.)
4386
4387 Where the test conditions can be shown to be redundant we emit
4388 an unconditional jump to the target code. As a further
4389 optimization, the subordinates of a tree node are examined to
4390 check for bounded nodes. In this case conditional and/or
4391 unconditional jumps as a result of the boundary check for the
4392 current node are arranged to target the subordinates' associated
4393 code for out-of-bound conditions on the current node.
4394
4395 We can assume that when control reaches the code generated here,
4396 the index value has already been compared with the parents
4397 of this node, and determined to be on the same side of each parent
4398 as this node is. Thus, if this node tests for the value 51,
4399 and a parent tested for 52, we don't need to consider
4400 the possibility of a value greater than 51. If another parent
4401 tests for the value 50, then this node need not test anything. */
4402
4403static void
4404emit_case_nodes (index, node, default_label, index_type)
4405 rtx index;
4406 case_node_ptr node;
4407 rtx default_label;
4408 tree index_type;
4409{
4410 /* If INDEX has an unsigned type, we must make unsigned branches. */
4411 int unsignedp = TREE_UNSIGNED (index_type);
4412 typedef rtx rtx_function ();
4413 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4414 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4415 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4416 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4417 enum machine_mode mode = GET_MODE (index);
4418
4419 /* See if our parents have already tested everything for us.
4420 If they have, emit an unconditional jump for this node. */
4421 if (node_is_bounded (node, index_type))
4422 emit_jump (label_rtx (node->code_label));
4423
4424 else if (tree_int_cst_equal (node->low, node->high))
4425 {
4426 /* Node is single valued. First see if the index expression matches
4427 this node and then check our children, if any. */
4428
4429 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4430 label_rtx (node->code_label), unsignedp);
4431
4432 if (node->right != 0 && node->left != 0)
4433 {
4434 /* This node has children on both sides.
4435 Dispatch to one side or the other
4436 by comparing the index value with this node's value.
4437 If one subtree is bounded, check that one first,
4438 so we can avoid real branches in the tree. */
4439
4440 if (node_is_bounded (node->right, index_type))
4441 {
4442 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4443 VOIDmode, 0),
4444 GT, NULL_RTX, mode, unsignedp, 0);
4445
4446 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4447 emit_case_nodes (index, node->left, default_label, index_type);
4448 }
4449
4450 else if (node_is_bounded (node->left, index_type))
4451 {
4452 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4453 VOIDmode, 0),
4454 LT, NULL_RTX, mode, unsignedp, 0);
4455 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4456 emit_case_nodes (index, node->right, default_label, index_type);
4457 }
4458
4459 else
4460 {
4461 /* Neither node is bounded. First distinguish the two sides;
4462 then emit the code for one side at a time. */
4463
4464 tree test_label
4465 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4466
4467 /* See if the value is on the right. */
4468 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4469 VOIDmode, 0),
4470 GT, NULL_RTX, mode, unsignedp, 0);
4471 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4472
4473 /* Value must be on the left.
4474 Handle the left-hand subtree. */
4475 emit_case_nodes (index, node->left, default_label, index_type);
4476 /* If left-hand subtree does nothing,
4477 go to default. */
4478 emit_jump_if_reachable (default_label);
4479
4480 /* Code branches here for the right-hand subtree. */
4481 expand_label (test_label);
4482 emit_case_nodes (index, node->right, default_label, index_type);
4483 }
4484 }
4485
4486 else if (node->right != 0 && node->left == 0)
4487 {
4488 /* Here we have a right child but no left, so we issue a conditional
4489 branch to default and process the right child.
4490
4491 Omit the conditional branch to default if it would guard only a
4492 single right child; it costs too much space to save so little time. */
4493
4494 if (node->right->right || node->right->left
4495 || !tree_int_cst_equal (node->right->low, node->right->high))
4496 {
4497 if (!node_has_low_bound (node, index_type))
4498 {
4499 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4500 VOIDmode, 0),
4501 LT, NULL_RTX, mode, unsignedp, 0);
4502 emit_jump_insn ((*gen_blt_pat) (default_label));
4503 }
4504
4505 emit_case_nodes (index, node->right, default_label, index_type);
4506 }
4507 else
4508 /* We cannot process node->right normally
4509 since we haven't ruled out the numbers less than
4510 this node's value. So handle node->right explicitly. */
4511 do_jump_if_equal (index,
4512 expand_expr (node->right->low, NULL_RTX,
4513 VOIDmode, 0),
4514 label_rtx (node->right->code_label), unsignedp);
4515 }
4516
4517 else if (node->right == 0 && node->left != 0)
4518 {
4519 /* Just one subtree, on the left. */
4520
4521#if 0 /* The following code and comment were formerly part
4522 of the condition here, but they didn't work
4523 and I don't understand what the idea was. -- rms. */
4524 /* If our "most probable entry" is less probable
4525 than the default label, emit a jump to
4526 the default label using condition codes
4527 already lying around. With no right branch,
4528 a branch-greater-than will get us to the default
4529 label correctly. */
4530 if (use_cost_table
4531 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4532 ;
4533#endif /* 0 */
4534 if (node->left->left || node->left->right
4535 || !tree_int_cst_equal (node->left->low, node->left->high))
4536 {
4537 if (!node_has_high_bound (node, index_type))
4538 {
4539 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4540 VOIDmode, 0),
4541 GT, NULL_RTX, mode, unsignedp, 0);
4542 emit_jump_insn ((*gen_bgt_pat) (default_label));
4543 }
4544
4545 emit_case_nodes (index, node->left, default_label, index_type);
4546 }
4547 else
4548 /* We cannot process node->left normally
4549 since we haven't ruled out the numbers greater than
4550 this node's value. So handle node->left explicitly. */
4551 do_jump_if_equal (index,
4552 expand_expr (node->left->low, NULL_RTX,
4553 VOIDmode, 0),
4554 label_rtx (node->left->code_label), unsignedp);
4555 }
4556 }
4557 else
4558 {
4559 /* Node is a range. These cases are very similar to those for a single
4560 value, except that we do not start by testing whether this node
4561 is the one to branch to. */
4562
4563 if (node->right != 0 && node->left != 0)
4564 {
4565 /* Node has subtrees on both sides.
4566 If the right-hand subtree is bounded,
4567 test for it first, since we can go straight there.
4568 Otherwise, we need to make a branch in the control structure,
4569 then handle the two subtrees. */
4570 tree test_label = 0;
4571
4572 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4573 VOIDmode, 0),
4574 GT, NULL_RTX, mode, unsignedp, 0);
4575
4576 if (node_is_bounded (node->right, index_type))
4577 /* Right hand node is fully bounded so we can eliminate any
4578 testing and branch directly to the target code. */
4579 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4580 else
4581 {
4582 /* Right hand node requires testing.
4583 Branch to a label where we will handle it later. */
4584
4585 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4586 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4587 }
4588
4589 /* Value belongs to this node or to the left-hand subtree. */
4590
4591 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4592 GE, NULL_RTX, mode, unsignedp, 0);
4593 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4594
4595 /* Handle the left-hand subtree. */
4596 emit_case_nodes (index, node->left, default_label, index_type);
4597
4598 /* If right node had to be handled later, do that now. */
4599
4600 if (test_label)
4601 {
4602 /* If the left-hand subtree fell through,
4603 don't let it fall into the right-hand subtree. */
4604 emit_jump_if_reachable (default_label);
4605
4606 expand_label (test_label);
4607 emit_case_nodes (index, node->right, default_label, index_type);
4608 }
4609 }
4610
4611 else if (node->right != 0 && node->left == 0)
4612 {
4613 /* Deal with values to the left of this node,
4614 if they are possible. */
4615 if (!node_has_low_bound (node, index_type))
4616 {
4617 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
4618 VOIDmode, 0),
4619 LT, NULL_RTX, mode, unsignedp, 0);
4620 emit_jump_insn ((*gen_blt_pat) (default_label));
4621 }
4622
4623 /* Value belongs to this node or to the right-hand subtree. */
4624
4625 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4626 VOIDmode, 0),
4627 LE, NULL_RTX, mode, unsignedp, 0);
4628 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4629
4630 emit_case_nodes (index, node->right, default_label, index_type);
4631 }
4632
4633 else if (node->right == 0 && node->left != 0)
4634 {
4635 /* Deal with values to the right of this node,
4636 if they are possible. */
4637 if (!node_has_high_bound (node, index_type))
4638 {
4639 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4640 VOIDmode, 0),
4641 GT, NULL_RTX, mode, unsignedp, 0);
4642 emit_jump_insn ((*gen_bgt_pat) (default_label));
4643 }
4644
4645 /* Value belongs to this node or to the left-hand subtree. */
4646
4647 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
4648 GE, NULL_RTX, mode, unsignedp, 0);
4649 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4650
4651 emit_case_nodes (index, node->left, default_label, index_type);
4652 }
4653
4654 else
4655 {
4656 /* Node has no children so we check low and high bounds to remove
4657 redundant tests. Only one of the bounds can exist,
4658 since otherwise this node is bounded--a case tested already. */
4659
4660 if (!node_has_high_bound (node, index_type))
4661 {
4662 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
4663 VOIDmode, 0),
4664 GT, NULL_RTX, mode, unsignedp, 0);
4665 emit_jump_insn ((*gen_bgt_pat) (default_label));
4666 }
4667
4668 if (!node_has_low_bound (node, index_type))
4669 {
4670 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
4671 VOIDmode, 0),
4672 LT, NULL_RTX, mode, unsignedp, 0);
4673 emit_jump_insn ((*gen_blt_pat) (default_label));
4674 }
4675
4676 emit_jump (label_rtx (node->code_label));
4677 }
4678 }
4679}
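/* Illustrative example of the pruning above: if the node for value 51
   has one ancestor whose range ends at 50 and another whose range
   begins at 52, then node_has_low_bound and node_has_high_bound both
   succeed, control can only reach this node with index == 51, and the
   test collapses to the unconditional jump emitted at the top of this
   function.  */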
4680\f
4681/* These routines are used by the loop unrolling code. They copy BLOCK trees
4682 so that the debugging info will be correct for the unrolled loop. */
4683
94dc8b56 4684/* Indexed by block number, contains a pointer to the N'th block node. */
28d81abb 4685
94dc8b56 4686static tree *block_vector;
28d81abb
RK
4687
4688void
4689find_loop_tree_blocks ()
4690{
4691 tree block = DECL_INITIAL (current_function_decl);
4692
4693 /* The first block is for the function body, and does not have
4694 corresponding block notes. Don't include it in the block vector. */
4695 block = BLOCK_SUBBLOCKS (block);
4696
4697 block_vector = identify_blocks (block, get_insns ());
4698}
4699
4700void
4701unroll_block_trees ()
4702{
4703 tree block = DECL_INITIAL (current_function_decl);
4704
4705 reorder_blocks (block_vector, block, get_insns ());
4706}
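/* Illustrative sketch (hypothetical caller): the loop unroller is
   expected to bracket its copying with these two routines, roughly

     find_loop_tree_blocks ();
     ... duplicate insns, including NOTE_INSN_BLOCK_BEG/END notes ...
     unroll_block_trees ();

   so that reorder_blocks can rebuild a BLOCK tree that matches the
   duplicated notes.  */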
4707