/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

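/* For example (a sketch, not generated code), a C front end expands
   `if (cond) stmt' with a call pair such as

	expand_start_cond (cond, 0);	-- emit the test; 0 = no exit label
	... expand RTL for the then-clause ...
	expand_end_cond ();		-- define the label the test branches to

   both functions are defined later in this file.  */
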
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

/* Stack allocation level in which temporaries for TARGET_EXPRs live.  */
extern int target_temp_slot_level;

extern int temp_slot_level;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;			/* AVL balance factor while the tree is built */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

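/* For example (a sketch): the GNU C switch

	switch (i) { case 1: ... case 3 ... 5: ... default: ... }

   yields one case_node with low == high == the constant 1, and one with
   low == 3, high == 5 for the range label.  The default label is not a
   node; it is recorded separately (see `default_label' in the case_stmt
   nesting data below).  */
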
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

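/* For example (a sketch): while expanding

	while (c) { if (p) break; }

   the `while' pushes one nesting object (chained on loop_stack) and the
   `if' pushes another (chained on cond_stack); nesting_stack sees both,
   innermost first, so `break' can search outward for a visible construct
   with a non-null exit_label.  */
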
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups; must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACK too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK) \
do { struct nesting *target = STACK; \
     struct nesting *this; \
     do { this = nesting_stack; \
	  if (loop_stack == this) \
	    loop_stack = loop_stack->next; \
	  if (cond_stack == this) \
	    cond_stack = cond_stack->next; \
	  if (block_stack == this) \
	    block_stack = block_stack->next; \
	  if (stack_block_stack == this) \
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this) \
	    case_stack = case_stack->next; \
	  nesting_depth = nesting_stack->depth - 1; \
	  nesting_stack = this->all; \
	  obstack_free (&stmt_obstack, this); } \
     while (this != target); } while (0)
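
/* For example (a sketch), an `expand_end_...' function finishes by
   discarding its construct, and everything nested more deeply, with

	POPSTACK (cond_stack);
*/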
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
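
/* A sketch of the situation that needs a fixup: a forward goto leaving
   a contour that has a stack level to restore, e.g.

	  goto out;		-- `out' not defined yet: expand_fixup
	  { int a[n]; ... }	-- exiting here must restore the stack
	out: ;

   The jump insn is emitted immediately; the stack-restore (and any
   cleanup) insns are inserted just before it once the label and the
   exited contours are known.  */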

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static void expand_goto_internal PROTO((tree, rtx, rtx));
static int expand_fixup PROTO((tree, rtx, rtx));
static void fixup_gotos PROTO((struct nesting *, rtx, tree,
			       rtx, int));
static void expand_null_return_1 PROTO((rtx, int));
static void expand_value_return PROTO((rtx));
static int tail_recursion_args PROTO((tree, tree));
static void expand_cleanups PROTO((tree, tree, int, int));
static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs PROTO((case_node_ptr));
static void group_case_nodes PROTO((case_node_ptr));
static void balance_case_nodes PROTO((case_node_ptr *,
				      case_node_ptr));
static int node_has_low_bound PROTO((case_node_ptr, tree));
static int node_has_high_bound PROTO((case_node_ptr, tree));
static int node_is_bounded PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable PROTO((rtx));
static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (flag_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
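
/* A sketch of the nonlocal case handled above, using GNU C nested
   functions:

	void f () { l: ; void g () { goto l; } ... }

   Expanding the goto in `g' finds that `l' belongs to `f'
   (context != current_function_decl) and jumps through f's nonlocal
   goto handler instead of emitting a direct jump.  */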

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
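
/* For example, `asm ("nop");' reaches here as a STRING_CST and becomes
   a single ASM_INPUT insn holding the literal template text.  */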

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile.  */
  if (noutputs == 0)
    vol = 1;

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  /* The only use of BODY is if no outputs are specified, so set
     it volatile, at least for now.  */
  MEM_VOLATILE_P (body) = 1;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
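
/* A sketch of the in-out handling above: for GNU C extended asm such as

	asm ("..." : "+r" (x));

   the `+' in the output constraint is rewritten to `=', and a matching
   input operand is appended whose constraint is the digit string "0",
   so the same location is treated as both read and written.  */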
1515\f
1516/* Generate RTL to evaluate the expression EXP
1517 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1518
1519void
1520expand_expr_stmt (exp)
1521 tree exp;
1522{
1523 /* If -W, warn about statements with no side effects,
1524 except for an explicit cast to void (e.g. for assert()), and
1525 except inside a ({...}) where they may be useful. */
1526 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1527 {
1528 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1529 && !(TREE_CODE (exp) == CONVERT_EXPR
1530 && TREE_TYPE (exp) == void_type_node))
1531 warning_with_file_and_line (emit_filename, emit_lineno,
1532 "statement with no effect");
1533 else if (warn_unused)
1534 warn_if_unused_value (exp);
1535 }
b6ec8c5f
RK
1536
1537 /* If EXP is of function type and we are expanding statements for
1538 value, convert it to pointer-to-function. */
1539 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1540 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1541
28d81abb
RK
1542 last_expr_type = TREE_TYPE (exp);
1543 if (! flag_syntax_only)
37366632
RK
1544 last_expr_value = expand_expr (exp,
1545 (expr_stmts_for_value
1546 ? NULL_RTX : const0_rtx),
28d81abb
RK
1547 VOIDmode, 0);
1548
1549 /* If all we do is reference a volatile value in memory,
1550 copy it to a register to be sure it is actually touched. */
1551 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1552 && TREE_THIS_VOLATILE (exp))
1553 {
6a5bbbe6
RS
1554 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1555 ;
1556 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
28d81abb
RK
1557 copy_to_reg (last_expr_value);
1558 else
ddbe9812
RS
1559 {
1560 rtx lab = gen_label_rtx ();
1561
1562 /* Compare the value with itself to reference it. */
1563 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1564 expand_expr (TYPE_SIZE (last_expr_type),
37366632 1565 NULL_RTX, VOIDmode, 0),
1566 BLKmode, 0,
1567 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1568 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1569 emit_label (lab);
1570 }
1571 }
1572
1573 /* If this expression is part of a ({...}) and is in memory, we may have
1574 to preserve temporaries. */
1575 preserve_temp_slots (last_expr_value);
1576
1577 /* Free any temporaries used to evaluate this expression. Any temporary
1578 used as a result of this expression will already have been preserved
1579 above. */
1580 free_temp_slots ();
1581
1582 emit_queue ();
1583}
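/* For illustration (an invented example, not original source): the
   expr_stmts_for_value machinery above serves GNU C statement
   expressions.  Given

       int y = ({ int t = f (x); t + 1; });

   the front end expands the braced body with expr_stmts_for_value
   nonzero, so the final expr-stmt `t + 1' leaves its type and RTL in
   last_expr_type and last_expr_value instead of being discarded.  */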
1584
1585/* Warn if EXP contains any computations whose results are not used.
1586 Return 1 if a warning is printed; 0 otherwise. */
1587
150a992a 1588int
1589warn_if_unused_value (exp)
1590 tree exp;
1591{
1592 if (TREE_USED (exp))
1593 return 0;
1594
1595 switch (TREE_CODE (exp))
1596 {
1597 case PREINCREMENT_EXPR:
1598 case POSTINCREMENT_EXPR:
1599 case PREDECREMENT_EXPR:
1600 case POSTDECREMENT_EXPR:
1601 case MODIFY_EXPR:
1602 case INIT_EXPR:
1603 case TARGET_EXPR:
1604 case CALL_EXPR:
1605 case METHOD_CALL_EXPR:
1606 case RTL_EXPR:
81797aba 1607 case TRY_CATCH_EXPR:
1608 case WITH_CLEANUP_EXPR:
1609 case EXIT_EXPR:
1610 /* We don't warn about COND_EXPR because it may be a useful
1611 construct if either arm contains a side effect. */
1612 case COND_EXPR:
1613 return 0;
1614
1615 case BIND_EXPR:
1616 /* For a binding, warn if no side effect within it. */
1617 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1618
1619 case SAVE_EXPR:
1620 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1621
1622 case TRUTH_ORIF_EXPR:
1623 case TRUTH_ANDIF_EXPR:
1624 /* In && or ||, warn if 2nd operand has no side effect. */
1625 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1626
1627 case COMPOUND_EXPR:
1628 if (TREE_NO_UNUSED_WARNING (exp))
1629 return 0;
1630 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1631 return 1;
1632 /* Let people do `(foo (), 0)' without a warning. */
1633 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1634 return 0;
1635 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1636
1637 case NOP_EXPR:
1638 case CONVERT_EXPR:
b4ac57ab 1639 case NON_LVALUE_EXPR:
1640 /* Don't warn about values cast to void. */
1641 if (TREE_TYPE (exp) == void_type_node)
1642 return 0;
1643 /* Don't warn about conversions not explicit in the user's program. */
1644 if (TREE_NO_UNUSED_WARNING (exp))
1645 return 0;
1646 /* Assignment to a cast usually results in a cast of a modify.
1647 Don't complain about that. There can be an arbitrary number of
1648 casts before the modify, so we must loop until we find the first
1649 non-cast expression and then test to see if that is a modify. */
1650 {
1651 tree tem = TREE_OPERAND (exp, 0);
1652
1653 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1654 tem = TREE_OPERAND (tem, 0);
1655
1656 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1657 || TREE_CODE (tem) == CALL_EXPR)
1658 return 0;
1659 }
d1e1adfb 1660 goto warn;
28d81abb 1661
1662 case INDIRECT_REF:
1663 /* Don't warn about automatic dereferencing of references, since
1664 the user cannot control it. */
1665 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1666 return warn_if_unused_value (TREE_OPERAND (exp, 0));
0f41302f 1667 /* ... fall through ... */
d1e1adfb 1668
28d81abb 1669 default:
1670 /* Referencing a volatile value is a side effect, so don't warn. */
1671 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1672 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1673 && TREE_THIS_VOLATILE (exp))
1674 return 0;
d1e1adfb 1675 warn:
1676 warning_with_file_and_line (emit_filename, emit_lineno,
1677 "value computed is not used");
1678 return 1;
1679 }
1680}
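/* For illustration (invented examples of the rules above): a C front
   end calling warn_if_unused_value would warn for the first statement
   below and stay silent for the rest:

       x + 1;          warns: value computed is not used
       (void) f ();    explicit cast to void suppresses the warning
       (f (), 0);      constant second operand of the comma is allowed
       v;              no warning if v is declared volatile

   Whether the warning is requested at all depends on -W and
   -Wunused, as tested in expand_expr_stmt above.  */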
1681
1682/* Clear out the memory of the last expression evaluated. */
1683
1684void
1685clear_last_expr ()
1686{
1687 last_expr_type = 0;
1688}
1689
1690/* Begin a statement which will return a value.
1691 Return the RTL_EXPR for this statement expr.
1692 The caller must save that value and pass it to expand_end_stmt_expr. */
1693
1694tree
1695expand_start_stmt_expr ()
1696{
1697 int momentary;
1698 tree t;
1699
1700 /* Make the RTL_EXPR node temporary, not momentary,
1701 so that rtl_expr_chain doesn't become garbage. */
1702 momentary = suspend_momentary ();
1703 t = make_node (RTL_EXPR);
28d81abb 1704 resume_momentary (momentary);
33c6ab80 1705 do_pending_stack_adjust ();
e922dbad 1706 start_sequence_for_rtl_expr (t);
1707 NO_DEFER_POP;
1708 expr_stmts_for_value++;
1709 return t;
1710}
1711
1712/* Restore the previous state at the end of a statement that returns a value.
1713 Returns a tree node representing the statement's value and the
1714 insns to compute the value.
1715
1716 The nodes of that expression have been freed by now, so we cannot use them.
1717 But we don't want to do that anyway; the expression has already been
1718 evaluated and now we just want to use the value. So generate an RTL_EXPR
1719 with the proper type and RTL value.
1720
1721 If the last substatement was not an expression,
1722 return something with type `void'. */
1723
1724tree
1725expand_end_stmt_expr (t)
1726 tree t;
1727{
1728 OK_DEFER_POP;
1729
1730 if (last_expr_type == 0)
1731 {
1732 last_expr_type = void_type_node;
1733 last_expr_value = const0_rtx;
1734 }
1735 else if (last_expr_value == 0)
1736 /* There are some cases where this can happen, such as when the
1737 statement is of void type. */
1738 last_expr_value = const0_rtx;
1739 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1740 /* Remove any possible QUEUED. */
1741 last_expr_value = protect_from_queue (last_expr_value, 0);
1742
1743 emit_queue ();
1744
1745 TREE_TYPE (t) = last_expr_type;
1746 RTL_EXPR_RTL (t) = last_expr_value;
1747 RTL_EXPR_SEQUENCE (t) = get_insns ();
1748
1749 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1750
1751 end_sequence ();
1752
1753 /* Don't consider deleting this expr or containing exprs at tree level. */
1754 TREE_SIDE_EFFECTS (t) = 1;
1755 /* Propagate volatility of the actual RTL expr. */
1756 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1757
1758 last_expr_type = 0;
1759 expr_stmts_for_value--;
1760
1761 return t;
1762}
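/* For illustration (a hedged sketch of the caller's protocol):

       tree t = expand_start_stmt_expr ();
       ... expand each statement of the ({...}) body ...
       t = expand_end_stmt_expr (t);

   The returned RTL_EXPR carries the saved insn sequence and the value
   of the last expr-stmt, so the caller can treat T like any other
   expression tree.  */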
1763\f
1764/* Generate RTL for the start of an if-then. COND is the expression
1765 whose truth should be tested.
1766
1767 If EXITFLAG is nonzero, this conditional is visible to
1768 `exit_something'. */
1769
1770void
1771expand_start_cond (cond, exitflag)
1772 tree cond;
1773 int exitflag;
1774{
1775 struct nesting *thiscond = ALLOC_NESTING ();
1776
1777 /* Make an entry on cond_stack for the cond we are entering. */
1778
1779 thiscond->next = cond_stack;
1780 thiscond->all = nesting_stack;
1781 thiscond->depth = ++nesting_depth;
1782 thiscond->data.cond.next_label = gen_label_rtx ();
1783 /* Before we encounter an `else', we don't need a separate exit label
1784 unless there are supposed to be exit statements
1785 to exit this conditional. */
1786 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1787 thiscond->data.cond.endif_label = thiscond->exit_label;
1788 cond_stack = thiscond;
1789 nesting_stack = thiscond;
1790
b93a436e 1791 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1792}
1793
1794/* Generate RTL between then-clause and the elseif-clause
1795 of an if-then-elseif-.... */
1796
1797void
1798expand_start_elseif (cond)
1799 tree cond;
1800{
1801 if (cond_stack->data.cond.endif_label == 0)
1802 cond_stack->data.cond.endif_label = gen_label_rtx ();
1803 emit_jump (cond_stack->data.cond.endif_label);
1804 emit_label (cond_stack->data.cond.next_label);
1805 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 1806 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1807}
1808
1809/* Generate RTL between the then-clause and the else-clause
1810 of an if-then-else. */
1811
1812void
1813expand_start_else ()
1814{
1815 if (cond_stack->data.cond.endif_label == 0)
1816 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 1817
1818 emit_jump (cond_stack->data.cond.endif_label);
1819 emit_label (cond_stack->data.cond.next_label);
0f41302f 1820 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1821}
1822
1823/* After calling expand_start_else, turn this "else" into an "else if"
1824 by providing another condition. */
1825
1826void
1827expand_elseif (cond)
1828 tree cond;
1829{
1830 cond_stack->data.cond.next_label = gen_label_rtx ();
1831 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1832}
1833
1834/* Generate RTL for the end of an if-then.
1835 Pop the record for it off of cond_stack. */
1836
1837void
1838expand_end_cond ()
1839{
1840 struct nesting *thiscond = cond_stack;
1841
1842 do_pending_stack_adjust ();
1843 if (thiscond->data.cond.next_label)
1844 emit_label (thiscond->data.cond.next_label);
1845 if (thiscond->data.cond.endif_label)
1846 emit_label (thiscond->data.cond.endif_label);
1847
1848 POPSTACK (cond_stack);
1849 last_expr_type = 0;
1850}
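/* For illustration (a hedged sketch; the calls are made by the parser
   as it reads the source): for

       if (a) s1; else if (b) s2; else s3;

   the expansion sequence is roughly

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();

   Each test jumps to next_label when false, each finished arm jumps
   to endif_label, and expand_end_cond emits the pending labels.  */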
1851
1852
1853\f
1854/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1855 loop should be exited by `exit_something'. This is a loop for which
1856 `expand_continue' will jump to the top of the loop.
1857
1858 Make an entry on loop_stack to record the labels associated with
1859 this loop. */
1860
1861struct nesting *
1862expand_start_loop (exit_flag)
1863 int exit_flag;
1864{
1865 register struct nesting *thisloop = ALLOC_NESTING ();
1866
1867 /* Make an entry on loop_stack for the loop we are entering. */
1868
1869 thisloop->next = loop_stack;
1870 thisloop->all = nesting_stack;
1871 thisloop->depth = ++nesting_depth;
1872 thisloop->data.loop.start_label = gen_label_rtx ();
1873 thisloop->data.loop.end_label = gen_label_rtx ();
8afad312 1874 thisloop->data.loop.alt_end_label = 0;
1875 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1876 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1877 loop_stack = thisloop;
1878 nesting_stack = thisloop;
1879
1880 do_pending_stack_adjust ();
1881 emit_queue ();
37366632 1882 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1883 emit_label (thisloop->data.loop.start_label);
1884
1885 return thisloop;
1886}
1887
1888/* Like expand_start_loop but for a loop where the continuation point
1889 (for expand_continue_loop) will be specified explicitly. */
1890
1891struct nesting *
1892expand_start_loop_continue_elsewhere (exit_flag)
1893 int exit_flag;
1894{
1895 struct nesting *thisloop = expand_start_loop (exit_flag);
1896 loop_stack->data.loop.continue_label = gen_label_rtx ();
1897 return thisloop;
1898}
1899
1900/* Specify the continuation point for a loop started with
1901 expand_start_loop_continue_elsewhere.
1902 Use this at the point in the code to which a continue statement
1903 should jump. */
1904
1905void
1906expand_loop_continue_here ()
1907{
1908 do_pending_stack_adjust ();
37366632 1909 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1910 emit_label (loop_stack->data.loop.continue_label);
1911}
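/* For illustration (a hedged sketch): expanding
   `for (init; cond; incr) body' with an explicit continue point
   might go

       ... expand init ...
       expand_start_loop_continue_elsewhere (1);
       ... test cond, e.g. with expand_exit_loop_if_false ...
       ... expand body ...
       expand_loop_continue_here ();
       ... expand incr ...
       expand_end_loop ();

   so that `continue' statements jump to the incr code instead of the
   start label.  */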
1912
1913/* Finish a loop. Generate a jump back to the top and the loop-exit label.
1914 Pop the block off of loop_stack. */
1915
1916void
1917expand_end_loop ()
1918{
1919 register rtx insn;
1920 register rtx start_label;
1921 rtx last_test_insn = 0;
1922 int num_insns = 0;
ca695ac9 1923
1924 insn = get_last_insn ();
1925 start_label = loop_stack->data.loop.start_label;
1926
1927 /* Mark the continue-point at the top of the loop if none elsewhere. */
1928 if (start_label == loop_stack->data.loop.continue_label)
1929 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1930
1931 do_pending_stack_adjust ();
1932
1933 /* If optimizing, perhaps reorder the loop. If the loop
1934 starts with a conditional exit, roll that to the end
1935 where it will optimize together with the jump back.
1936
1937 We look for the last conditional branch to the exit that we encounter
1938 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1939 branch to the exit first, use it.
1940
1941 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1942 because moving them is not valid. */
1943
1944 if (optimize
1945 &&
1946 ! (GET_CODE (insn) == JUMP_INSN
1947 && GET_CODE (PATTERN (insn)) == SET
1948 && SET_DEST (PATTERN (insn)) == pc_rtx
1949 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1950 {
1951 /* Scan insns from the top of the loop looking for a qualified
1952 conditional exit. */
1953 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1954 insn = NEXT_INSN (insn))
1955 {
1956 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1957 break;
1958
1959 if (GET_CODE (insn) == NOTE
1960 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1961 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1962 break;
1963
1964 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1965 num_insns++;
1966
1967 if (last_test_insn && num_insns > 30)
1968 break;
1969
1970 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1971 && SET_DEST (PATTERN (insn)) == pc_rtx
1972 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1973 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1974 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1975 == loop_stack->data.loop.end_label)
1976 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1977 == loop_stack->data.loop.alt_end_label)))
28d81abb 1978 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1979 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1980 == loop_stack->data.loop.end_label)
1981 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1982 == loop_stack->data.loop.alt_end_label)))))
1983 last_test_insn = insn;
1984
1985 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1986 && GET_CODE (PATTERN (insn)) == SET
1987 && SET_DEST (PATTERN (insn)) == pc_rtx
1988 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1989 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
1990 == loop_stack->data.loop.end_label)
1991 || (XEXP (SET_SRC (PATTERN (insn)), 0)
1992 == loop_stack->data.loop.alt_end_label)))
1993 /* Include BARRIER. */
1994 last_test_insn = NEXT_INSN (insn);
1995 }
1996
1997 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1998 {
1999 /* We found one. Move everything from there up
2000 to the end of the loop, and add a jump into the loop
2001 to jump to there. */
2002 register rtx newstart_label = gen_label_rtx ();
2003 register rtx start_move = start_label;
2004
b4ac57ab 2005 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2006 then we want to move this note also. */
2007 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2008 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2009 == NOTE_INSN_LOOP_CONT))
2010 start_move = PREV_INSN (start_move);
2011
2012 emit_label_after (newstart_label, PREV_INSN (start_move));
2013 reorder_insns (start_move, last_test_insn, get_last_insn ());
2014 emit_jump_insn_after (gen_jump (start_label),
2015 PREV_INSN (newstart_label));
2016 emit_barrier_after (PREV_INSN (newstart_label));
2017 start_label = newstart_label;
2018 }
2019 }
2020
2021 emit_jump (start_label);
37366632 2022 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2023 emit_label (loop_stack->data.loop.end_label);
2024
2025 POPSTACK (loop_stack);
2026
2027 last_expr_type = 0;
2028}
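/* For illustration (a hedged sketch of the reordering above): a loop
   that begins with a conditional exit, as in `while (c) body', starts
   out as

       start:    if (! c) goto end;
                 body;
                 goto start;
       end:

   and is rearranged into

                 goto start;
       newstart: body;
       start:    if (! c) goto end;
                 goto newstart;
       end:

   so that jump optimization can later fold the final test and branch
   into a single `if (c) goto newstart', one jump per iteration.  */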
2029
2030/* Generate a jump to the current loop's continue-point.
2031 This is usually the top of the loop, but may be specified
2032 explicitly elsewhere. If not currently inside a loop,
2033 return 0 and do nothing; caller will print an error message. */
2034
2035int
2036expand_continue_loop (whichloop)
2037 struct nesting *whichloop;
2038{
2039 last_expr_type = 0;
2040 if (whichloop == 0)
2041 whichloop = loop_stack;
2042 if (whichloop == 0)
2043 return 0;
2044 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2045 NULL_RTX);
2046 return 1;
2047}
2048
2049/* Generate a jump to exit the current loop. If not currently inside a loop,
2050 return 0 and do nothing; caller will print an error message. */
2051
2052int
2053expand_exit_loop (whichloop)
2054 struct nesting *whichloop;
2055{
2056 last_expr_type = 0;
2057 if (whichloop == 0)
2058 whichloop = loop_stack;
2059 if (whichloop == 0)
2060 return 0;
37366632 2061 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2062 return 1;
2063}
2064
2065/* Generate a conditional jump to exit the current loop if COND
2066 evaluates to zero. If not currently inside a loop,
2067 return 0 and do nothing; caller will print an error message. */
2068
2069int
2070expand_exit_loop_if_false (whichloop, cond)
2071 struct nesting *whichloop;
2072 tree cond;
2073{
2074 rtx label = gen_label_rtx ();
2075 rtx last_insn;
28d81abb 2076 last_expr_type = 0;
b93a436e 2077
2078 if (whichloop == 0)
2079 whichloop = loop_stack;
2080 if (whichloop == 0)
2081 return 0;
2082 /* In order to handle fixups, we actually create a conditional jump
2083 around an unconditional branch to exit the loop. If fixups are
2084 necessary, they go before the unconditional branch. */
d902c7ea 2085
2086
2087 do_jump (cond, NULL_RTX, label);
2088 last_insn = get_last_insn ();
2089 if (GET_CODE (last_insn) == CODE_LABEL)
2090 whichloop->data.loop.alt_end_label = last_insn;
2091 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2092 NULL_RTX);
2093 emit_label (label);
ca695ac9 2094
2095 return 1;
2096}
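/* For illustration (a hedged sketch of what the code above emits):

       if (cond) goto around;    conditional jump from do_jump
       ... goto-fixup insns, when any are needed ...
       goto loop_end;            the branch that leaves the loop
     around:

   Branching around the unconditional exit keeps the fixups ahead of
   the branch that actually leaves the loop.  */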
2097
2098/* Return non-zero if we should preserve sub-expressions as separate
2099 pseudos. We never do so if we aren't optimizing. We always do so
2100 if -fexpensive-optimizations.
2101
2102 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2103 the loop may still be a small one. */
2104
2105int
2106preserve_subexpressions_p ()
2107{
2108 rtx insn;
2109
2110 if (flag_expensive_optimizations)
2111 return 1;
2112
2113 if (optimize == 0 || loop_stack == 0)
2114 return 0;
2115
2116 insn = get_last_insn_anywhere ();
2117
2118 return (insn
2119 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2120 < n_non_fixed_regs * 3));
2121
2122}
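/* For illustration (a worked instance of the heuristic above, with an
   invented register count): if n_non_fixed_regs is 28, subexpressions
   are preserved only while the last insn's UID is within 3 * 28 = 84
   of the loop's start label, i.e. near the top of the loop.  */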
2123
2124/* Generate a jump to exit the current loop, conditional, binding contour
2125 or case statement. Not all such constructs are visible to this function,
2126 only those started with EXIT_FLAG nonzero. Individual languages use
2127 the EXIT_FLAG parameter to control which kinds of constructs you can
2128 exit this way.
2129
2130 If not currently inside anything that can be exited,
2131 return 0 and do nothing; caller will print an error message. */
2132
2133int
2134expand_exit_something ()
2135{
2136 struct nesting *n;
2137 last_expr_type = 0;
2138 for (n = nesting_stack; n; n = n->all)
2139 if (n->exit_label != 0)
2140 {
37366632 2141 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2142 return 1;
2143 }
2144
2145 return 0;
2146}
2147\f
2148/* Generate RTL to return from the current function, with no value.
2149 (That is, we do not do anything about returning any value.) */
2150
2151void
2152expand_null_return ()
2153{
2154 struct nesting *block = block_stack;
2155 rtx last_insn = 0;
2156
2157 /* Does any pending block have cleanups? */
2158
2159 while (block && block->data.block.cleanups == 0)
2160 block = block->next;
2161
2162 /* If yes, use a goto to return, since that runs cleanups. */
2163
2164 expand_null_return_1 (last_insn, block != 0);
2165}
2166
2167/* Generate RTL to return from the current function, with value VAL. */
2168
8d800403 2169static void
2170expand_value_return (val)
2171 rtx val;
2172{
2173 struct nesting *block = block_stack;
2174 rtx last_insn = get_last_insn ();
2175 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2176
2177 /* Copy the value to the return location
2178 unless it's already there. */
2179
2180 if (return_reg != val)
2181 {
2182#ifdef PROMOTE_FUNCTION_RETURN
2183 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2184 int unsignedp = TREE_UNSIGNED (type);
2185 enum machine_mode mode
2186 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2187 &unsignedp, 1);
2188
2189 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
3af6dfd8 2190 convert_move (return_reg, val, unsignedp);
2191 else
2192#endif
2193 emit_move_insn (return_reg, val);
2194 }
2195 if (GET_CODE (return_reg) == REG
2196 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
38a448ca 2197 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2198 /* Handle calls that return values in multiple non-contiguous locations.
2199 The Irix 6 ABI has examples of this. */
2200 else if (GET_CODE (return_reg) == PARALLEL)
2201 {
2202 int i;
2203
2204 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2205 {
2206 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2207
2208 if (GET_CODE (x) == REG
2209 && REGNO (x) < FIRST_PSEUDO_REGISTER)
38a448ca 2210 emit_insn (gen_rtx_USE (VOIDmode, x));
2211 }
2212 }
2213
2214 /* Does any pending block have cleanups? */
2215
2216 while (block && block->data.block.cleanups == 0)
2217 block = block->next;
2218
2219 /* If yes, use a goto to return, since that runs cleanups.
2220 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2221
2222 expand_null_return_1 (last_insn, block != 0);
2223}
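/* For illustration (a hedged sketch; the exact shape is
   target-dependent): a PARALLEL return location pairs each piece of
   the value with its byte offset, roughly

       (parallel [(expr_list (reg:DF 32) (const_int 0))
                  (expr_list (reg:DF 33) (const_int 8))])

   The loop in expand_value_return emits a USE for each hard register
   piece so that the value stays live through the return.  */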
2224
2225/* Output a return with no value. If LAST_INSN is nonzero,
2226 pretend that the return takes place after LAST_INSN.
2227 If USE_GOTO is nonzero then don't use a return instruction;
2228 go to the return label instead. This causes any cleanups
2229 of pending blocks to be executed normally. */
2230
2231static void
2232expand_null_return_1 (last_insn, use_goto)
2233 rtx last_insn;
2234 int use_goto;
2235{
2236 rtx end_label = cleanup_label ? cleanup_label : return_label;
2237
2238 clear_pending_stack_adjust ();
2239 do_pending_stack_adjust ();
2240 last_expr_type = 0;
2241
2242 /* PCC-struct return always uses an epilogue. */
2243 if (current_function_returns_pcc_struct || use_goto)
2244 {
2245 if (end_label == 0)
2246 end_label = return_label = gen_label_rtx ();
37366632 2247 expand_goto_internal (NULL_TREE, end_label, last_insn);
2248 return;
2249 }
2250
2251 /* Otherwise output a simple return-insn if one is available,
2252 unless it won't do the job. */
2253#ifdef HAVE_return
2254 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2255 {
2256 emit_jump_insn (gen_return ());
2257 emit_barrier ();
2258 return;
2259 }
2260#endif
2261
2262 /* Otherwise jump to the epilogue. */
37366632 2263 expand_goto_internal (NULL_TREE, end_label, last_insn);
2264}
2265\f
2266/* Generate RTL to evaluate the expression RETVAL and return it
2267 from the current function. */
2268
2269void
2270expand_return (retval)
2271 tree retval;
2272{
2273 /* If there are any cleanups to be performed, then they will
2274 be inserted following LAST_INSN. It is desirable
2275 that the last_insn, for such purposes, should be the
2276 last insn before computing the return value. Otherwise, cleanups
2277 which call functions can clobber the return value. */
2278 /* ??? rms: I think that is erroneous, because in C++ it would
2279 run destructors on variables that might be used in the subsequent
2280 computation of the return value. */
2281 rtx last_insn = 0;
2282 register rtx val = 0;
2283 register rtx op0;
2284 tree retval_rhs;
2285 int cleanups;
2286 struct nesting *block;
2287
2288 /* If function wants no value, give it none. */
2289 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2290 {
37366632 2291 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2292 emit_queue ();
2293 expand_null_return ();
2294 return;
2295 }
2296
2297 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2298 /* This is not sufficient. We also need to watch for cleanups of the
2299 expression we are about to expand. Unfortunately, we cannot know
2300 if it has cleanups until we expand it, and we want to change how we
2301 expand it depending upon if we need cleanups. We can't win. */
2302#if 0
28d81abb 2303 cleanups = any_pending_cleanups (1);
2304#else
2305 cleanups = 1;
2306#endif
2307
2308 if (TREE_CODE (retval) == RESULT_DECL)
2309 retval_rhs = retval;
2310 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2311 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2312 retval_rhs = TREE_OPERAND (retval, 1);
2313 else if (TREE_TYPE (retval) == void_type_node)
2314 /* Recognize tail-recursive call to void function. */
2315 retval_rhs = retval;
2316 else
2317 retval_rhs = NULL_TREE;
2318
2319 /* Only use `last_insn' if there are cleanups which must be run. */
2320 if (cleanups || cleanup_label != 0)
2321 last_insn = get_last_insn ();
2322
2323 /* Distribute return down conditional expr if either of the sides
2324 may involve tail recursion (see test below). This enhances the number
2325 of tail recursions we see. Don't do this always since it can produce
2326 sub-optimal code in some cases and we distribute assignments into
2327 conditional expressions when it would help. */
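  /* For illustration (an invented example of the distribution):

         return c ? f (x) : g (x);

     is expanded as if it had been written

         if (c) return f (x); else return g (x);

     so each arm can be considered for tail recursion on its own.  */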
2328
2329 if (optimize && retval_rhs != 0
2330 && frame_offset == 0
2331 && TREE_CODE (retval_rhs) == COND_EXPR
2332 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2333 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2334 {
2335 rtx label = gen_label_rtx ();
2336 tree expr;
2337
37366632 2338 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
dd98f85c 2339 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2340 DECL_RESULT (current_function_decl),
2341 TREE_OPERAND (retval_rhs, 1));
2342 TREE_SIDE_EFFECTS (expr) = 1;
2343 expand_return (expr);
28d81abb 2344 emit_label (label);
a0a34f94 2345
dd98f85c 2346 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2347 DECL_RESULT (current_function_decl),
2348 TREE_OPERAND (retval_rhs, 2));
2349 TREE_SIDE_EFFECTS (expr) = 1;
2350 expand_return (expr);
2351 return;
2352 }
2353
2354 /* For tail-recursive call to current function,
2355 just jump back to the beginning.
2356 It's unsafe if any auto variable in this function
2357 has its address taken; for simplicity,
2358 require stack frame to be empty. */
2359 if (optimize && retval_rhs != 0
2360 && frame_offset == 0
2361 && TREE_CODE (retval_rhs) == CALL_EXPR
2362 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2363 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2364 /* Finish checking validity, and if valid emit code
2365 to set the argument variables for the new call. */
2366 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2367 DECL_ARGUMENTS (current_function_decl)))
2368 {
2369 if (tail_recursion_label == 0)
2370 {
2371 tail_recursion_label = gen_label_rtx ();
2372 emit_label_after (tail_recursion_label,
2373 tail_recursion_reentry);
2374 }
a3229491 2375 emit_queue ();
37366632 2376 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2377 emit_barrier ();
2378 return;
2379 }
2380#ifdef HAVE_return
2381 /* This optimization is safe if there are local cleanups
2382 because expand_null_return takes care of them.
2383 ??? I think it should also be safe when there is a cleanup label,
2384 because expand_null_return takes care of them, too.
2385 Any reason why not? */
2386 if (HAVE_return && cleanup_label == 0
2387 && ! current_function_returns_pcc_struct
2388 && BRANCH_COST <= 1)
2389 {
2390 /* If this is return x == y; then generate
2391 if (x == y) return 1; else return 0;
2392 if we can do it with explicit return insns and branches are cheap,
2393 but not if we have the corresponding scc insn. */
2394 int has_scc = 0;
2395 if (retval_rhs)
2396 switch (TREE_CODE (retval_rhs))
2397 {
2398 case EQ_EXPR:
2399#ifdef HAVE_seq
2400 has_scc = HAVE_seq;
2401#endif
28d81abb 2402 case NE_EXPR:
2403#ifdef HAVE_sne
2404 has_scc = HAVE_sne;
2405#endif
28d81abb 2406 case GT_EXPR:
2407#ifdef HAVE_sgt
2408 has_scc = HAVE_sgt;
2409#endif
28d81abb 2410 case GE_EXPR:
2411#ifdef HAVE_sge
2412 has_scc = HAVE_sge;
2413#endif
28d81abb 2414 case LT_EXPR:
2415#ifdef HAVE_slt
2416 has_scc = HAVE_slt;
2417#endif
28d81abb 2418 case LE_EXPR:
2419#ifdef HAVE_sle
2420 has_scc = HAVE_sle;
2421#endif
2422 case TRUTH_ANDIF_EXPR:
2423 case TRUTH_ORIF_EXPR:
2424 case TRUTH_AND_EXPR:
2425 case TRUTH_OR_EXPR:
2426 case TRUTH_NOT_EXPR:
94ed3915 2427 case TRUTH_XOR_EXPR:
2428 if (! has_scc)
2429 {
2430 op0 = gen_label_rtx ();
2431 jumpifnot (retval_rhs, op0);
2432 expand_value_return (const1_rtx);
2433 emit_label (op0);
2434 expand_value_return (const0_rtx);
2435 return;
2436 }
2437 break;
2438
2439 default:
2440 break;
2441 }
2442 }
2443#endif /* HAVE_return */
2444
2445 /* If the result is an aggregate that is being returned in one (or more)
2446 registers, load the registers here. The compiler currently can't handle
2447 copying a BLKmode value into registers. We could put this code in a
2448 more general area (for use by everyone instead of just function
2449 call/return), but until this feature is generally usable it is kept here
2450 (and in expand_call). The value must go into a pseudo in case there
2451 are cleanups that will clobber the real return register. */
2452
2453 if (retval_rhs != 0
2454 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2455 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2456 {
a7f875d7 2457 int i, bitpos, xbitpos;
2458 int big_endian_correction = 0;
2459 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2460 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
a7f875d7 2461 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
4c485b63 2462 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
a7f875d7 2463 rtx result_reg, src, dst;
4c485b63 2464 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 2465 enum machine_mode tmpmode, result_reg_mode;
4c485b63 2466
2467 /* Structures whose size is not a multiple of a word are aligned
2468 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2469 machine, this means we must skip the empty high order bytes when
2470 calculating the bit offset. */
2471 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2472 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2473 * BITS_PER_UNIT));
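      /* For illustration (a worked example, assuming 32-bit words):
	 a 6-byte structure has bytes % UNITS_PER_WORD == 2, giving
	 big_endian_correction = 32 - 2 * 8 = 16, so the first
	 destination word is filled starting 16 bits in.  */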
2474
2475 /* Copy the structure BITSIZE bits at a time. */
2476 for (bitpos = 0, xbitpos = big_endian_correction;
2477 bitpos < bytes * BITS_PER_UNIT;
2478 bitpos += bitsize, xbitpos += bitsize)
4c485b63 2479 {
a7f875d7 2480 /* We need a new destination pseudo each time xbitpos is
abc95ed3 2481 on a word boundary and when xbitpos == big_endian_correction
2482 (the first time through). */
2483 if (xbitpos % BITS_PER_WORD == 0
2484 || xbitpos == big_endian_correction)
4c485b63 2485 {
2486 /* Generate an appropriate register. */
2487 dst = gen_reg_rtx (word_mode);
2488 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2489
2490 /* Clobber the destination before we move anything into it. */
38a448ca 2491 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
4c485b63 2492 }
2493
2494 /* We need a new source operand each time bitpos is on a word
2495 boundary. */
2496 if (bitpos % BITS_PER_WORD == 0)
2497 src = operand_subword_force (result_val,
2498 bitpos / BITS_PER_WORD,
2499 BLKmode);
2500
2501 /* Use bitpos for the source extraction (left justified) and
2502 xbitpos for the destination store (right justified). */
2503 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2504 extract_bit_field (src, bitsize,
2505 bitpos % BITS_PER_WORD, 1,
2506 NULL_RTX, word_mode,
2507 word_mode,
2508 bitsize / BITS_PER_UNIT,
2509 BITS_PER_WORD),
2510 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2511 }
2512
2513 /* Find the smallest integer mode large enough to hold the
2514 entire structure and use that mode instead of BLKmode
2515 on the USE insn for the return register. */
2516 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2517 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2518 tmpmode != MAX_MACHINE_MODE;
2519 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2520 {
2521 /* Have we found a large enough mode? */
2522 if (GET_MODE_SIZE (tmpmode) >= bytes)
2523 break;
2524 }
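      /* For illustration (assuming the usual QI/HI/SI/DI integer
	 modes of 1, 2, 4 and 8 bytes): a 6-byte structure selects
	 DImode, the narrowest integer mode of at least 6 bytes.  */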
2525
2526 /* No suitable mode found. */
2527 if (tmpmode == MAX_MACHINE_MODE)
3ffeb8f1 2528 abort ();
4c485b63 2529
2530 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2531
2532 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2533 result_reg_mode = word_mode;
2534 else
2535 result_reg_mode = tmpmode;
2536 result_reg = gen_reg_rtx (result_reg_mode);
2537
3ffeb8f1 2538 emit_queue ();
3ffeb8f1 2539 for (i = 0; i < n_regs; i++)
af55da56 2540 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 2541 result_pseudos[i]);
4c485b63 2542
2543 if (tmpmode != result_reg_mode)
2544 result_reg = gen_lowpart (tmpmode, result_reg);
2545
2546 expand_value_return (result_reg);
2547 }
2548 else if (cleanups
2549 && retval_rhs != 0
2550 && TREE_TYPE (retval_rhs) != void_type_node
2551 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2552 {
2553 /* Calculate the return value into a pseudo reg. */
2554 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2555 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2556 val = force_not_mem (val);
28d81abb 2557 emit_queue ();
2558 /* Return the calculated value, doing cleanups first. */
2559 expand_value_return (val);
2560 }
2561 else
2562 {
2563 /* No cleanups or no hard reg used;
2564 calculate value into hard return reg. */
cba389cd 2565 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb 2566 emit_queue ();
2567 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2568 }
2569}
2570
2571/* Return 1 if the end of the generated RTX is not a barrier.
2572 This means code already compiled can drop through. */
2573
2574int
2575drop_through_at_end_p ()
2576{
2577 rtx insn = get_last_insn ();
2578 while (insn && GET_CODE (insn) == NOTE)
2579 insn = PREV_INSN (insn);
2580 return insn && GET_CODE (insn) != BARRIER;
2581}
2582\f
2583/* Emit code to alter this function's formal parms for a tail-recursive call.
2584 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2585 FORMALS is the chain of decls of formals.
2586 Return 1 if this can be done;
2587 otherwise return 0 and do not emit any code. */
2588
2589static int
2590tail_recursion_args (actuals, formals)
2591 tree actuals, formals;
2592{
2593 register tree a = actuals, f = formals;
2594 register int i;
2595 register rtx *argvec;
2596
2597 /* Check that number and types of actuals are compatible
2598 with the formals. This is not always true in valid C code.
2599 Also check that no formal needs to be addressable
2600 and that all formals are scalars. */
2601
2602 /* Also count the args. */
2603
2604 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2605 {
2606 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2607 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2608 return 0;
2609 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2610 return 0;
2611 }
2612 if (a != 0 || f != 0)
2613 return 0;
2614
2615 /* Compute all the actuals. */
2616
2617 argvec = (rtx *) alloca (i * sizeof (rtx));
2618
2619 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 2620 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2621
2622 /* Find which actual values refer to current values of previous formals.
2623 Copy each of them now, before any formal is changed. */
2624
2625 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2626 {
2627 int copy = 0;
2628 register int j;
2629 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2630 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2631 { copy = 1; break; }
2632 if (copy)
2633 argvec[i] = copy_to_reg (argvec[i]);
2634 }
2635
2636 /* Store the values of the actuals into the formals. */
2637
2638 for (f = formals, a = actuals, i = 0; f;
2639 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2640 {
98f3b471 2641 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2642 emit_move_insn (DECL_RTL (f), argvec[i]);
2643 else
2644 convert_move (DECL_RTL (f), argvec[i],
2645 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2646 }
2647
2648 free_temp_slots ();
2649 return 1;
2650}
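/* For illustration (an invented example): for

       int gcd (int a, int b) { return b ? gcd (b, a % b) : a; }

   tail_recursion_args computes the new actuals b and a % b into
   temporaries, copies any actual that still refers to a formal which
   will be overwritten first, and only then stores into a and b, so no
   assignment can clobber an actual that is still needed before the
   jump back to tail_recursion_label.  */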
2651\f
2652/* Generate the RTL code for entering a binding contour.
2653 The variables are declared one by one, by calls to `expand_decl'.
2654
2655 EXIT_FLAG is nonzero if this construct should be visible to
2656 `exit_something'. */
2657
2658void
2659expand_start_bindings (exit_flag)
2660 int exit_flag;
2661{
2662 struct nesting *thisblock = ALLOC_NESTING ();
b93a436e 2663 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2664
2665 /* Make an entry on block_stack for the block we are entering. */
2666
2667 thisblock->next = block_stack;
2668 thisblock->all = nesting_stack;
2669 thisblock->depth = ++nesting_depth;
2670 thisblock->data.block.stack_level = 0;
2671 thisblock->data.block.cleanups = 0;
2672 thisblock->data.block.function_call_count = 0;
2673 thisblock->data.block.exception_region = 0;
2674 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2675
2676 thisblock->data.block.conditional_code = 0;
2677 thisblock->data.block.last_unconditional_cleanup = note;
2678 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2679
2680 if (block_stack
2681 && !(block_stack->data.block.cleanups == NULL_TREE
2682 && block_stack->data.block.outer_cleanups == NULL_TREE))
2683 thisblock->data.block.outer_cleanups
2684 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2685 block_stack->data.block.outer_cleanups);
2686 else
2687 thisblock->data.block.outer_cleanups = 0;
2688 thisblock->data.block.label_chain = 0;
2689 thisblock->data.block.innermost_stack_block = stack_block_stack;
2690 thisblock->data.block.first_insn = note;
2691 thisblock->data.block.block_start_count = ++block_start_count;
2692 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2693 block_stack = thisblock;
2694 nesting_stack = thisblock;
2695
2696 /* Make a new level for allocating stack slots. */
2697 push_temp_slots ();
2698}
2699
2700/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2701 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2702 expand_expr are made. After we end the region, we know that all
2703 space for all temporaries that were created by TARGET_EXPRs will be
2704 destroyed and their space freed for reuse. */
2705
2706void
2707expand_start_target_temps ()
2708{
2709 /* This is so that even if the result is preserved, the space
2710 allocated will be freed, as we know that it is no longer in use. */
2711 push_temp_slots ();
2712
2713 /* Start a new binding layer that will keep track of all cleanup
2714 actions to be performed. */
2715 expand_start_bindings (0);
2716
2717 target_temp_slot_level = temp_slot_level;
2718}
2719
2720void
2721expand_end_target_temps ()
2722{
2723 expand_end_bindings (NULL_TREE, 0, 0);
2724
2725 /* This is so that even if the result is preserved, the space
2726 allocated will be freed, as we know that it is no longer in use. */
2727 pop_temp_slots ();
2728}
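/* For illustration (a hedged sketch of the intended pairing):

       expand_start_target_temps ();
       ... expand code that may create TARGET_EXPR temporaries ...
       expand_end_target_temps ();

   Every temporary slot created between the two calls is freed at the
   end, even when the value of some individual expression was
   preserved.  */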
2729
2730/* Mark top block of block_stack as an implicit binding for an
2731 exception region. This is used to prevent infinite recursion when
2732 ending a binding with expand_end_bindings. It is only ever called
2733 by expand_eh_region_start, as that is the only way to create a
2734 block stack for an exception region. */
2735
2736void
2737mark_block_as_eh_region ()
2738{
2739 block_stack->data.block.exception_region = 1;
2740 if (block_stack->next
2741 && block_stack->next->data.block.conditional_code)
2742 {
2743 block_stack->data.block.conditional_code
2744 = block_stack->next->data.block.conditional_code;
2745 block_stack->data.block.last_unconditional_cleanup
2746 = block_stack->next->data.block.last_unconditional_cleanup;
2747 block_stack->data.block.cleanup_ptr
2748 = block_stack->next->data.block.cleanup_ptr;
2749 }
2750}
2751
2752/* True if we are currently emitting insns in an area of output code
2753 that is controlled by a conditional expression. This is used by
2754 the cleanup handling code to generate conditional cleanup actions. */
2755
2756int
2757conditional_context ()
2758{
2759 return block_stack && block_stack->data.block.conditional_code;
2760}
2761
2762/* Mark top block of block_stack as not for an implicit binding for an
2763 exception region. This is only ever done by expand_eh_region_end
2764 to let expand_end_bindings know that it is being called explicitly
2765 to end only the binding layer associated with the exception
2766 region; otherwise expand_end_bindings would try to end all
2767 implicit binding layers for exception regions, and then one
2768 normal binding layer. */
2769
2770void
2771mark_block_as_not_eh_region ()
2772{
2773 block_stack->data.block.exception_region = 0;
2774}
2775
2776/* True if the top block of block_stack was marked as for an exception
2777 region by mark_block_as_eh_region. */
2778
2779int
2780is_eh_region ()
2781{
2782 return block_stack && block_stack->data.block.exception_region;
2783}
2784
2785/* Given a pointer to a BLOCK node, save a pointer to the most recently
2786 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2787 BLOCK node. */
2788
2789void
2790remember_end_note (block)
2791 register tree block;
2792{
2793 BLOCK_END_NOTE (block) = last_block_end_note;
2794 last_block_end_note = NULL_RTX;
2795}
2796
2797/* Generate RTL code to terminate a binding contour.
2798 VARS is the chain of VAR_DECL nodes
2799 for the variables bound in this contour.
2800 MARK_ENDS is nonzero if we should put a note at the beginning
2801 and end of this binding contour.
2802
2803 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2804 (That is true automatically if the contour has a saved stack level.) */
2805
2806void
2807expand_end_bindings (vars, mark_ends, dont_jump_in)
2808 tree vars;
2809 int mark_ends;
2810 int dont_jump_in;
2811{
e976b8b2 2812 register struct nesting *thisblock;
2813 register tree decl;
2814
2815 while (block_stack->data.block.exception_region)
2816 {
2817 /* Because we don't need or want a new temporary level and
2818 because we didn't create one in expand_eh_region_start,
2819 create a fake one now to avoid removing one in
2820 expand_end_bindings. */
2821 push_temp_slots ();
2822
2823 block_stack->data.block.exception_region = 0;
2824
2825 expand_end_bindings (NULL_TREE, 0, 0);
2826 }
2827
2828 /* Since expand_eh_region_start does an expand_start_bindings, we
2829 have to first end all the bindings that were created by
2830 expand_eh_region_start. */
2831
2832 thisblock = block_stack;
2833
2834 if (warn_unused)
2835 for (decl = vars; decl; decl = TREE_CHAIN (decl))
7e70e7c5 2836 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2837 && ! DECL_IN_SYSTEM_HEADER (decl)
2838 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2839 warning_with_decl (decl, "unused variable `%s'");
2840
2841 if (thisblock->exit_label)
2842 {
2843 do_pending_stack_adjust ();
2844 emit_label (thisblock->exit_label);
2845 }
2846
2847 /* If necessary, make a handler for nonlocal gotos taking
2848 place in the function calls in this block. */
2849 if (function_call_count != thisblock->data.block.function_call_count
2850 && nonlocal_labels
2851 /* Make handler for outermost block
2852 if there were any nonlocal gotos to this function. */
2853 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2854 /* Make handler for inner block if it has something
2855 special to do when you jump out of it. */
2856 : (thisblock->data.block.cleanups != 0
2857 || thisblock->data.block.stack_level != 0)))
2858 {
2859 tree link;
2860 rtx afterward = gen_label_rtx ();
2861 rtx handler_label = gen_label_rtx ();
2862 rtx save_receiver = gen_reg_rtx (Pmode);
ba83886f 2863 rtx insns;
2864
2865 /* Don't let jump_optimize delete the handler. */
2866 LABEL_PRESERVE_P (handler_label) = 1;
2867
2868 /* Record the handler address in the stack slot for that purpose,
2869 during this block, saving and restoring the outer value. */
2870 if (thisblock->next != 0)
2871 {
2872 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2873
2874 start_sequence ();
2875 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2876 insns = get_insns ();
2877 end_sequence ();
2878 emit_insns_before (insns, thisblock->data.block.first_insn);
28d81abb 2879 }
2880
2881 start_sequence ();
2882 emit_move_insn (nonlocal_goto_handler_slot,
38a448ca 2883 gen_rtx_LABEL_REF (Pmode, handler_label));
2884 insns = get_insns ();
2885 end_sequence ();
2886 emit_insns_before (insns, thisblock->data.block.first_insn);
2887
2888 /* Jump around the handler; it runs only when specially invoked. */
2889 emit_jump (afterward);
2890 emit_label (handler_label);
2891
2892#ifdef HAVE_nonlocal_goto
2893 if (! HAVE_nonlocal_goto)
2894#endif
2895 /* First adjust our frame pointer to its actual value. It was
2896 previously set to the start of the virtual area corresponding to
2897 the stacked variables when we branched here and now needs to be
2898 adjusted to the actual hardware fp value.
2899
2900 Assignments to virtual registers are converted by
2901 instantiate_virtual_regs into the corresponding assignment
2902 to the underlying register (fp in this case) that makes
2903 the original assignment true.
2904 So the following insn will actually be
2905 decrementing fp by STARTING_FRAME_OFFSET. */
705e524e 2906 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
28d81abb 2907
a35ad168 2908#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2909 if (fixed_regs[ARG_POINTER_REGNUM])
2910 {
2911#ifdef ELIMINABLE_REGS
2912 /* If the argument pointer can be eliminated in favor of the
2913 frame pointer, we don't need to restore it. We assume here
2914 that if such an elimination is present, it can always be used.
2915 This is the case on all known machines; if we don't make this
2916 assumption, we do unnecessary saving on many machines. */
2917 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2918 int i;
2919
2920 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2921 if (elim_regs[i].from == ARG_POINTER_REGNUM
a35ad168 2922 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2923 break;
2924
2925 if (i == sizeof elim_regs / sizeof elim_regs [0])
2926#endif
2927 {
2928 /* Now restore our arg pointer from the address at which it
2929 was saved in our stack frame.
2930 If there hasn't been space allocated for it yet, make
2931 some now. */
2932 if (arg_pointer_save_area == 0)
2933 arg_pointer_save_area
2934 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2935 emit_move_insn (virtual_incoming_args_rtx,
2936 /* We need a pseudo here, or else
2937 instantiate_virtual_regs_1 complains. */
2938 copy_to_reg (arg_pointer_save_area));
2939 }
2940 }
2941#endif
2942
2943#ifdef HAVE_nonlocal_goto_receiver
2944 if (HAVE_nonlocal_goto_receiver)
2945 emit_insn (gen_nonlocal_goto_receiver ());
2946#endif
2947
2948 /* The handler expects the desired label address in the static chain
2949 register. It tests the address and does an appropriate jump
2950 to whatever label is desired. */
2951 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2952 /* Skip any labels we shouldn't be able to jump to from here. */
2953 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2954 {
2955 rtx not_this = gen_label_rtx ();
2956 rtx this = gen_label_rtx ();
2957 do_jump_if_equal (static_chain_rtx,
38a448ca 2958 gen_rtx_LABEL_REF (Pmode, DECL_RTL (TREE_VALUE (link))),
2959 this, 0);
2960 emit_jump (not_this);
2961 emit_label (this);
2962 expand_goto (TREE_VALUE (link));
2963 emit_label (not_this);
2964 }
2965 /* If label is not recognized, abort. */
38a448ca 2966 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
28d81abb 2967 VOIDmode, 0);
a3fd7507 2968 emit_barrier ();
2969 emit_label (afterward);
2970 }
2971
2972 /* Don't allow jumping into a block that has a stack level.
2973 Cleanups are allowed, though. */
28d81abb 2974 if (dont_jump_in
72eb1038 2975 || thisblock->data.block.stack_level != 0)
2976 {
2977 struct label_chain *chain;
2978
2979 /* Any labels in this block are no longer valid to go to.
2980 Mark them to cause an error message. */
2981 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2982 {
2983 DECL_TOO_LATE (chain->label) = 1;
2984 /* If any goto without a fixup came to this label,
2985 that must be an error, because gotos without fixups
72eb1038 2986 come from outside all saved stack-levels. */
2987 if (TREE_ADDRESSABLE (chain->label))
2988 error_with_decl (chain->label,
2989 "label `%s' used before containing binding contour");
2990 }
2991 }
2992
2993 /* Restore stack level in effect before the block
2994 (only if variable-size objects allocated). */
2995 /* Perform any cleanups associated with the block. */
2996
2997 if (thisblock->data.block.stack_level != 0
2998 || thisblock->data.block.cleanups != 0)
2999 {
413ec213 3000 /* Only clean up here if this point can actually be reached. */
50d1b7a1 3001 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
28d81abb 3002
3003 /* Don't let cleanups affect ({...}) constructs. */
3004 int old_expr_stmts_for_value = expr_stmts_for_value;
3005 rtx old_last_expr_value = last_expr_value;
3006 tree old_last_expr_type = last_expr_type;
3007 expr_stmts_for_value = 0;
28d81abb 3008
3009 /* Do the cleanups. */
3010 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3011 if (reachable)
3012 do_pending_stack_adjust ();
28d81abb 3013
3014 expr_stmts_for_value = old_expr_stmts_for_value;
3015 last_expr_value = old_last_expr_value;
3016 last_expr_type = old_last_expr_type;
3017
3018 /* Restore the stack level. */
3019
3020 if (reachable && thisblock->data.block.stack_level != 0)
3021 {
3022 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3023 thisblock->data.block.stack_level, NULL_RTX);
3024 if (nonlocal_goto_handler_slot != 0)
3025 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3026 NULL_RTX);
3027 }
3028
3029 /* Any gotos out of this block must also do these things.
3030 Also report any gotos with fixups that came to labels in this
3031 level. */
3032 fixup_gotos (thisblock,
3033 thisblock->data.block.stack_level,
3034 thisblock->data.block.cleanups,
3035 thisblock->data.block.first_insn,
3036 dont_jump_in);
3037 }
3038
3039 /* Mark the beginning and end of the scope if requested.
3040 We do this now, after running cleanups on the variables
3041 just going out of scope, so they are in scope for their cleanups. */
3042
3043 if (mark_ends)
7629c936 3044 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3045 else
3046 /* Get rid of the beginning-mark if we don't make an end-mark. */
3047 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3048
3049 /* If doing stupid register allocation, make sure lives of all
3050 register variables declared here extend thru end of scope. */
3051
3052 if (obey_regdecls)
3053 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3054 {
3055 rtx rtl = DECL_RTL (decl);
3056 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3057 use_variable (rtl);
3058 }
3059
3060 /* Restore the temporary level of TARGET_EXPRs. */
3061 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3062
3063 /* Restore block_stack level for containing block. */
3064
3065 stack_block_stack = thisblock->data.block.innermost_stack_block;
3066 POPSTACK (block_stack);
3067
3068 /* Pop the stack slot nesting and free any slots at this level. */
3069 pop_temp_slots ();
3070}
3071
3072
3073\f
3074/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3075 (Other kinds of declarations are simply ignored if seen here.) */
3076
3077void
3078expand_decl (decl)
3079 register tree decl;
3080{
3081 struct nesting *thisblock = block_stack;
3082 tree type;
3083
ca695ac9 3084 type = TREE_TYPE (decl);
3085
3086 /* Only automatic variables need any expansion done.
3087 Static and external variables, and external functions,
3088 will be handled by `assemble_variable' (called from finish_decl).
3089 TYPE_DECL and CONST_DECL require nothing.
3090 PARM_DECLs are handled in `assign_parms'. */
3091
3092 if (TREE_CODE (decl) != VAR_DECL)
3093 return;
44fe2e80 3094 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3095 return;
3096
3097 /* Create the RTL representation for the variable. */
3098
3099 if (type == error_mark_node)
38a448ca 3100 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3101 else if (DECL_SIZE (decl) == 0)
3102 /* Variable with incomplete type. */
3103 {
3104 if (DECL_INITIAL (decl) == 0)
3105 /* Error message was already done; now avoid a crash. */
3106 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3107 else
3108 /* An initializer is going to decide the size of this array.
3109 Until we know the size, represent its address with a reg. */
38a448ca 3110 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3668e76e 3111 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3112 }
3113 else if (DECL_MODE (decl) != BLKmode
3114 /* If -ffloat-store, don't put explicit float vars
3115 into regs. */
3116 && !(flag_float_store
3117 && TREE_CODE (type) == REAL_TYPE)
3118 && ! TREE_THIS_VOLATILE (decl)
3119 && ! TREE_ADDRESSABLE (decl)
44fe2e80 3120 && (DECL_REGISTER (decl) || ! obey_regdecls))
3121 {
3122 /* Automatic variable that can go in a register. */
98f3b471 3123 int unsignedp = TREE_UNSIGNED (type);
3124 enum machine_mode reg_mode
3125 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3126
3127 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3128 mark_user_reg (DECL_RTL (decl));
3129
3130 if (TREE_CODE (type) == POINTER_TYPE)
3131 mark_reg_pointer (DECL_RTL (decl),
3132 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3133 / BITS_PER_UNIT));
28d81abb 3134 }
0df15c2c 3135
3136 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3137 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3138 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3139 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3140 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3141 {
3142 /* Variable of fixed size that goes on the stack. */
3143 rtx oldaddr = 0;
3144 rtx addr;
3145
3146 /* If we previously made RTL for this decl, it must be an array
3147 whose size was determined by the initializer.
3148 The old address was a register; set that register now
3149 to the proper address. */
3150 if (DECL_RTL (decl) != 0)
3151 {
3152 if (GET_CODE (DECL_RTL (decl)) != MEM
3153 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3154 abort ();
3155 oldaddr = XEXP (DECL_RTL (decl), 0);
3156 }
3157
3158 DECL_RTL (decl)
3159 = assign_stack_temp (DECL_MODE (decl),
3160 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3161 + BITS_PER_UNIT - 1)
3162 / BITS_PER_UNIT),
3163 1);
3164 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3165
3166 /* Set alignment we actually gave this decl. */
3167 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3168 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3169
3170 if (oldaddr)
3171 {
3172 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3173 if (addr != oldaddr)
3174 emit_move_insn (oldaddr, addr);
3175 }
3176
3177 /* If this is a memory ref that contains aggregate components,
3178 mark it as such for cse and loop optimize. */
3179 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3180#if 0
3181 /* If this is in memory because of -ffloat-store,
3182 set the volatile bit, to prevent optimizations from
3183 undoing the effects. */
3184 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3185 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3186#endif
3187 }
3188 else
3189 /* Dynamic-size object: must push space on the stack. */
3190 {
3191 rtx address, size;
3192
3193 /* Record the stack pointer on entry to block, if have
3194 not already done so. */
3195 if (thisblock->data.block.stack_level == 0)
3196 {
3197 do_pending_stack_adjust ();
3198 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3199 &thisblock->data.block.stack_level,
3200 thisblock->data.block.first_insn);
3201 stack_block_stack = thisblock;
3202 }
3203
3204 /* Compute the variable's size, in bytes. */
3205 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3206 DECL_SIZE (decl),
3207 size_int (BITS_PER_UNIT)),
3208 NULL_RTX, VOIDmode, 0);
3209 free_temp_slots ();
3210
3211 /* Allocate space on the stack for the variable. Note that
3212 DECL_ALIGN says how the variable is to be aligned and we
3213 cannot use it to conclude anything about the alignment of
3214 the size. */
3215 address = allocate_dynamic_stack_space (size, NULL_RTX,
3216 TYPE_ALIGN (TREE_TYPE (decl)));
3217
3218 /* Reference the variable indirect through that rtx. */
3219 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3220
3221 /* If this is a memory ref that contains aggregate components,
3222 mark it as such for cse and loop optimize. */
3223 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3224
3225 /* Indicate the alignment we actually gave this variable. */
3226#ifdef STACK_BOUNDARY
3227 DECL_ALIGN (decl) = STACK_BOUNDARY;
3228#else
3229 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3230#endif
3231 }
3232
3233 if (TREE_THIS_VOLATILE (decl))
3234 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3235#if 0 /* A variable is not necessarily unchanging
3236 just because it is const. RTX_UNCHANGING_P
3237 means no change in the function,
3238 not merely no change in the variable's scope.
3239 It is correct to set RTX_UNCHANGING_P if the variable's scope
3240 is the whole function. There's no convenient way to test that. */
3241 if (TREE_READONLY (decl))
3242 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3243#endif
3244
3245 /* If doing stupid register allocation, make sure life of any
3246 register variable starts here, at the start of its scope. */
3247
3248 if (obey_regdecls)
3249 use_variable (DECL_RTL (decl));
3250}
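/* For illustration (a sketch, not part of the compiler): on a typical
   target the three paths above correspond to C source like

	void f (int n)
	{
	  register int i;	   -- register candidate: gen_reg_rtx
	  double d[10];		   -- fixed size: assign_stack_temp
	  char buf[n];		   -- dynamic size: allocate_dynamic_stack_space
	}

   The exact choice also depends on -ffloat-store, volatility,
   addressability, and obey_regdecls, as tested above.  */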
3251
3252
3253\f
3254/* Emit code to perform the initialization of a declaration DECL. */
3255
3256void
3257expand_decl_init (decl)
3258 tree decl;
3259{
3260 int was_used = TREE_USED (decl);
3261
3262 /* If this is a CONST_DECL, we don't have to generate any code, but
3263 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3264 to be set while in the obstack containing the constant. If we don't
3265 do this, we can lose if we have functions nested three deep and the middle
3266 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3267 the innermost function is the first to expand that STRING_CST. */
3268 if (TREE_CODE (decl) == CONST_DECL)
3269 {
3270 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3271 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3272 EXPAND_INITIALIZER);
3273 return;
3274 }
3275
3276 if (TREE_STATIC (decl))
3277 return;
3278
3279 /* Compute and store the initial value now. */
3280
3281 if (DECL_INITIAL (decl) == error_mark_node)
3282 {
3283 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3284 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3285 || code == POINTER_TYPE)
3286 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3287 0, 0);
3288 emit_queue ();
3289 }
3290 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3291 {
3292 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3293 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3294 emit_queue ();
3295 }
3296
3297 /* Don't let the initialization count as "using" the variable. */
3298 TREE_USED (decl) = was_used;
3299
3300 /* Free any temporaries we made while initializing the decl. */
3301 preserve_temp_slots (NULL_RTX);
3302 free_temp_slots ();
3303}
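/* Example (illustrative only): for `int x = 5;' the front end calls
   expand_decl_init with DECL_INITIAL (decl) set to the constant 5,
   which reaches the expand_assignment call above; for a declaration
   whose initializer was erroneous, the zero-assignment path keeps
   later RTL generation from crashing on an uninitialized value.  */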
3304
3305/* CLEANUP is an expression to be executed at exit from this binding contour;
3306 for example, in C++, it might call the destructor for this variable.
3307
3308 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3309 CLEANUP multiple times, and have the correct semantics. This
3310 happens in exception handling, and for gotos, returns, and breaks
3311 that leave the current scope.
3312
3313 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3314 that is not associated with any particular variable. */
3315
3316int
3317expand_decl_cleanup (decl, cleanup)
3318 tree decl, cleanup;
3319{
3320 struct nesting *thisblock = block_stack;
3321
3322 /* Error if we are not in any block. */
3323 if (thisblock == 0)
3324 return 0;
3325
3326 /* Record the cleanup if there is one. */
3327
3328 if (cleanup != 0)
3329 {
3330 tree t;
3331 rtx seq;
3332 tree *cleanups = &thisblock->data.block.cleanups;
3333 int cond_context = conditional_context ();
3334
3335 if (cond_context)
3336 {
3337 rtx flag = gen_reg_rtx (word_mode);
3338 rtx set_flag_0;
3339 tree cond;
3340
3341 start_sequence ();
3342 emit_move_insn (flag, const0_rtx);
3343 set_flag_0 = get_insns ();
3344 end_sequence ();
3345
3346 thisblock->data.block.last_unconditional_cleanup
3347 = emit_insns_after (set_flag_0,
3348 thisblock->data.block.last_unconditional_cleanup);
3349
3350 emit_move_insn (flag, const1_rtx);
3351
3352 /* All cleanups must be on the function_obstack. */
3353 push_obstacks_nochange ();
3354 resume_temporary_allocation ();
3355
3356 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3357 DECL_RTL (cond) = flag;
3358
3359 /* Conditionalize the cleanup. */
3360 cleanup = build (COND_EXPR, void_type_node,
3361 truthvalue_conversion (cond),
3362 cleanup, integer_zero_node);
3363 cleanup = fold (cleanup);
3364
3365 pop_obstacks ();
3366
3367 cleanups = thisblock->data.block.cleanup_ptr;
3368 }
3369
3370 /* All cleanups must be on the function_obstack. */
3371 push_obstacks_nochange ();
3372 resume_temporary_allocation ();
3373 cleanup = unsave_expr (cleanup);
3374 pop_obstacks ();
3375
3376 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3377
3378 if (! cond_context)
3379 /* If this block has a cleanup, it belongs in stack_block_stack. */
3380 stack_block_stack = thisblock;
3381
3382 if (cond_context)
3383 {
3384 start_sequence ();
3385 }
3386
3387 /* If this was optimized so that there is no exception region for the
3388 cleanup, then mark the TREE_LIST node, so that we can later tell
3389 if we need to call expand_eh_region_end. */
3390 if (! using_eh_for_cleanups_p
3391 || expand_eh_region_start_tree (decl, cleanup))
3392 TREE_ADDRESSABLE (t) = 1;
3393 /* If that started a new EH region, we're in a new block. */
3394 thisblock = block_stack;
3395
3396 if (cond_context)
3397 {
3398 seq = get_insns ();
3399 end_sequence ();
3400 if (seq)
3401 thisblock->data.block.last_unconditional_cleanup
3402 = emit_insns_after (seq,
3403 thisblock->data.block.last_unconditional_cleanup);
3404 }
3405 else
3406 {
3407 thisblock->data.block.last_unconditional_cleanup
3408 = get_last_insn ();
3409 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3410 }
3411 }
3412 return 1;
3413}
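/* A hypothetical driver (sketch; the names are illustrative): a C++
   front end expanding `String s;' inside a block might issue

	expand_decl (s_decl);			   allocate storage
	expand_decl_init (s_decl);		   run the constructor
	expand_decl_cleanup (s_decl, dtor_call);   dtor_call is a tree
						   invoking ~String

   so that the destructor runs however the binding contour is left,
   including via goto, return, break, or an exception.  */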
3414
3415/* Like expand_decl_cleanup, but suppress generating an exception handler
3416 to perform the cleanup. */
3417
3418int
3419expand_decl_cleanup_no_eh (decl, cleanup)
3420 tree decl, cleanup;
3421{
3422 int save_eh = using_eh_for_cleanups_p;
3423 int result;
3424
3425 using_eh_for_cleanups_p = 0;
3426 result = expand_decl_cleanup (decl, cleanup);
3427 using_eh_for_cleanups_p = save_eh;
3428
3429 return result;
3430}
3431
3432/* Arrange for the top element of the dynamic cleanup chain to be
3433 popped if we exit the current binding contour. DECL is the
3434 associated declaration, if any, otherwise NULL_TREE. If the
3435 current contour is left via an exception, then __sjthrow will pop
3436 the top element off the dynamic cleanup chain. The code that
3437 avoids doing the action we push into the cleanup chain in the
3438 exceptional case is contained in expand_cleanups.
3439
3440 This routine is only used by expand_eh_region_start, and that is
3441 the only way in which an exception region should be started. This
3442 routine is only used when using the setjmp/longjmp codegen method
3443 for exception handling. */
3444
3445int
3446expand_dcc_cleanup (decl)
3447 tree decl;
3448{
3449 struct nesting *thisblock = block_stack;
3450 tree cleanup;
3451
3452 /* Error if we are not in any block. */
3453 if (thisblock == 0)
3454 return 0;
3455
3456 /* Record the cleanup for the dynamic handler chain. */
3457
3458 /* All cleanups must be on the function_obstack. */
3459 push_obstacks_nochange ();
3460 resume_temporary_allocation ();
3461 cleanup = make_node (POPDCC_EXPR);
3462 pop_obstacks ();
3463
3464 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3465 thisblock->data.block.cleanups
3466 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3467
3468 /* If this block has a cleanup, it belongs in stack_block_stack. */
3469 stack_block_stack = thisblock;
3470 return 1;
3471}
3472
3473/* Arrange for the top element of the dynamic handler chain to be
3474 popped if we exit the current binding contour. DECL is the
3475 associated declaration, if any, otherwise NULL_TREE. If the current
3476 contour is left via an exception, then __sjthrow will pop the top
3477 element off the dynamic handler chain. The code that avoids doing
3478 the action we push into the handler chain in the exceptional case
3479 is contained in expand_cleanups.
3480
3481 This routine is only used by expand_eh_region_start, and that is
3482 the only way in which an exception region should be started. This
3483 routine is only used when using the setjmp/longjmp codegen method
3484 for exception handling. */
3485
3486int
3487expand_dhc_cleanup (decl)
3488 tree decl;
3489{
3490 struct nesting *thisblock = block_stack;
3491 tree cleanup;
3492
3493 /* Error if we are not in any block. */
3494 if (thisblock == 0)
3495 return 0;
3496
3497 /* Record the cleanup for the dynamic handler chain. */
3498
3499 /* All cleanups must be on the function_obstack. */
3500 push_obstacks_nochange ();
3501 resume_temporary_allocation ();
3502 cleanup = make_node (POPDHC_EXPR);
3503 pop_obstacks ();
3504
3505 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3506 thisblock->data.block.cleanups
3507 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3508
3509 /* If this block has a cleanup, it belongs in stack_block_stack. */
3510 stack_block_stack = thisblock;
3511 return 1;
3512}
3513\f
3514/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3515 DECL_ELTS is the list of elements that belong to DECL's type.
3516 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3517
3518void
3519expand_anon_union_decl (decl, cleanup, decl_elts)
3520 tree decl, cleanup, decl_elts;
3521{
3522 struct nesting *thisblock = block_stack;
3523 rtx x;
3524
3525 expand_decl (decl);
3526 expand_decl_cleanup (decl, cleanup);
3527 x = DECL_RTL (decl);
3528
3529 while (decl_elts)
3530 {
3531 tree decl_elt = TREE_VALUE (decl_elts);
3532 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3533 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3534
3535 /* Propagate the union's alignment to the elements. */
3536 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3537
3538 /* If the element has BLKmode and the union doesn't, the union is
3539 aligned such that the element doesn't need to have BLKmode, so
3540 change the element's mode to the appropriate one for its size. */
3541 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3542 DECL_MODE (decl_elt) = mode
3543 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3544 MODE_INT, 1);
3545
3546 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3547 instead create a new MEM rtx with the proper mode. */
3548 if (GET_CODE (x) == MEM)
3549 {
3550 if (mode == GET_MODE (x))
3551 DECL_RTL (decl_elt) = x;
3552 else
3553 {
3554 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3555 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3556 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3557 }
3558 }
3559 else if (GET_CODE (x) == REG)
3560 {
3561 if (mode == GET_MODE (x))
3562 DECL_RTL (decl_elt) = x;
3563 else
3564 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3565 }
3566 else
3567 abort ();
3568
3569 /* Record the cleanup if there is one. */
3570
3571 if (cleanup != 0)
3572 thisblock->data.block.cleanups
3573 = temp_tree_cons (decl_elt, cleanup_elt,
3574 thisblock->data.block.cleanups);
3575
3576 decl_elts = TREE_CHAIN (decl_elts);
3577 }
3578}
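/* Example (illustrative): for a member `union { int i; float f; };'
   declared anonymously, DECL_ELTS carries the VAR_DECLs for `i' and
   `f'; both end up sharing DECL's storage, re-expressed in SImode and
   SFmode respectively by the MEM and REG cases above.  */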
3579\f
3580/* Expand a list of cleanups LIST.
3581 Elements may be expressions or may be nested lists.
3582
3583 If DONT_DO is nonnull, then any list-element
3584 whose TREE_PURPOSE matches DONT_DO is omitted.
3585 This is sometimes used to avoid a cleanup associated with
3586 a value that is being returned out of the scope.
3587
3588 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3589 goto and handle protection regions specially in that case.
3590
3591 If REACHABLE, we emit code, otherwise just inform the exception handling
3592 code about this finalization. */
3593
3594static void
3595expand_cleanups (list, dont_do, in_fixup, reachable)
3596 tree list;
3597 tree dont_do;
3598 int in_fixup;
3599 int reachable;
3600{
3601 tree tail;
3602 for (tail = list; tail; tail = TREE_CHAIN (tail))
3603 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3604 {
3605 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3606 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3607 else
3608 {
3609 if (! in_fixup)
3610 {
3611 tree cleanup = TREE_VALUE (tail);
3612
3613 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3614 if (TREE_CODE (cleanup) != POPDHC_EXPR
3615 && TREE_CODE (cleanup) != POPDCC_EXPR
3616 /* See expand_eh_region_start_tree for this case. */
3617 && ! TREE_ADDRESSABLE (tail))
3618 {
3619 cleanup = protect_with_terminate (cleanup);
3620 expand_eh_region_end (cleanup);
3621 }
3622 }
3623
3624 if (reachable)
3625 {
3626 /* Cleanups may be run multiple times. For example,
3627 when exiting a binding contour, we expand the
3628 cleanups associated with that contour. When a goto
3629 within that binding contour has a target outside that
3630 contour, it will expand all cleanups from its scope to
3631 the target. Though the cleanups are expanded multiple
3632 times, the control paths are non-overlapping so the
3633 cleanups will not be executed twice. */
3634
3635 /* We may need to protect fixups with rethrow regions. */
3636 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3637 if (protect)
3638 expand_fixup_region_start ();
3639 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3640 if (protect)
3641 expand_fixup_region_end (TREE_VALUE (tail));
3642 free_temp_slots ();
3643 }
3644 }
3645 }
3646}
3647
3648/* Mark the context we are emitting RTL for as a conditional
3649 context, so that any cleanup actions we register with
3650 expand_decl_init will be properly conditionalized when those
3651 cleanup actions are later performed. Must be called before any
3652 expression (tree) is expanded that is within a conditional context. */
3653
3654void
3655start_cleanup_deferral ()
e976b8b2 3656{
3657 /* block_stack can be NULL if we are inside the parameter list. It is
3658 OK to do nothing, because cleanups aren't possible here. */
3659 if (block_stack)
3660 ++block_stack->data.block.conditional_code;
3661}
3662
3663/* Mark the end of a conditional region of code. Because cleanup
3664 deferrals may be nested, we may still be in a conditional region
3665 after we end the currently deferred cleanups; only after we end all
3666 deferred cleanups are we back in unconditional code. */
3667
3668void
3669end_cleanup_deferral ()
e976b8b2 3670{
3671 /* block_stack can be NULL if we are inside the parameter list. It is
3672 OK to do nothing, because cleanups aren't possible here. */
3673 if (block_stack)
3674 --block_stack->data.block.conditional_code;
3675}
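/* Sketch of intended use (illustrative): when expanding something like
   `x ? f () : g ()', the front end brackets each arm with

	start_cleanup_deferral ();
	... expand the arm, possibly registering cleanups ...
	end_cleanup_deferral ();

   so that expand_decl_cleanup, above, sees a conditional context and
   guards each such cleanup with the runtime flag it builds.  */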
3676
3677/* Move all cleanups from the current block_stack
3678 to the containing block_stack, where they are assumed to
3679 have been created. If anything can cause a temporary to
3680 be created, but not expanded for more than one level of
3681 block_stacks, then this code will have to change. */
3682
3683void
3684move_cleanups_up ()
3685{
3686 struct nesting *block = block_stack;
3687 struct nesting *outer = block->next;
3688
3689 outer->data.block.cleanups
3690 = chainon (block->data.block.cleanups,
3691 outer->data.block.cleanups);
3692 block->data.block.cleanups = 0;
3693}
3694
3695tree
3696last_cleanup_this_contour ()
3697{
3698 if (block_stack == 0)
3699 return 0;
3700
3701 return block_stack->data.block.cleanups;
3702}
3703
3704/* Return 1 if there are any pending cleanups at this point.
3705 If THIS_CONTOUR is nonzero, check the current contour as well.
3706 Otherwise, look only at the contours that enclose this one. */
3707
3708int
3709any_pending_cleanups (this_contour)
3710 int this_contour;
3711{
3712 struct nesting *block;
3713
3714 if (block_stack == 0)
3715 return 0;
3716
3717 if (this_contour && block_stack->data.block.cleanups != NULL)
3718 return 1;
3719 if (block_stack->data.block.cleanups == 0
3720 && block_stack->data.block.outer_cleanups == 0)
3721 return 0;
3722
3723 for (block = block_stack->next; block; block = block->next)
3724 if (block->data.block.cleanups != 0)
3725 return 1;
3726
3727 return 0;
3728}
3729\f
3730/* Enter a case (Pascal) or switch (C) statement.
3731 Push a block onto case_stack and nesting_stack
3732 to accumulate the case-labels that are seen
3733 and to record the labels generated for the statement.
3734
3735 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3736 Otherwise, this construct is transparent for `exit_something'.
3737
3738 EXPR is the index-expression to be dispatched on.
3739 TYPE is its nominal type. We could simply convert EXPR to this type,
3740 but instead we take short cuts. */
3741
3742void
3743expand_start_case (exit_flag, expr, type, printname)
3744 int exit_flag;
3745 tree expr;
3746 tree type;
3747 char *printname;
3748{
3749 register struct nesting *thiscase = ALLOC_NESTING ();
3750
3751 /* Make an entry on case_stack for the case we are entering. */
3752
3753 thiscase->next = case_stack;
3754 thiscase->all = nesting_stack;
3755 thiscase->depth = ++nesting_depth;
3756 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3757 thiscase->data.case_stmt.case_list = 0;
3758 thiscase->data.case_stmt.index_expr = expr;
3759 thiscase->data.case_stmt.nominal_type = type;
3760 thiscase->data.case_stmt.default_label = 0;
3761 thiscase->data.case_stmt.num_ranges = 0;
3762 thiscase->data.case_stmt.printname = printname;
3763 thiscase->data.case_stmt.seenlabel = 0;
3764 case_stack = thiscase;
3765 nesting_stack = thiscase;
3766
3767 do_pending_stack_adjust ();
3768
3769 /* Make sure case_stmt.start points to something that won't
3770 need any transformation before expand_end_case. */
3771 if (GET_CODE (get_last_insn ()) != NOTE)
3772 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3773
3774 thiscase->data.case_stmt.start = get_last_insn ();
3775
3776 start_cleanup_deferral ();
3777}
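/* Overall protocol (an illustrative sketch): for a C `switch' the
   parser typically generates

	expand_start_case (1, expr, type, "switch statement");
	... pushcase / pushcase_range for each `case' label ...
	... body statements ...
	expand_end_case (expr);

   with the pushcase functions below accumulating the labels that
   expand_end_case finally dispatches on.  */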
3778
3779
3780/* Start a "dummy case statement" within which case labels are invalid
3781 and are not connected to any larger real case statement.
3782 This can be used if you don't want to let a case statement jump
3783 into the middle of certain kinds of constructs. */
3784
3785void
3786expand_start_case_dummy ()
3787{
3788 register struct nesting *thiscase = ALLOC_NESTING ();
3789
3790 /* Make an entry on case_stack for the dummy. */
3791
3792 thiscase->next = case_stack;
3793 thiscase->all = nesting_stack;
3794 thiscase->depth = ++nesting_depth;
3795 thiscase->exit_label = 0;
3796 thiscase->data.case_stmt.case_list = 0;
3797 thiscase->data.case_stmt.start = 0;
3798 thiscase->data.case_stmt.nominal_type = 0;
3799 thiscase->data.case_stmt.default_label = 0;
3800 thiscase->data.case_stmt.num_ranges = 0;
3801 case_stack = thiscase;
3802 nesting_stack = thiscase;
3803 start_cleanup_deferral ();
3804}
3805
3806/* End a dummy case statement. */
3807
3808void
3809expand_end_case_dummy ()
3810{
3811 end_cleanup_deferral ();
3812 POPSTACK (case_stack);
3813}
3814
3815/* Return the data type of the index-expression
3816 of the innermost case statement, or null if none. */
3817
3818tree
3819case_index_expr_type ()
3820{
3821 if (case_stack)
3822 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3823 return 0;
3824}
3825\f
3826/* Accumulate one case or default label inside a case or switch statement.
3827 VALUE is the value of the case (a null pointer, for a default label).
3828 The function CONVERTER, when applied to arguments T and V,
3829 converts the value V to the type T.
3830
3831 If not currently inside a case or switch statement, return 1 and do
3832 nothing. The caller will print a language-specific error message.
3833 If VALUE is a duplicate or overlaps, return 2 and do nothing
3834 except store the (first) duplicate node in *DUPLICATE.
3835 If VALUE is out of range, return 3 and do nothing.
3836 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3837 Return 0 on success.
3838
3839 Extended to handle range statements. */
3840
3841int
3842pushcase (value, converter, label, duplicate)
3843 register tree value;
3844 tree (*converter) PROTO((tree, tree));
3845 register tree label;
3846 tree *duplicate;
3847{
3848 register struct case_node **l;
3849 register struct case_node *n;
3850 tree index_type;
3851 tree nominal_type;
3852
3853 /* Fail if not inside a real case statement. */
3854 if (! (case_stack && case_stack->data.case_stmt.start))
3855 return 1;
3856
3857 if (stack_block_stack
3858 && stack_block_stack->depth > case_stack->depth)
3859 return 5;
3860
3861 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3862 nominal_type = case_stack->data.case_stmt.nominal_type;
3863
3864 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3865 if (index_type == error_mark_node)
3866 return 0;
3867
3868 /* Convert VALUE to the type in which the comparisons are nominally done. */
3869 if (value != 0)
3870 value = (*converter) (nominal_type, value);
3871
3872 /* If this is the first label, warn if any insns have been emitted. */
3873 if (case_stack->data.case_stmt.seenlabel == 0)
3874 {
3875 rtx insn;
3876 for (insn = case_stack->data.case_stmt.start;
3877 insn;
3878 insn = NEXT_INSN (insn))
3879 {
3880 if (GET_CODE (insn) == CODE_LABEL)
3881 break;
3882 if (GET_CODE (insn) != NOTE
3883 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3884 {
3885 warning ("unreachable code at beginning of %s",
3886 case_stack->data.case_stmt.printname);
3887 break;
3888 }
3889 }
3890 }
3891 case_stack->data.case_stmt.seenlabel = 1;
3892
3893 /* Fail if this value is out of range for the actual type of the index
3894 (which may be narrower than NOMINAL_TYPE). */
3895 if (value != 0 && ! int_fits_type_p (value, index_type))
3896 return 3;
3897
3898 /* Fail if this is a duplicate or overlaps another entry. */
3899 if (value == 0)
3900 {
3901 if (case_stack->data.case_stmt.default_label != 0)
3902 {
3903 *duplicate = case_stack->data.case_stmt.default_label;
3904 return 2;
3905 }
3906 case_stack->data.case_stmt.default_label = label;
3907 }
3908 else
3909 return add_case_node (value, value, label, duplicate);
3910
3911 expand_label (label);
3912 return 0;
3913}
3914
3915/* Like pushcase but this case applies to all values between VALUE1 and
3916 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3917 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3918 starts at VALUE1 and ends at the highest value of the index type.
3919 If both are NULL, this case applies to all values.
3920
3921 The return value is the same as that of pushcase but there is one
3922 additional error code: 4 means the specified range was empty. */
3923
3924int
3925pushcase_range (value1, value2, converter, label, duplicate)
3926 register tree value1, value2;
3927 tree (*converter) PROTO((tree, tree));
3928 register tree label;
3929 tree *duplicate;
3930{
3931 register struct case_node **l;
3932 register struct case_node *n;
3933 tree index_type;
3934 tree nominal_type;
3935
3936 /* Fail if not inside a real case statement. */
3937 if (! (case_stack && case_stack->data.case_stmt.start))
3938 return 1;
3939
3940 if (stack_block_stack
3941 && stack_block_stack->depth > case_stack->depth)
3942 return 5;
3943
3944 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3945 nominal_type = case_stack->data.case_stmt.nominal_type;
3946
3947 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3948 if (index_type == error_mark_node)
3949 return 0;
3950
3951 /* If this is the first label, warn if any insns have been emitted. */
3952 if (case_stack->data.case_stmt.seenlabel == 0)
3953 {
3954 rtx insn;
3955 for (insn = case_stack->data.case_stmt.start;
3956 insn;
3957 insn = NEXT_INSN (insn))
3958 {
3959 if (GET_CODE (insn) == CODE_LABEL)
3960 break;
3961 if (GET_CODE (insn) != NOTE
3962 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3963 {
3964 warning ("unreachable code at beginning of %s",
3965 case_stack->data.case_stmt.printname);
3966 break;
3967 }
3968 }
3969 }
3970 case_stack->data.case_stmt.seenlabel = 1;
3971
3972 /* Convert VALUEs to type in which the comparisons are nominally done
3973 and replace any unspecified value with the corresponding bound. */
3974 if (value1 == 0)
3975 value1 = TYPE_MIN_VALUE (index_type);
3976 if (value2 == 0)
3977 value2 = TYPE_MAX_VALUE (index_type);
3978
3979 /* Fail if the range is empty. Do this before any conversion since
3980 we want to allow out-of-range empty ranges. */
3981 if (value2 && tree_int_cst_lt (value2, value1))
3982 return 4;
3983
3984 value1 = (*converter) (nominal_type, value1);
3985
3986 /* If the max was unbounded, use the max of the nominal_type we are
3987 converting to. Do this after the < check above to suppress false
3988 positives. */
3989 if (!value2)
3990 value2 = TYPE_MAX_VALUE (nominal_type);
3991 value2 = (*converter) (nominal_type, value2);
3992
3993 /* Fail if these values are out of range. */
3994 if (TREE_CONSTANT_OVERFLOW (value1)
3995 || ! int_fits_type_p (value1, index_type))
3996 return 3;
3997
3998 if (TREE_CONSTANT_OVERFLOW (value2)
3999 || ! int_fits_type_p (value2, index_type))
4000 return 3;
4001
4002 return add_case_node (value1, value2, label, duplicate);
4003}
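/* Example (illustrative): for the GNU C range extension
   `case 1 ... 5:', the front end calls pushcase_range with VALUE1 == 1
   and VALUE2 == 5, while `case 7:' uses pushcase, which add_case_node
   treats as the degenerate range 7 ... 7.  A `case 4:' seen afterward
   would overlap the first range and draw return code 2.  */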
4004
4005/* Do the actual insertion of a case label for pushcase and pushcase_range
4006 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4007 slowdown for large switch statements. */
4008
4009static int
4010add_case_node (low, high, label, duplicate)
4011 tree low, high;
4012 tree label;
4013 tree *duplicate;
4014{
4015 struct case_node *p, **q, *r;
4016
4017 q = &case_stack->data.case_stmt.case_list;
4018 p = *q;
4019
4020 while (r = *q)
4021 {
4022 p = r;
4023
4024 /* Keep going past elements distinctly greater than HIGH. */
4025 if (tree_int_cst_lt (high, p->low))
4026 q = &p->left;
4027
4028 /* or distinctly less than LOW. */
4029 else if (tree_int_cst_lt (p->high, low))
4030 q = &p->right;
4031
4032 else
4033 {
4034 /* We have an overlap; this is an error. */
4035 *duplicate = p->code_label;
4036 return 2;
4037 }
4038 }
4039
4040 /* Add this label to the chain, and succeed.
4041 Copy LOW, HIGH so they are on temporary rather than momentary
4042 obstack and will thus survive till the end of the case statement. */
4043
4044 r = (struct case_node *) oballoc (sizeof (struct case_node));
4045 r->low = copy_node (low);
4046
4047 /* If the bounds are equal, turn this into the one-value case. */
4048
4049 if (tree_int_cst_equal (low, high))
4050 r->high = r->low;
4051 else
4052 {
4053 r->high = copy_node (high);
4054 case_stack->data.case_stmt.num_ranges++;
4055 }
4056
4057 r->code_label = label;
4058 expand_label (label);
4059
4060 *q = r;
4061 r->parent = p;
4062 r->left = 0;
4063 r->right = 0;
4064 r->balance = 0;
4065
4066 while (p)
4067 {
4068 struct case_node *s;
4069
4070 if (r == p->left)
4071 {
4072 int b;
4073
4074 if (! (b = p->balance))
4075 /* Growth propagation from left side. */
4076 p->balance = -1;
4077 else if (b < 0)
4078 {
4079 if (r->balance < 0)
4080 {
4081 /* R-Rotation */
4082 if (p->left = s = r->right)
4083 s->parent = p;
4084
4085 r->right = p;
4086 p->balance = 0;
4087 r->balance = 0;
4088 s = p->parent;
4089 p->parent = r;
4090
4091 if (r->parent = s)
4092 {
4093 if (s->left == p)
4094 s->left = r;
4095 else
4096 s->right = r;
4097 }
4098 else
4099 case_stack->data.case_stmt.case_list = r;
4100 }
4101 else
4102 /* r->balance == +1 */
4103 {
4104 /* LR-Rotation */
4105
4106 int b2;
4107 struct case_node *t = r->right;
4108
4109 if (p->left = s = t->right)
4110 s->parent = p;
4111
4112 t->right = p;
4113 if (r->right = s = t->left)
4114 s->parent = r;
4115
4116 t->left = r;
4117 b = t->balance;
4118 b2 = b < 0;
4119 p->balance = b2;
4120 b2 = -b2 - b;
4121 r->balance = b2;
4122 t->balance = 0;
4123 s = p->parent;
4124 p->parent = t;
4125 r->parent = t;
4126
4127 if (t->parent = s)
4128 {
4129 if (s->left == p)
4130 s->left = t;
4131 else
4132 s->right = t;
4133 }
4134 else
4135 case_stack->data.case_stmt.case_list = t;
4136 }
4137 break;
4138 }
4139
4140 else
4141 {
4142 /* p->balance == +1; growth of left side balances the node. */
4143 p->balance = 0;
4144 break;
4145 }
4146 }
4147 else
4148 /* r == p->right */
4149 {
4150 int b;
4151
4152 if (! (b = p->balance))
4153 /* Growth propagation from right side. */
4154 p->balance++;
4155 else if (b > 0)
4156 {
4157 if (r->balance > 0)
4158 {
4159 /* L-Rotation */
4160
4161 if (p->right = s = r->left)
4162 s->parent = p;
4163
4164 r->left = p;
4165 p->balance = 0;
4166 r->balance = 0;
4167 s = p->parent;
4168 p->parent = r;
4169 if (r->parent = s)
4170 {
4171 if (s->left == p)
4172 s->left = r;
4173 else
4174 s->right = r;
4175 }
4176
4177 else
4178 case_stack->data.case_stmt.case_list = r;
4179 }
4180
4181 else
4182 /* r->balance == -1 */
4183 {
4184 /* RL-Rotation */
4185 int b2;
4186 struct case_node *t = r->left;
4187
4188 if (p->right = s = t->left)
4189 s->parent = p;
4190
4191 t->left = p;
4192
4193 if (r->left = s = t->right)
4194 s->parent = r;
4195
4196 t->right = r;
4197 b = t->balance;
4198 b2 = b < 0;
4199 r->balance = b2;
4200 b2 = -b2 - b;
4201 p->balance = b2;
4202 t->balance = 0;
4203 s = p->parent;
4204 p->parent = t;
4205 r->parent = t;
4206
4207 if (t->parent = s)
4208 {
4209 if (s->left == p)
4210 s->left = t;
4211 else
4212 s->right = t;
4213 }
4214
4215 else
4216 case_stack->data.case_stmt.case_list = t;
4217 }
4218 break;
4219 }
4220 else
4221 {
4222 /* p->balance == -1; growth of right side balances the node. */
4223 p->balance = 0;
4224 break;
4225 }
4226 }
4227
4228 r = p;
4229 p = p->parent;
4230 }
4231
4232 return 0;
4233}
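/* Worked example (illustrative): inserting cases 1, 2, 3 in that order
   first grows the right side twice, and the L-Rotation above then
   lifts 2 to the root:

	1			  2
	 \	  becomes	 / \
	  2			1   3
	   \
	    3

   leaving every node with balance 0, so lookups stay O(log N).  */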
4234
4235\f
4236/* Returns the number of possible values of TYPE.
4237 Returns -1 if the number is unknown or variable.
4238 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4239 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4240 do not increase monotonically (there may be duplicates);
4241 to 1 if the values increase monotonically, but not always by 1;
4242 otherwise sets it to 0. */
4243
4244HOST_WIDE_INT
4245all_cases_count (type, spareness)
4246 tree type;
4247 int *spareness;
4248{
4249 HOST_WIDE_INT count, count_high = 0;
4250 *spareness = 0;
4251
4252 switch (TREE_CODE (type))
4253 {
4254 tree t;
4255 case BOOLEAN_TYPE:
4256 count = 2;
4257 break;
4258 case CHAR_TYPE:
4259 count = 1 << BITS_PER_UNIT;
4260 break;
4261 default:
4262 case INTEGER_TYPE:
4263 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4264 || TYPE_MAX_VALUE (type) == NULL
4265 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4266 return -1;
4267 else
4268 {
4269 /* count
4270 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4271 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4272 but with overflow checking. */
4273 tree mint = TYPE_MIN_VALUE (type);
4274 tree maxt = TYPE_MAX_VALUE (type);
4275 HOST_WIDE_INT lo, hi;
4276 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4277 &lo, &hi);
4278 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4279 lo, hi, &lo, &hi);
4280 add_double (lo, hi, 1, 0, &lo, &hi);
4281 if (hi != 0 || lo < 0)
4282 return -2;
4283 count = lo;
4284 }
4285 break;
4286 case ENUMERAL_TYPE:
4287 count = 0;
4288 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4289 {
4290 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4291 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4292 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4293 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4294 *spareness = 1;
4295 count++;
4296 }
4297 if (*spareness == 1)
4298 {
4299 tree prev = TREE_VALUE (TYPE_VALUES (type));
4300 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4301 {
4302 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4303 {
4304 *spareness = 2;
4305 break;
4306 }
4307 prev = TREE_VALUE (t);
4308 }
4309
4310 }
4311 }
4312 return count;
4313}
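/* Example (illustrative): for `enum e { A = 1, B = 3, C = 3 };' this
   returns 3 (three literals) and sets *SPARENESS to 2: the first scan
   sees that B's value 3 differs from MIN + 1 (the values are not
   dense), and the second scan sees that C does not exceed B (the
   values are not strictly increasing).  */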
4314
4315
4316#define BITARRAY_TEST(ARRAY, INDEX) \
4317 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4318 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4319#define BITARRAY_SET(ARRAY, INDEX) \
4320 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4321 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
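/* For example, on the common hosts where HOST_BITS_PER_CHAR is 8,
   BITARRAY_SET (seen, 9) sets bit 9 % 8 == 1 in byte 9 / 8 == 1 of
   SEEN, and BITARRAY_TEST (seen, 9) then yields nonzero.  (Purely
   illustrative; the macros work for any byte width.)  */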
4322
4323/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4324 with the case values we have seen, assuming the case expression
4325 has the given TYPE.
4326 SPARSENESS is as determined by all_cases_count.
4327
4328 The time needed is proportional to COUNT, unless
4329 SPARSENESS is 2, in which case quadratic time is needed. */
4330
4331void
4332mark_seen_cases (type, cases_seen, count, sparseness)
4333 tree type;
4334 unsigned char *cases_seen;
4335 long count;
4336 int sparseness;
4337{
4338 long i;
4339
4340 tree next_node_to_try = NULL_TREE;
4341 long next_node_offset = 0;
4342
4343 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4344 tree val = make_node (INTEGER_CST);
4345 TREE_TYPE (val) = type;
4346 if (! root)
4347 ; /* Do nothing */
4348 else if (sparseness == 2)
4349 {
4350 tree t;
4351 HOST_WIDE_INT xlo;
4352
4353 /* This less efficient loop is only needed to handle
4354 duplicate case values (multiple enum constants
4355 with the same value). */
4356 TREE_TYPE (val) = TREE_TYPE (root->low);
4357 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4358 t = TREE_CHAIN (t), xlo++)
4359 {
4360 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4361 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4362 n = root;
4363 do
4364 {
4365 /* Keep going past elements distinctly greater than VAL. */
4366 if (tree_int_cst_lt (val, n->low))
4367 n = n->left;
4368
4369 /* or distinctly less than VAL. */
4370 else if (tree_int_cst_lt (n->high, val))
4371 n = n->right;
4372
4373 else
4374 {
4375 /* We have found a matching range. */
4376 BITARRAY_SET (cases_seen, xlo);
4377 break;
4378 }
4379 }
4380 while (n);
4381 }
4382 }
4383 else
4384 {
4385 if (root->left)
4386 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4387 for (n = root; n; n = n->right)
4388 {
4389 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4390 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4391 while ( ! tree_int_cst_lt (n->high, val))
4392 {
4393 /* Calculate (into xlo) the "offset" of the integer (val).
4394 The element with lowest value has offset 0, the next smallest
4395 element has offset 1, etc. */
4396
4397 HOST_WIDE_INT xlo, xhi;
4398 tree t;
4399 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4400 {
4401 /* The TYPE_VALUES will be in increasing order, so
4402 starting searching where we last ended. */
4403 t = next_node_to_try;
4404 xlo = next_node_offset;
4405 xhi = 0;
4406 for (;;)
4407 {
4408 if (t == NULL_TREE)
4409 {
4410 t = TYPE_VALUES (type);
4411 xlo = 0;
4412 }
4413 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4414 {
4415 next_node_to_try = TREE_CHAIN (t);
4416 next_node_offset = xlo + 1;
4417 break;
4418 }
4419 xlo++;
4420 t = TREE_CHAIN (t);
4421 if (t == next_node_to_try)
4422 {
4423 xlo = -1;
4424 break;
4425 }
4426 }
4427 }
4428 else
4429 {
4430 t = TYPE_MIN_VALUE (type);
4431 if (t)
4432 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4433 &xlo, &xhi);
4434 else
4435 xlo = xhi = 0;
4436 add_double (xlo, xhi,
4437 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4438 &xlo, &xhi);
4439 }
4440
4441 if (xhi == 0 && xlo >= 0 && xlo < count)
4442 BITARRAY_SET (cases_seen, xlo);
4443 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4444 1, 0,
4445 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4446 }
4447 }
4448 }
4449}
4450
4451/* Called when the index of a switch statement is an enumerated type
4452 and there is no default label.
4453
4454 Checks that all enumeration literals are covered by the case
4455 expressions of a switch. Also, warn if there are any extra
4456 switch cases that are *not* elements of the enumerated type.
4457
4458 If all enumeration literals were covered by the case expressions,
4459 turn one of the expressions into the default expression since it should
4460 not be possible to fall through such a switch. */
4461
4462void
4463check_for_full_enumeration_handling (type)
4464 tree type;
4465{
4466 register struct case_node *n;
4467 register struct case_node **l;
4468 register tree chain;
4469 int all_values = 1;
4470
4471 /* True iff the selector type is a numbered set mode. */
4472 int sparseness = 0;
4473
4474 /* The number of possible selector values. */
4475 HOST_WIDE_INT size;
4476
4477 /* For each possible selector value, a one iff it has been matched
4478 by a case value alternative. */
4479 unsigned char *cases_seen;
4480
4481 /* The allocated size of cases_seen, in chars. */
4482 long bytes_needed;
4483 tree t;
4484
94d6511c
PB
4485 if (! warn_switch)
4486 return;
4487
4488 size = all_cases_count (type, &sparseness);
4489 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4490
4491 if (size > 0 && size < 600000
4492 /* We deliberately use malloc here - not xmalloc. */
4493 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4494 {
4495 long i;
4496 tree v = TYPE_VALUES (type);
4497 bzero (cases_seen, bytes_needed);
4498
4499 /* The time complexity of this code is normally O(N), where
4500 N is the number of members in the enumerated type.
4501 However, if type is an ENUMERAL_TYPE whose values do not
4502 increase monotonically, O(N*log(N)) time may be needed. */
4503
4504 mark_seen_cases (type, cases_seen, size, sparseness);
4505
4506 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4507 {
4508 if (BITARRAY_TEST (cases_seen, i) == 0)
4509 warning ("enumeration value `%s' not handled in switch",
4510 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4511 }
4512
4513 free (cases_seen);
4514 }
4515
4516 /* Now we go the other way around; we warn if there are case
4517 expressions that don't correspond to enumerators. This can
4518 occur since C and C++ don't enforce type-checking of
4519 assignments to enumeration variables. */
4520
4521 if (case_stack->data.case_stmt.case_list
4522 && case_stack->data.case_stmt.case_list->left)
4523 case_stack->data.case_stmt.case_list
4524 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4525 if (warn_switch)
4526 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4527 {
4528 for (chain = TYPE_VALUES (type);
4529 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4530 chain = TREE_CHAIN (chain))
4531 ;
4532
4533 if (!chain)
4534 {
4535 if (TYPE_NAME (type) == 0)
4536 warning ("case value `%d' not in enumerated type",
4537 TREE_INT_CST_LOW (n->low));
4538 else
4539 warning ("case value `%d' not in enumerated type `%s'",
4540 TREE_INT_CST_LOW (n->low),
4541 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4542 == IDENTIFIER_NODE)
4543 ? TYPE_NAME (type)
4544 : DECL_NAME (TYPE_NAME (type))));
4545 }
4546 if (!tree_int_cst_equal (n->low, n->high))
4547 {
4548 for (chain = TYPE_VALUES (type);
4549 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4550 chain = TREE_CHAIN (chain))
4551 ;
4552
4553 if (!chain)
4554 {
4555 if (TYPE_NAME (type) == 0)
4556 warning ("case value `%d' not in enumerated type",
4557 TREE_INT_CST_LOW (n->high));
4558 else
4559 warning ("case value `%d' not in enumerated type `%s'",
4560 TREE_INT_CST_LOW (n->high),
4561 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4562 == IDENTIFIER_NODE)
4563 ? TYPE_NAME (type)
4564 : DECL_NAME (TYPE_NAME (type))));
4565 }
4566 }
4567 }
4568
4569#if 0
4570 /* ??? This optimization is disabled because it causes valid programs to
4571 fail. ANSI C does not guarantee that an expression with enum type
4572 will have a value that is the same as one of the enumeration literals. */
4573
4574 /* If all values were found as case labels, make one of them the default
4575 label. Thus, this switch will never fall through. We arbitrarily pick
4576 the last one to make the default since this is likely the most
4577 efficient choice. */
4578
4579 if (all_values)
4580 {
4581 for (l = &case_stack->data.case_stmt.case_list;
4582 (*l)->right != 0;
4583 l = &(*l)->right)
4584 ;
4585
4586 case_stack->data.case_stmt.default_label = (*l)->code_label;
4587 *l = 0;
4588 }
4589#endif /* 0 */
4590}
4591
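/* Example of the warnings above (illustrative): with -Wswitch,

	enum e { A, B, C };
	switch (x) { case A: case B: break; }

   draws "enumeration value `C' not handled in switch", while a
   `case 42:' in the same switch would draw "case value `42' not in
   enumerated type `e'".  */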
4592\f
4593/* Terminate a case (Pascal) or switch (C) statement
4594 in which ORIG_INDEX is the expression to be tested.
4595 Generate the code to test it and jump to the right place. */
4596
4597void
4598expand_end_case (orig_index)
4599 tree orig_index;
4600{
4601 tree minval, maxval, range, orig_minval;
4602 rtx default_label = 0;
4603 register struct case_node *n;
4604 int count;
4605 rtx index;
4606 rtx table_label;
4607 int ncases;
4608 rtx *labelvec;
4609 register int i;
4610 rtx before_case;
4611 register struct nesting *thiscase = case_stack;
4612 tree index_expr, index_type;
4613 int unsignedp;
4614
4615 table_label = gen_label_rtx ();
4616 index_expr = thiscase->data.case_stmt.index_expr;
4617 index_type = TREE_TYPE (index_expr);
4618 unsignedp = TREE_UNSIGNED (index_type);
4619
4620 do_pending_stack_adjust ();
4621
4622 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4623 if (index_type != error_mark_node)
4624 {
4625 /* If switch expression was an enumerated type, check that all
4626 enumeration literals are covered by the cases.
4627 No sense trying this if there's a default case, however. */
4628
4629 if (!thiscase->data.case_stmt.default_label
4630 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4631 && TREE_CODE (index_expr) != INTEGER_CST)
4632 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4633
4634 /* If this is the first label, warn if any insns have been emitted. */
4635 if (thiscase->data.case_stmt.seenlabel == 0)
4636 {
4637 rtx insn;
4638 for (insn = get_last_insn ();
4639 insn != case_stack->data.case_stmt.start;
4640 insn = PREV_INSN (insn))
4641 if (GET_CODE (insn) != NOTE
4642 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4643 {
4644 warning ("unreachable code at beginning of %s",
4645 case_stack->data.case_stmt.printname);
4646 break;
4647 }
4648 }
4649
4650 /* If we don't have a default-label, create one here,
4651 after the body of the switch. */
4652 if (thiscase->data.case_stmt.default_label == 0)
4653 {
4654 thiscase->data.case_stmt.default_label
4655 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4656 expand_label (thiscase->data.case_stmt.default_label);
4657 }
4658 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4659
4660 before_case = get_last_insn ();
4661
4662 if (thiscase->data.case_stmt.case_list
4663 && thiscase->data.case_stmt.case_list->left)
4664 thiscase->data.case_stmt.case_list
4665 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4666
4667 /* Simplify the case-list before we count it. */
4668 group_case_nodes (thiscase->data.case_stmt.case_list);
4669
4670 /* Get upper and lower bounds of case values.
4671 Also convert all the case values to the index expr's data type. */
4672
4673 count = 0;
4674 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4675 {
4676 /* Check low and high label values are integers. */
4677 if (TREE_CODE (n->low) != INTEGER_CST)
4678 abort ();
4679 if (TREE_CODE (n->high) != INTEGER_CST)
4680 abort ();
4681
4682 n->low = convert (index_type, n->low);
4683 n->high = convert (index_type, n->high);
4684
4685 /* Count the elements and track the largest and smallest
4686 of them (treating them as signed even if they are not). */
4687 if (count++ == 0)
4688 {
4689 minval = n->low;
4690 maxval = n->high;
4691 }
4692 else
4693 {
4694 if (INT_CST_LT (n->low, minval))
4695 minval = n->low;
4696 if (INT_CST_LT (maxval, n->high))
4697 maxval = n->high;
4698 }
4699 /* A range counts double, since it requires two compares. */
4700 if (! tree_int_cst_equal (n->low, n->high))
4701 count++;
4702 }
4703
4704 orig_minval = minval;
4705
4706 /* Compute span of values. */
4707 if (count != 0)
4708 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4709
4710 end_cleanup_deferral ();
4711
4712 if (count == 0)
4713 {
4714 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4715 emit_queue ();
4716 emit_jump (default_label);
4717 }
4718
4719 /* If range of values is much bigger than number of values,
4720 make a sequence of conditional branches instead of a dispatch.
4721 If the switch-index is a constant, do it this way
4722 because we can optimize it. */
4723
4724#ifndef CASE_VALUES_THRESHOLD
4725#ifdef HAVE_casesi
4726#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4727#else
4728 /* If machine does not have a case insn that compares the
4729 bounds, this means extra overhead for dispatch tables
4730 which raises the threshold for using them. */
4731#define CASE_VALUES_THRESHOLD 5
4732#endif /* HAVE_casesi */
4733#endif /* CASE_VALUES_THRESHOLD */
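/* Illustrative numbers for the heuristic tested below: with the
   default threshold of 5, a switch with 4 scattered cases always
   becomes compare-and-branch code; 50 cases spanning the range 0..99
   get a dispatch table (100 is not more than 10 * 50); and 50 cases
   spread over 0..9999 fall back to the binary decision tree.  */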
4734
4735 else if (TREE_INT_CST_HIGH (range) != 0
4736 || count < CASE_VALUES_THRESHOLD
4737 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4738 > 10 * count)
4739#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4740 || flag_pic
4741#endif
4742 || TREE_CODE (index_expr) == INTEGER_CST
4743 /* These will reduce to a constant. */
4744 || (TREE_CODE (index_expr) == CALL_EXPR
4745 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4746 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4747 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4748 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4749 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4750 {
4751 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4752
4753 /* If the index is a short or char that we do not have
4754 an insn to handle comparisons directly, convert it to
4755 a full integer now, rather than letting each comparison
4756 generate the conversion. */
4757
4758 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4759 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4760 == CODE_FOR_nothing))
4761 {
4762 enum machine_mode wider_mode;
4763 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4764 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4765 if (cmp_optab->handlers[(int) wider_mode].insn_code
4766 != CODE_FOR_nothing)
4767 {
4768 index = convert_to_mode (wider_mode, index, unsignedp);
4769 break;
4770 }
4771 }
4772
4773 emit_queue ();
4774 do_pending_stack_adjust ();
4775
4776 index = protect_from_queue (index, 0);
4777 if (GET_CODE (index) == MEM)
4778 index = copy_to_reg (index);
4779 if (GET_CODE (index) == CONST_INT
4780 || TREE_CODE (index_expr) == INTEGER_CST)
4781 {
4782 /* Make a tree node with the proper constant value
4783 if we don't already have one. */
4784 if (TREE_CODE (index_expr) != INTEGER_CST)
4785 {
4786 index_expr
4787 = build_int_2 (INTVAL (index),
4788 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4789 index_expr = convert (index_type, index_expr);
4790 }
4791
4792 /* For constant index expressions we need only
4793 issue an unconditional branch to the appropriate
4794 target code. The job of removing any unreachable
4795 code is left to the optimization phase if the
4796 "-O" option is specified. */
4797 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4798 if (! tree_int_cst_lt (index_expr, n->low)
4799 && ! tree_int_cst_lt (n->high, index_expr))
4800 break;
4801
4802 if (n)
4803 emit_jump (label_rtx (n->code_label));
4804 else
4805 emit_jump (default_label);
4806 }
4807 else
4808 {
4809 /* If the index expression is not constant we generate
4810 a binary decision tree to select the appropriate
4811 target code. This is done as follows:
4812
4813 The list of cases is rearranged into a binary tree,
4814 nearly optimal assuming equal probability for each case.
4815
4816 The tree is transformed into RTL, eliminating
4817 redundant test conditions at the same time.
4818
4819 If program flow could reach the end of the
4820 decision tree an unconditional jump to the
4821 default code is emitted. */
4822
4823 use_cost_table
4824 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4825 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4826 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4827 NULL_PTR);
4828 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4829 default_label, index_type);
4830 emit_jump_if_reachable (default_label);
4831 }
4832 }
4833 else
4834 {
4835 int win = 0;
4836#ifdef HAVE_casesi
4837 if (HAVE_casesi)
4838 {
4839 enum machine_mode index_mode = SImode;
4840 int index_bits = GET_MODE_BITSIZE (index_mode);
4841 rtx op1, op2;
4842 enum machine_mode op_mode;
4843
4844 /* Convert the index to SImode. */
4845 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4846 > GET_MODE_BITSIZE (index_mode))
4847 {
4848 enum machine_mode omode = TYPE_MODE (index_type);
4849 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4850
4851 /* We must handle the endpoints in the original mode. */
4852 index_expr = build (MINUS_EXPR, index_type,
4853 index_expr, minval);
4854 minval = integer_zero_node;
4855 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4856 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4857 emit_jump_insn (gen_bltu (default_label));
4858 /* Now we can safely truncate. */
4859 index = convert_to_mode (index_mode, index, 0);
4860 }
4861 else
4862 {
4863 if (TYPE_MODE (index_type) != index_mode)
4864 {
4865 index_expr = convert (type_for_size (index_bits, 0),
4866 index_expr);
4867 index_type = TREE_TYPE (index_expr);
4868 }
4869
4870 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4871 }
4872 emit_queue ();
4873 index = protect_from_queue (index, 0);
4874 do_pending_stack_adjust ();
4875
4876 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4877 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4878 (index, op_mode))
4879 index = copy_to_mode_reg (op_mode, index);
4880
4881 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4882
4883 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4884 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4885 (op1, op_mode))
4886 op1 = copy_to_mode_reg (op_mode, op1);
4887
4888 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4889
4890 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4891 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4892 (op2, op_mode))
4893 op2 = copy_to_mode_reg (op_mode, op2);
4894
4895 emit_jump_insn (gen_casesi (index, op1, op2,
4896 table_label, default_label));
4897 win = 1;
4898 }
4899#endif
4900#ifdef HAVE_tablejump
4901 if (! win && HAVE_tablejump)
4902 {
4903 index_expr = convert (thiscase->data.case_stmt.nominal_type,
1b0cb6fc 4904 fold (build (MINUS_EXPR, index_type,
b4ac57ab 4905 index_expr, minval)));
d3b35d75 4906 index_type = TREE_TYPE (index_expr);
37366632 4907 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 4908 emit_queue ();
af2682ef 4909 index = protect_from_queue (index, 0);
4910 do_pending_stack_adjust ();
4911
1b0cb6fc 4912 do_tablejump (index, TYPE_MODE (index_type),
37366632 4913 expand_expr (range, NULL_RTX, VOIDmode, 0),
4914 table_label, default_label);
4915 win = 1;
4916 }
4917#endif
4918 if (! win)
4919 abort ();
4920
4921 /* Get table of labels to jump to, in order of case index. */
4922
4923 ncases = TREE_INT_CST_LOW (range) + 1;
4924 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4c9a05bc 4925 bzero ((char *) labelvec, ncases * sizeof (rtx));
4926
4927 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4928 {
37366632 4929 register HOST_WIDE_INT i
3474db0e 4930 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4931
4932 while (1)
4933 {
4934 labelvec[i]
38a448ca 4935 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3474db0e 4936 if (i + TREE_INT_CST_LOW (orig_minval)
4937 == TREE_INT_CST_LOW (n->high))
4938 break;
4939 i++;
4940 }
4941 }
4942
4943 /* Fill in the gaps with the default. */
4944 for (i = 0; i < ncases; i++)
4945 if (labelvec[i] == 0)
38a448ca 4946 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
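   /* For instance, a switch with cases 1 and 3..4 over the range 1..5
      leaves labelvec as, roughly,

        { L1, Ldefault, L3, L3, Ldefault }

      where each entry is a LABEL_REF and Ldefault stands for the
      default label; the vector is emitted just below as an ADDR_VEC
      or ADDR_DIFF_VEC dispatch table.  */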
4947
 4948	  /* Output the table.  */
4949 emit_label (table_label);
4950
18543a22 4951 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4952 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4953 gen_rtx_LABEL_REF (Pmode, table_label),
4954 gen_rtvec_v (ncases, labelvec)));
28d81abb 4955 else
4956 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4957 gen_rtvec_v (ncases, labelvec)));
4958
4959 /* If the case insn drops through the table,
4960 after the table we must jump to the default-label.
4961 Otherwise record no drop-through after the table. */
4962#ifdef CASE_DROPS_THROUGH
4963 emit_jump (default_label);
4964#else
4965 emit_barrier ();
4966#endif
4967 }
4968
4969 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4970 reorder_insns (before_case, get_last_insn (),
4971 thiscase->data.case_stmt.start);
4972 }
4c581243 4973 else
956d6950 4974 end_cleanup_deferral ();
1b0cb6fc 4975
4976 if (thiscase->exit_label)
4977 emit_label (thiscase->exit_label);
4978
4979 POPSTACK (case_stack);
4980
4981 free_temp_slots ();
4982}
4983
4984/* Convert the tree NODE into a list linked by the right field, with the left
4985 field zeroed. RIGHT is used for recursion; it is a list to be placed
4986 rightmost in the resulting list. */
4987
4988static struct case_node *
4989case_tree2list (node, right)
4990 struct case_node *node, *right;
4991{
4992 struct case_node *left;
4993
4994 if (node->right)
4995 right = case_tree2list (node->right, right);
4996
4997 node->right = right;
4998 if (left = node->left)
4999 {
5000 node->left = 0;
5001 return case_tree2list (left, node);
5002 }
5003
5004 return node;
5005}
ca695ac9 5006
5007/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5008
5009static void
5010do_jump_if_equal (op1, op2, label, unsignedp)
5011 rtx op1, op2, label;
5012 int unsignedp;
5013{
5014 if (GET_CODE (op1) == CONST_INT
5015 && GET_CODE (op2) == CONST_INT)
5016 {
5017 if (INTVAL (op1) == INTVAL (op2))
5018 emit_jump (label);
5019 }
5020 else
5021 {
5022 enum machine_mode mode = GET_MODE (op1);
5023 if (mode == VOIDmode)
5024 mode = GET_MODE (op2);
37366632 5025 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5026 emit_jump_insn (gen_beq (label));
5027 }
5028}
5029\f
5030/* Not all case values are encountered equally. This function
5031 uses a heuristic to weight case labels, in cases where that
5032 looks like a reasonable thing to do.
5033
5034 Right now, all we try to guess is text, and we establish the
5035 following weights:
5036
5037 chars above space: 16
5038 digits: 16
5039 default: 12
5040 space, punct: 8
5041 tab: 4
5042 newline: 2
5043 other "\" chars: 1
5044 remaining chars: 0
5045
5046 If we find any cases in the switch that are not either -1 or in the range
5047 of valid ASCII characters, or are control characters other than those
 5048	 commonly used with "\", don't treat this switch as scanning text.
5049
5050 Return 1 if these nodes are suitable for cost estimation, otherwise
5051 return 0. */
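/* E.g. in  switch (c) { case 'a': case '0': case ' ': case '\n': ... }
   the four labels would get weights of roughly 16, 16, 8 and 2,
   shifting the balance point of the decision tree toward the
   characters a text-scanning loop is likely to see most often.  */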
5052
5053static int
5054estimate_case_costs (node)
5055 case_node_ptr node;
5056{
5057 tree min_ascii = build_int_2 (-1, -1);
5058 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5059 case_node_ptr n;
5060 int i;
5061
5062 /* If we haven't already made the cost table, make it now. Note that the
5063 lower bound of the table is -1, not zero. */
5064
5065 if (cost_table == NULL)
5066 {
5067 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
4c9a05bc 5068 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5069
5070 for (i = 0; i < 128; i++)
5071 {
5072 if (isalnum (i))
5073 cost_table[i] = 16;
5074 else if (ispunct (i))
5075 cost_table[i] = 8;
5076 else if (iscntrl (i))
5077 cost_table[i] = -1;
5078 }
5079
5080 cost_table[' '] = 8;
5081 cost_table['\t'] = 4;
5082 cost_table['\0'] = 4;
5083 cost_table['\n'] = 2;
5084 cost_table['\f'] = 1;
5085 cost_table['\v'] = 1;
5086 cost_table['\b'] = 1;
5087 }
5088
5089 /* See if all the case expressions look like text. It is text if the
5090 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5091 as signed arithmetic since we don't want to ever access cost_table with a
5092 value less than -1. Also check that none of the constants in a range
5093 are strange control characters. */
5094
5095 for (n = node; n; n = n->right)
5096 {
5097 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5098 return 0;
5099
5100 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5101 if (cost_table[i] < 0)
5102 return 0;
5103 }
5104
5105 /* All interesting values are within the range of interesting
5106 ASCII characters. */
5107 return 1;
5108}
5109
5110/* Scan an ordered list of case nodes
5111 combining those with consecutive values or ranges.
5112
5113 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5114
5115static void
5116group_case_nodes (head)
5117 case_node_ptr head;
5118{
5119 case_node_ptr node = head;
5120
5121 while (node)
5122 {
5123 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5124 rtx lb2;
5125 case_node_ptr np = node;
5126
5127 /* Try to group the successors of NODE with NODE. */
5128 while (((np = np->right) != 0)
5129 /* Do they jump to the same place? */
5130 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5131 || (lb != 0 && lb2 != 0
5132 && simplejump_p (lb)
5133 && simplejump_p (lb2)
5134 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5135 SET_SRC (PATTERN (lb2)))))
5136 /* Are their ranges consecutive? */
5137 && tree_int_cst_equal (np->low,
5138 fold (build (PLUS_EXPR,
5139 TREE_TYPE (node->high),
5140 node->high,
5141 integer_one_node)))
5142 /* An overflow is not consecutive. */
5143 && tree_int_cst_lt (node->high,
5144 fold (build (PLUS_EXPR,
5145 TREE_TYPE (node->high),
5146 node->high,
5147 integer_one_node))))
5148 {
5149 node->high = np->high;
5150 }
5151 /* NP is the first node after NODE which can't be grouped with it.
5152 Delete the nodes in between, and move on to that node. */
5153 node->right = np;
5154 node = np;
5155 }
5156}
5157
5158/* Take an ordered list of case nodes
5159 and transform them into a near optimal binary tree,
6dc42e49 5160 on the assumption that any target code selection value is as
5161 likely as any other.
5162
5163 The transformation is performed by splitting the ordered
5164 list into two equal sections plus a pivot. The parts are
5165 then attached to the pivot as left and right branches. Each
 5166	 branch is then transformed recursively.  */
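/* E.g. the ordered list 1,2,3,4,5,6,7 would be split at 4, with 1,2,3
   and 5,6,7 becoming the left and right branches; splitting those in
   turn yields a tree of depth three in place of a seven-way chain of
   comparisons.  */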
5167
5168static void
5169balance_case_nodes (head, parent)
5170 case_node_ptr *head;
5171 case_node_ptr parent;
5172{
5173 register case_node_ptr np;
5174
5175 np = *head;
5176 if (np)
5177 {
5178 int cost = 0;
5179 int i = 0;
5180 int ranges = 0;
5181 register case_node_ptr *npp;
5182 case_node_ptr left;
5183
 5184	      /* Count the number of entries on this branch.  Also count the ranges. */
5185
5186 while (np)
5187 {
5188 if (!tree_int_cst_equal (np->low, np->high))
5189 {
5190 ranges++;
5191 if (use_cost_table)
5192 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5193 }
5194
5195 if (use_cost_table)
5196 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5197
5198 i++;
5199 np = np->right;
5200 }
5201
5202 if (i > 2)
5203 {
5204 /* Split this list if it is long enough for that to help. */
5205 npp = head;
5206 left = *npp;
5207 if (use_cost_table)
5208 {
 5209	      /* Find the place in the list that bisects the list's total cost;
 5210	         here I gets half the total cost. */
5211 int n_moved = 0;
5212 i = (cost + 1) / 2;
5213 while (1)
5214 {
5215 /* Skip nodes while their cost does not reach that amount. */
5216 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5217 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5218 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5219 if (i <= 0)
5220 break;
5221 npp = &(*npp)->right;
5222 n_moved += 1;
5223 }
5224 if (n_moved == 0)
5225 {
5226 /* Leave this branch lopsided, but optimize left-hand
5227 side and fill in `parent' fields for right-hand side. */
5228 np = *head;
5229 np->parent = parent;
5230 balance_case_nodes (&np->left, np);
5231 for (; np->right; np = np->right)
5232 np->right->parent = np;
5233 return;
5234 }
5235 }
5236 /* If there are just three nodes, split at the middle one. */
5237 else if (i == 3)
5238 npp = &(*npp)->right;
5239 else
5240 {
5241 /* Find the place in the list that bisects the list's total cost,
5242 where ranges count as 2.
5243 Here I gets half the total cost. */
5244 i = (i + ranges + 1) / 2;
5245 while (1)
5246 {
5247 /* Skip nodes while their cost does not reach that amount. */
5248 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5249 i--;
5250 i--;
5251 if (i <= 0)
5252 break;
5253 npp = &(*npp)->right;
5254 }
5255 }
5256 *head = np = *npp;
5257 *npp = 0;
5258 np->parent = parent;
5259 np->left = left;
5260
5261 /* Optimize each of the two split parts. */
5262 balance_case_nodes (&np->left, np);
5263 balance_case_nodes (&np->right, np);
5264 }
5265 else
5266 {
5267 /* Else leave this branch as one level,
5268 but fill in `parent' fields. */
5269 np = *head;
5270 np->parent = parent;
5271 for (; np->right; np = np->right)
5272 np->right->parent = np;
5273 }
5274 }
5275}
5276\f
5277/* Search the parent sections of the case node tree
5278 to see if a test for the lower bound of NODE would be redundant.
5279 INDEX_TYPE is the type of the index expression.
5280
5281 The instructions to generate the case decision tree are
5282 output in the same order as nodes are processed so it is
5283 known that if a parent node checks the range of the current
5284 node minus one that the current node is bounded at its lower
5285 span. Thus the test would be redundant. */
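/* E.g. if this node covers the range 50..60 and some parent already
   compared the index against 49, control can only reach this node with
   a value of at least 50, so no lower-bound test need be emitted.  */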
5286
5287static int
5288node_has_low_bound (node, index_type)
5289 case_node_ptr node;
5290 tree index_type;
5291{
5292 tree low_minus_one;
5293 case_node_ptr pnode;
5294
5295 /* If the lower bound of this node is the lowest value in the index type,
5296 we need not test it. */
5297
5298 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5299 return 1;
5300
5301 /* If this node has a left branch, the value at the left must be less
5302 than that at this node, so it cannot be bounded at the bottom and
5303 we need not bother testing any further. */
5304
5305 if (node->left)
5306 return 0;
5307
5308 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5309 node->low, integer_one_node));
5310
5311 /* If the subtraction above overflowed, we can't verify anything.
5312 Otherwise, look for a parent that tests our value - 1. */
5313
5314 if (! tree_int_cst_lt (low_minus_one, node->low))
5315 return 0;
5316
5317 for (pnode = node->parent; pnode; pnode = pnode->parent)
5318 if (tree_int_cst_equal (low_minus_one, pnode->high))
5319 return 1;
5320
5321 return 0;
5322}
5323
5324/* Search the parent sections of the case node tree
5325 to see if a test for the upper bound of NODE would be redundant.
5326 INDEX_TYPE is the type of the index expression.
5327
5328 The instructions to generate the case decision tree are
5329 output in the same order as nodes are processed so it is
5330 known that if a parent node checks the range of the current
5331 node plus one that the current node is bounded at its upper
5332 span. Thus the test would be redundant. */
5333
5334static int
5335node_has_high_bound (node, index_type)
5336 case_node_ptr node;
5337 tree index_type;
5338{
5339 tree high_plus_one;
5340 case_node_ptr pnode;
5341
5342 /* If there is no upper bound, obviously no test is needed. */
5343
5344 if (TYPE_MAX_VALUE (index_type) == NULL)
5345 return 1;
5346
5347 /* If the upper bound of this node is the highest value in the type
5348 of the index expression, we need not test against it. */
5349
5350 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5351 return 1;
5352
5353 /* If this node has a right branch, the value at the right must be greater
5354 than that at this node, so it cannot be bounded at the top and
5355 we need not bother testing any further. */
5356
5357 if (node->right)
5358 return 0;
5359
5360 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5361 node->high, integer_one_node));
5362
5363 /* If the addition above overflowed, we can't verify anything.
5364 Otherwise, look for a parent that tests our value + 1. */
5365
5366 if (! tree_int_cst_lt (node->high, high_plus_one))
5367 return 0;
5368
5369 for (pnode = node->parent; pnode; pnode = pnode->parent)
5370 if (tree_int_cst_equal (high_plus_one, pnode->low))
5371 return 1;
5372
5373 return 0;
5374}
5375
5376/* Search the parent sections of the
5377 case node tree to see if both tests for the upper and lower
5378 bounds of NODE would be redundant. */
5379
5380static int
5381node_is_bounded (node, index_type)
5382 case_node_ptr node;
5383 tree index_type;
5384{
5385 return (node_has_low_bound (node, index_type)
5386 && node_has_high_bound (node, index_type));
5387}
5388
5389/* Emit an unconditional jump to LABEL unless it would be dead code. */
5390
5391static void
5392emit_jump_if_reachable (label)
5393 rtx label;
5394{
5395 if (GET_CODE (get_last_insn ()) != BARRIER)
5396 emit_jump (label);
5397}
5398\f
5399/* Emit step-by-step code to select a case for the value of INDEX.
5400 The thus generated decision tree follows the form of the
5401 case-node binary tree NODE, whose nodes represent test conditions.
5402 INDEX_TYPE is the type of the index of the switch.
5403
5404 Care is taken to prune redundant tests from the decision tree
5405 by detecting any boundary conditions already checked by
5406 emitted rtx. (See node_has_high_bound, node_has_low_bound
5407 and node_is_bounded, above.)
5408
5409 Where the test conditions can be shown to be redundant we emit
5410 an unconditional jump to the target code. As a further
5411 optimization, the subordinates of a tree node are examined to
5412 check for bounded nodes. In this case conditional and/or
5413 unconditional jumps as a result of the boundary check for the
 5414	 current node are arranged to target the subordinates'
 5415	 associated code for out of bound conditions on the current node.
5416
f72aed24 5417 We can assume that when control reaches the code generated here,
5418 the index value has already been compared with the parents
5419 of this node, and determined to be on the same side of each parent
5420 as this node is. Thus, if this node tests for the value 51,
5421 and a parent tested for 52, we don't need to consider
5422 the possibility of a value greater than 51. If another parent
5423 tests for the value 50, then this node need not test anything. */
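/* For a switch over the three values 1, 2 and 3 this emits, roughly:

     if (index == 2) goto L2;
     if (index > 2) goto Ltest;
     if (index == 1) goto L1;
     goto Ldefault;
   Ltest:
     if (index == 3) goto L3;
     goto Ldefault;

   where any comparison already implied by a parent node is omitted.  */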
5424
5425static void
5426emit_case_nodes (index, node, default_label, index_type)
5427 rtx index;
5428 case_node_ptr node;
5429 rtx default_label;
5430 tree index_type;
5431{
5432 /* If INDEX has an unsigned type, we must make unsigned branches. */
5433 int unsignedp = TREE_UNSIGNED (index_type);
5434 typedef rtx rtx_function ();
5435 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5436 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5437 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5438 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5439 enum machine_mode mode = GET_MODE (index);
5440
5441 /* See if our parents have already tested everything for us.
5442 If they have, emit an unconditional jump for this node. */
5443 if (node_is_bounded (node, index_type))
5444 emit_jump (label_rtx (node->code_label));
5445
5446 else if (tree_int_cst_equal (node->low, node->high))
5447 {
5448 /* Node is single valued. First see if the index expression matches
0f41302f 5449 this node and then check our children, if any. */
28d81abb 5450
37366632 5451 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5452 label_rtx (node->code_label), unsignedp);
5453
5454 if (node->right != 0 && node->left != 0)
5455 {
5456 /* This node has children on both sides.
5457 Dispatch to one side or the other
5458 by comparing the index value with this node's value.
5459 If one subtree is bounded, check that one first,
5460 so we can avoid real branches in the tree. */
5461
5462 if (node_is_bounded (node->right, index_type))
5463 {
5464 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5465 VOIDmode, 0),
5466 GT, NULL_RTX, mode, unsignedp, 0);
5467
5468 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5469 emit_case_nodes (index, node->left, default_label, index_type);
5470 }
5471
5472 else if (node_is_bounded (node->left, index_type))
5473 {
37366632 5474 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
28d81abb 5475 VOIDmode, 0),
37366632 5476 LT, NULL_RTX, mode, unsignedp, 0);
5477 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5478 emit_case_nodes (index, node->right, default_label, index_type);
5479 }
5480
5481 else
5482 {
5483 /* Neither node is bounded. First distinguish the two sides;
5484 then emit the code for one side at a time. */
5485
5486 tree test_label
5487 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5488
5489 /* See if the value is on the right. */
37366632 5490 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
28d81abb 5491 VOIDmode, 0),
37366632 5492 GT, NULL_RTX, mode, unsignedp, 0);
5493 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5494
5495 /* Value must be on the left.
5496 Handle the left-hand subtree. */
5497 emit_case_nodes (index, node->left, default_label, index_type);
5498 /* If left-hand subtree does nothing,
5499 go to default. */
5500 emit_jump_if_reachable (default_label);
5501
5502 /* Code branches here for the right-hand subtree. */
5503 expand_label (test_label);
5504 emit_case_nodes (index, node->right, default_label, index_type);
5505 }
5506 }
5507
5508 else if (node->right != 0 && node->left == 0)
5509 {
5510 /* Here we have a right child but no left so we issue conditional
5511 branch to default and process the right child.
5512
 5513	 Omit the conditional branch to default if it would avoid only one
5514 right child; it costs too much space to save so little time. */
5515
de14fd73 5516 if (node->right->right || node->right->left
5517 || !tree_int_cst_equal (node->right->low, node->right->high))
5518 {
5519 if (!node_has_low_bound (node, index_type))
5520 {
5521 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5522 VOIDmode, 0),
5523 LT, NULL_RTX, mode, unsignedp, 0);
5524 emit_jump_insn ((*gen_blt_pat) (default_label));
5525 }
5526
5527 emit_case_nodes (index, node->right, default_label, index_type);
5528 }
5529 else
5530 /* We cannot process node->right normally
5531 since we haven't ruled out the numbers less than
5532 this node's value. So handle node->right explicitly. */
5533 do_jump_if_equal (index,
5534 expand_expr (node->right->low, NULL_RTX,
5535 VOIDmode, 0),
5536 label_rtx (node->right->code_label), unsignedp);
5537 }
5538
5539 else if (node->right == 0 && node->left != 0)
5540 {
5541 /* Just one subtree, on the left. */
5542
5543#if 0 /* The following code and comment were formerly part
5544 of the condition here, but they didn't work
5545 and I don't understand what the idea was. -- rms. */
5546 /* If our "most probable entry" is less probable
5547 than the default label, emit a jump to
5548 the default label using condition codes
5549 already lying around. With no right branch,
5550 a branch-greater-than will get us to the default
5551 label correctly. */
5552 if (use_cost_table
5553 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5554 ;
5555#endif /* 0 */
5556 if (node->left->left || node->left->right
5557 || !tree_int_cst_equal (node->left->low, node->left->high))
5558 {
5559 if (!node_has_high_bound (node, index_type))
5560 {
5561 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5562 VOIDmode, 0),
5563 GT, NULL_RTX, mode, unsignedp, 0);
5564 emit_jump_insn ((*gen_bgt_pat) (default_label));
5565 }
5566
5567 emit_case_nodes (index, node->left, default_label, index_type);
5568 }
5569 else
5570 /* We cannot process node->left normally
5571 since we haven't ruled out the numbers less than
5572 this node's value. So handle node->left explicitly. */
5573 do_jump_if_equal (index,
5574 expand_expr (node->left->low, NULL_RTX,
5575 VOIDmode, 0),
5576 label_rtx (node->left->code_label), unsignedp);
5577 }
5578 }
5579 else
5580 {
5581 /* Node is a range. These cases are very similar to those for a single
5582 value, except that we do not start by testing whether this node
5583 is the one to branch to. */
5584
5585 if (node->right != 0 && node->left != 0)
5586 {
5587 /* Node has subtrees on both sides.
5588 If the right-hand subtree is bounded,
5589 test for it first, since we can go straight there.
5590 Otherwise, we need to make a branch in the control structure,
5591 then handle the two subtrees. */
5592 tree test_label = 0;
5593
5594 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5595 VOIDmode, 0),
5596 GT, NULL_RTX, mode, unsignedp, 0);
5597
5598 if (node_is_bounded (node->right, index_type))
5599 /* Right hand node is fully bounded so we can eliminate any
5600 testing and branch directly to the target code. */
5601 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5602 else
5603 {
5604 /* Right hand node requires testing.
5605 Branch to a label where we will handle it later. */
5606
5607 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5608 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5609 }
5610
5611 /* Value belongs to this node or to the left-hand subtree. */
5612
5613 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5614 GE, NULL_RTX, mode, unsignedp, 0);
5615 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5616
5617 /* Handle the left-hand subtree. */
5618 emit_case_nodes (index, node->left, default_label, index_type);
5619
5620 /* If right node had to be handled later, do that now. */
5621
5622 if (test_label)
5623 {
5624 /* If the left-hand subtree fell through,
5625 don't let it fall into the right-hand subtree. */
5626 emit_jump_if_reachable (default_label);
5627
5628 expand_label (test_label);
5629 emit_case_nodes (index, node->right, default_label, index_type);
5630 }
5631 }
5632
5633 else if (node->right != 0 && node->left == 0)
5634 {
5635 /* Deal with values to the left of this node,
5636 if they are possible. */
5637 if (!node_has_low_bound (node, index_type))
5638 {
5639 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5640 VOIDmode, 0),
5641 LT, NULL_RTX, mode, unsignedp, 0);
5642 emit_jump_insn ((*gen_blt_pat) (default_label));
5643 }
5644
5645 /* Value belongs to this node or to the right-hand subtree. */
5646
5647 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5648 VOIDmode, 0),
5649 LE, NULL_RTX, mode, unsignedp, 0);
5650 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5651
5652 emit_case_nodes (index, node->right, default_label, index_type);
5653 }
5654
5655 else if (node->right == 0 && node->left != 0)
5656 {
5657 /* Deal with values to the right of this node,
5658 if they are possible. */
5659 if (!node_has_high_bound (node, index_type))
5660 {
5661 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5662 VOIDmode, 0),
5663 GT, NULL_RTX, mode, unsignedp, 0);
5664 emit_jump_insn ((*gen_bgt_pat) (default_label));
5665 }
5666
5667 /* Value belongs to this node or to the left-hand subtree. */
5668
5669 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5670 GE, NULL_RTX, mode, unsignedp, 0);
5671 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5672
5673 emit_case_nodes (index, node->left, default_label, index_type);
5674 }
5675
5676 else
5677 {
5678 /* Node has no children so we check low and high bounds to remove
5679 redundant tests. Only one of the bounds can exist,
5680 since otherwise this node is bounded--a case tested already. */
5681
5682 if (!node_has_high_bound (node, index_type))
5683 {
5684 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5685 VOIDmode, 0),
5686 GT, NULL_RTX, mode, unsignedp, 0);
5687 emit_jump_insn ((*gen_bgt_pat) (default_label));
5688 }
5689
5690 if (!node_has_low_bound (node, index_type))
5691 {
5692 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5693 VOIDmode, 0),
5694 LT, NULL_RTX, mode, unsignedp, 0);
5695 emit_jump_insn ((*gen_blt_pat) (default_label));
5696 }
5697
5698 emit_jump (label_rtx (node->code_label));
5699 }
5700 }
5701}
5702\f
5703/* These routines are used by the loop unrolling code. They copy BLOCK trees
5704 so that the debugging info will be correct for the unrolled loop. */
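/* The expected sequence is a call to find_loop_tree_blocks before any
   insns are duplicated and one to unroll_block_trees afterwards, so
   that reorder_blocks can rebuild the BLOCK tree to match the
   duplicated code.  */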
5705
94dc8b56 5706/* Indexed by block number, contains a pointer to the N'th block node. */
28d81abb 5707
94dc8b56 5708static tree *block_vector;
5709
5710void
94dc8b56 5711find_loop_tree_blocks ()
28d81abb 5712{
94dc8b56 5713 tree block = DECL_INITIAL (current_function_decl);
28d81abb 5714
94dc8b56 5715 block_vector = identify_blocks (block, get_insns ());
5716}
5717
28d81abb 5718void
94dc8b56 5719unroll_block_trees ()
28d81abb 5720{
94dc8b56 5721 tree block = DECL_INITIAL (current_function_decl);
28d81abb 5722
94dc8b56 5723 reorder_blocks (block_vector, block, get_insns ());
28d81abb 5724}