/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
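
/* For illustration only, a sketch of the calls a C front end might make
   for `if (cond) stmt;' (the exact sequence is up to each parser):

	expand_start_cond (cond, 0);	-- after parsing `if (cond)'
	... expand the then-clause ...
	expand_end_cond ();		-- after parsing the then-clause  */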

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
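
/* For example, with the GNU C case-range extension

	switch (i) { case 1 ... 4: f (); break; default: g (); }

   the label `case 1 ... 4' becomes one case_node with LOW == 1 and
   HIGH == 4, while a plain `case 2:' yields LOW == HIGH == 2.  */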

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
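
/* A sketch of how the stacks relate: while expanding

	while (a)		-- pushes one nesting object on loop_stack
	  if (b)		-- pushes one on cond_stack
	    { int v; ... }	-- pushes one on block_stack

   `nesting_stack' holds all three objects, innermost first, and each
   individual stack holds just the object for its own kind of
   construct.  */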

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)
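
/* Typical use, e.g. when an `if' construct ends (a sketch based on
   expand_end_cond elsewhere in this file):

	struct nesting *thiscond = cond_stack;
	... emit thiscond->data.cond.endif_label ...
	POPSTACK (cond_stack);

   which frees THISCOND together with everything nested inside it.  */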
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
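
/* For example, in the (hypothetical) fragment

	{ int v[n]; if (c) goto lab; ... }
	lab: ;

   leaving V's contour must restore the saved stack level, but when the
   `goto' is expanded LAB has not been defined yet, so expand_goto queues
   a fixup here that is resolved once LAB is reached.  */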

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Character strings, each containing a single decimal digit.  */
static char *digit_strings[10];


static int n_occurrences PARAMS ((int, const char *));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver PARAMS ((void));
static void expand_nl_goto_receivers PARAMS ((struct nesting *));
static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
				 rtx, int));
static void expand_null_return_1 PARAMS ((rtx, int));
static void expand_value_return PARAMS ((rtx));
static int tail_recursion_args PARAMS ((tree, tree));
static void expand_cleanups PARAMS ((tree, tree, int, int));
static void check_seenlabel PARAMS ((void));
static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs PARAMS ((case_node_ptr));
static void group_case_nodes PARAMS ((case_node_ptr));
static void balance_case_nodes PARAMS ((case_node_ptr *,
					case_node_ptr));
static int node_has_low_bound PARAMS ((case_node_ptr, tree));
static int node_has_high_bound PARAMS ((case_node_ptr, tree));
static int node_is_bounded PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable PARAMS ((rtx));
static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
static int add_case_node PARAMS ((tree, tree, tree, tree *));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting PARAMS ((struct nesting *));
static void mark_loop_nesting PARAMS ((struct nesting *));
static void mark_block_nesting PARAMS ((struct nesting *));
static void mark_case_nesting PARAMS ((struct nesting *));
static void mark_goto_fixup PARAMS ((struct goto_fixup *));

\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{

  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	ggc_mark_tree (l->label);

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct case_node *node;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      node = n->data.case_stmt.case_list;
      while (node)
	{
	  ggc_mark_tree (node->low);
	  ggc_mark_tree (node->high);
	  ggc_mark_tree (node->code_label);
	  node = node->right;
	}

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      n = n->next;
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  int i;

  gcc_obstack_init (&stmt_obstack);

  for (i = 0; i < 10; i++)
    {
      digit_strings[i] = ggc_alloc_string (NULL, 1);
      digit_strings[i][0] = '0' + i;
    }
  ggc_add_string_root (digit_strings, 10);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
\f
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     char *file;
     int line;
{
  emit_filename = file;
  emit_lineno = line;
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
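
/* For reference, EXP above is the pointer expression of a GNU C computed
   goto using the labels-as-values extension, e.g. P in

	void *p = &&lab; ... goto *p; ... lab: ;  */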
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
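
/* Such labels arise from the GNU C nested-function extension, e.g. in
   the (hypothetical) fragment

	void f () { void g () { goto lab; } ... lab: ...; }

   the `goto' in G is a nonlocal goto to LAB in F.  */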

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->x_nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->x_nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}


\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);


	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
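
/* For example, n_occurrences (',', "r,m") is 1; expand_asm_operands
   below uses exactly this to count the alternatives (here two) in an
   `asm' constraint string.  */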
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
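
/* A no-operand statement such as `asm ("nop");' arrives here with BODY
   the STRING_CST for "nop" (or an ADDR_EXPR wrapping it, which is
   stripped above).  */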

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
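
/* For example, the (hypothetical) GNU C statement

	asm volatile ("foo %1,%0" : "=r" (x) : "g" (y) : "cc");

   arrives with STRING the template "foo %1,%0"; one element in OUTPUTS
   whose TREE_PURPOSE is the constraint string "=r" and whose TREE_VALUE
   is the lvalue X; one element in INPUTS for `"g" (y)'; a STRING_CST
   "cc" in CLOBBERS; and VOL nonzero because of `volatile'.  */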

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
1546
235c5021 1547 ninputs += ninout;
28d81abb
RK
1548 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1549 {
1550 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1551 return;
1552 }
1553
1554 /* Make vectors for the expression-rtx and constraint strings. */
1555
1556 argvec = rtvec_alloc (ninputs);
1557 constraints = rtvec_alloc (ninputs);
1558
21a427cc
AS
1559 body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
1560 empty_string, 0, argvec, constraints,
1561 filename, line);
c85f7c16 1562
78418280 1563 MEM_VOLATILE_P (body) = vol;
28d81abb
RK
1564
1565 /* Eval the inputs and put them into ARGVEC.
1566 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1567
1568 i = 0;
1569 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1570 {
1571 int j;
1f06ee8d
RH
1572 int allows_reg = 0, allows_mem = 0;
1573 char *constraint, *orig_constraint;
2a230e9d 1574 int c_len;
1f06ee8d 1575 rtx op;
28d81abb
RK
1576
1577 /* If there's an erroneous arg, emit no insn,
1578 because the ASM_INPUT would get VOIDmode
1579 and that could cause a crash in reload. */
1580 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1581 return;
2a230e9d
BS
1582
1583 /* ??? Can this happen, and does the error message make any sense? */
28d81abb
RK
1584 if (TREE_PURPOSE (tail) == NULL_TREE)
1585 {
1586 error ("hard register `%s' listed as input operand to `asm'",
1587 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1588 return;
1589 }
1590
14a774a9 1591 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
2a230e9d 1592 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1f06ee8d 1593 orig_constraint = constraint;
28d81abb 1594
2a230e9d
BS
1595 /* Make sure constraint has neither `=', `+', nor `&'. */
1596
1597 for (j = 0; j < c_len; j++)
1598 switch (constraint[j])
28d81abb 1599 {
2a230e9d 1600 case '+': case '=': case '&':
1f06ee8d
RH
1601 if (constraint == orig_constraint)
1602 {
14a774a9
RK
1603 error ("input operand constraint contains `%c'",
1604 constraint[j]);
1f06ee8d
RH
1605 return;
1606 }
1607 break;
65fed0cb 1608
2a230e9d 1609 case '%':
1f06ee8d
RH
1610 if (constraint == orig_constraint
1611 && i + 1 == ninputs - ninout)
2a230e9d
BS
1612 {
1613 error ("`%%' constraint used with last operand");
1614 return;
1615 }
1616 break;
1617
1f06ee8d
RH
1618 case 'V': case 'm': case 'o':
1619 allows_mem = 1;
1620 break;
1621
1622 case '<': case '>':
2a230e9d 1623 case '?': case '!': case '*':
65fed0cb
RK
1624 case 'E': case 'F': case 'G': case 'H': case 'X':
1625 case 's': case 'i': case 'n':
1626 case 'I': case 'J': case 'K': case 'L': case 'M':
1627 case 'N': case 'O': case 'P': case ',':
1628#ifdef EXTRA_CONSTRAINT
1629 case 'Q': case 'R': case 'S': case 'T': case 'U':
1630#endif
1631 break;
1632
7b7a33b3
JW
1633 /* Whether or not a numeric constraint allows a register is
1634 decided by the matching constraint, and so there is no need
1635 to do anything special with them. We must handle them in
1636 the default case, so that we don't unnecessarily force
1637 operands to memory. */
1638 case '0': case '1': case '2': case '3': case '4':
cd76ea33 1639 case '5': case '6': case '7': case '8': case '9':
2a230e9d 1640 if (constraint[j] >= '0' + noutputs)
956d6950
JL
1641 {
1642 error
1643 ("matching constraint references invalid operand number");
1644 return;
1645 }
cd76ea33 1646
1f06ee8d 1647 /* Try to find the real constraint for this dup. */
1afbe1c4
RH
1648 if ((j == 0 && c_len == 1)
1649 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1f06ee8d
RH
1650 {
1651 tree o = outputs;
14a774a9 1652
1f06ee8d
RH
1653 for (j = constraint[j] - '0'; j > 0; --j)
1654 o = TREE_CHAIN (o);
1655
14a774a9 1656 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
1f06ee8d
RH
1657 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1658 j = 0;
1659 break;
1660 }
1661
cd76ea33
RK
1662 /* ... fall through ... */
1663
1f06ee8d 1664 case 'p': case 'r':
65fed0cb
RK
1665 default:
1666 allows_reg = 1;
1667 break;
1f06ee8d
RH
1668
1669 case 'g':
1670 allows_reg = 1;
1671 allows_mem = 1;
1672 break;
28d81abb
RK
1673 }
1674
1f06ee8d 1675 if (! allows_reg && allows_mem)
65fed0cb
RK
1676 mark_addressable (TREE_VALUE (tail));
1677
1f06ee8d 1678 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
65fed0cb 1679
1afbe1c4 1680 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1681 {
1f06ee8d
RH
1682 if (allows_reg)
1683 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1684 else if (!allows_mem)
1685 warning ("asm operand %d probably doesn't match constraints", i);
1686 else if (CONSTANT_P (op))
1687 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1688 op);
1689 else if (GET_CODE (op) == REG
1690 || GET_CODE (op) == SUBREG
1691 || GET_CODE (op) == CONCAT)
1692 {
1693 tree type = TREE_TYPE (TREE_VALUE (tail));
1694 rtx memloc = assign_temp (type, 1, 1, 1);
65fed0cb 1695
1f06ee8d
RH
1696 emit_move_insn (memloc, op);
1697 op = memloc;
1698 }
14a774a9 1699
1f06ee8d
RH
1700 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1701 /* We won't recognize volatile memory as a memory_operand
1702 at this point. Ignore it. */
1703 ;
1704 else if (queued_subexp_p (op))
1705 ;
1706 else
1707 /* ??? Leave this only until we have experience with what
1708 happens in combine and elsewhere when constraints are
1709 not satisfied. */
1710 warning ("asm operand %d probably doesn't match constraints", i);
65fed0cb 1711 }
1f06ee8d 1712 XVECEXP (body, 3, i) = op;
2a230e9d 1713
28d81abb 1714 XVECEXP (body, 4, i) /* constraints */
38a448ca 1715 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1f06ee8d 1716 orig_constraint);
28d81abb
RK
1717 i++;
1718 }
1719
14a774a9
RK
1720 /* Protect all the operands from the queue now that they have all been
1721 evaluated. */
28d81abb 1722
235c5021 1723 for (i = 0; i < ninputs - ninout; i++)
28d81abb
RK
1724 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1725
1726 for (i = 0; i < noutputs; i++)
1727 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1728
235c5021
RK
1729 /* For in-out operands, copy output rtx to input rtx. */
1730 for (i = 0; i < ninout; i++)
1731 {
235c5021
RK
1732 int j = inout_opnum[i];
1733
1734 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1735 = output_rtx[j];
1736 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
21a427cc 1737 = gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
235c5021
RK
1738 }
1739
28d81abb
RK
1740 /* Now, for each output, construct an rtx
1741 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1742 ARGVEC CONSTRAINTS))
1743 If there is more than one, put them inside a PARALLEL. */
1744
1745 if (noutputs == 1 && nclobbers == 0)
1746 {
1747 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
38a448ca 1748 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1749 }
14a774a9 1750
28d81abb
RK
1751 else if (noutputs == 0 && nclobbers == 0)
1752 {
1753 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1754 insn = emit_insn (body);
1755 }
14a774a9 1756
28d81abb
RK
1757 else
1758 {
1759 rtx obody = body;
1760 int num = noutputs;
14a774a9
RK
1761
1762 if (num == 0)
1763 num = 1;
1764
38a448ca 1765 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
28d81abb
RK
1766
1767 /* For each output operand, store a SET. */
28d81abb
RK
1768 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1769 {
1770 XVECEXP (body, 0, i)
38a448ca
RH
1771 = gen_rtx_SET (VOIDmode,
1772 output_rtx[i],
c5c76735
JL
1773 gen_rtx_ASM_OPERANDS
1774 (VOIDmode,
1775 TREE_STRING_POINTER (string),
1776 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1777 i, argvec, constraints,
1778 filename, line));
1779
28d81abb
RK
1780 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1781 }
1782
1783 /* If there are no outputs (but there are some clobbers)
1784 store the bare ASM_OPERANDS into the PARALLEL. */
1785
1786 if (i == 0)
1787 XVECEXP (body, 0, i++) = obody;
1788
1789 /* Store (clobber REG) for each clobbered register specified. */
1790
b4ccaa16 1791 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1792 {
28d81abb 1793 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1794 int j = decode_reg_name (regname);
28d81abb 1795
b4ac57ab 1796 if (j < 0)
28d81abb 1797 {
c09e6498 1798 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1799 continue;
1800
c09e6498
RS
1801 if (j == -4) /* `memory', don't cache memory across asm */
1802 {
bffc6177 1803 XVECEXP (body, 0, i++)
38a448ca 1804 = gen_rtx_CLOBBER (VOIDmode,
c5c76735
JL
1805 gen_rtx_MEM
1806 (BLKmode,
1807 gen_rtx_SCRATCH (VOIDmode)));
c09e6498
RS
1808 continue;
1809 }
1810
956d6950 1811 /* Ignore unknown register, error already signaled. */
cc1f5387 1812 continue;
28d81abb
RK
1813 }
1814
1815 /* Use QImode since that's guaranteed to clobber just one reg. */
b4ccaa16 1816 XVECEXP (body, 0, i++)
38a448ca 1817 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
28d81abb
RK
1818 }
1819
1820 insn = emit_insn (body);
1821 }
1822
947255ed
RH
1823 /* For any outputs that needed reloading into registers, spill them
1824 back to where they belong. */
1825 for (i = 0; i < noutputs; ++i)
1826 if (real_output_rtx[i])
1827 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1828
28d81abb
RK
1829 free_temp_slots ();
1830}
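
/* As an illustration of the cases handled above (a user-level sketch,
   not part of the original sources): a C front end might hand this
   function an asm such as

	int x = 5;
	asm volatile ("incl %0" : "+r" (x) : : "memory");

   The `+' makes X an in-out operand, so it is counted once among the
   outputs and once among the inputs, where it receives a matching
   digit constraint; the `memory' clobber becomes a
   (clobber (mem:BLK (scratch))) in the PARALLEL built above.  The
   `incl' template assumes an x86-style target.  */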
1831\f
1832/* Generate RTL to evaluate the expression EXP
1833 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1834
1835void
1836expand_expr_stmt (exp)
1837 tree exp;
1838{
1839 /* If -W, warn about statements with no side effects,
1840 except for an explicit cast to void (e.g. for assert()), and
1841 except inside a ({...}) where they may be useful. */
1842 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1843 {
1844 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1845 && !(TREE_CODE (exp) == CONVERT_EXPR
1846 && TREE_TYPE (exp) == void_type_node))
1847 warning_with_file_and_line (emit_filename, emit_lineno,
1848 "statement with no effect");
1849 else if (warn_unused)
1850 warn_if_unused_value (exp);
1851 }
b6ec8c5f
RK
1852
1853 /* If EXP is of function type and we are expanding statements for
1854 value, convert it to pointer-to-function. */
1855 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1856 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1857
28d81abb 1858 last_expr_type = TREE_TYPE (exp);
a2cf7deb
CB
1859 last_expr_value = expand_expr (exp,
1860 (expr_stmts_for_value
1861 ? NULL_RTX : const0_rtx),
1862 VOIDmode, 0);
28d81abb
RK
1863
1864 /* If all we do is reference a volatile value in memory,
1865 copy it to a register to be sure it is actually touched. */
1866 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1867 && TREE_THIS_VOLATILE (exp))
1868 {
6a5bbbe6
RS
1869 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1870 ;
1871 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
28d81abb
RK
1872 copy_to_reg (last_expr_value);
1873 else
ddbe9812
RS
1874 {
1875 rtx lab = gen_label_rtx ();
1876
1877 /* Compare the value with itself to reference it. */
c5d5d461
JL
1878 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1879 expand_expr (TYPE_SIZE (last_expr_type),
1880 NULL_RTX, VOIDmode, 0),
1881 BLKmode, 0,
1882 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1883 lab);
ddbe9812
RS
1884 emit_label (lab);
1885 }
28d81abb
RK
1886 }
1887
1888 /* If this expression is part of a ({...}) and is in memory, we may have
1889 to preserve temporaries. */
1890 preserve_temp_slots (last_expr_value);
1891
1892 /* Free any temporaries used to evaluate this expression. Any temporary
1893 used as a result of this expression will already have been preserved
1894 above. */
1895 free_temp_slots ();
1896
1897 emit_queue ();
1898}
1899
1900/* Warn if EXP contains any computations whose results are not used.
1901 Return 1 if a warning is printed; 0 otherwise. */
1902
150a992a 1903int
28d81abb
RK
1904warn_if_unused_value (exp)
1905 tree exp;
1906{
1907 if (TREE_USED (exp))
1908 return 0;
1909
1910 switch (TREE_CODE (exp))
1911 {
1912 case PREINCREMENT_EXPR:
1913 case POSTINCREMENT_EXPR:
1914 case PREDECREMENT_EXPR:
1915 case POSTDECREMENT_EXPR:
1916 case MODIFY_EXPR:
1917 case INIT_EXPR:
1918 case TARGET_EXPR:
1919 case CALL_EXPR:
1920 case METHOD_CALL_EXPR:
1921 case RTL_EXPR:
81797aba 1922 case TRY_CATCH_EXPR:
28d81abb
RK
1923 case WITH_CLEANUP_EXPR:
1924 case EXIT_EXPR:
1925 /* We don't warn about COND_EXPR because it may be a useful
1926 construct if either arm contains a side effect. */
1927 case COND_EXPR:
1928 return 0;
1929
1930 case BIND_EXPR:
1931 /* For a binding, warn if no side effect within it. */
1932 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1933
de73f171
RK
1934 case SAVE_EXPR:
1935 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1936
28d81abb
RK
1937 case TRUTH_ORIF_EXPR:
1938 case TRUTH_ANDIF_EXPR:
1939 /* In && or ||, warn if 2nd operand has no side effect. */
1940 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1941
1942 case COMPOUND_EXPR:
a646a211
JM
1943 if (TREE_NO_UNUSED_WARNING (exp))
1944 return 0;
28d81abb
RK
1945 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1946 return 1;
4d23e509
RS
1947 /* Let people do `(foo (), 0)' without a warning. */
1948 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1949 return 0;
28d81abb
RK
1950 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1951
1952 case NOP_EXPR:
1953 case CONVERT_EXPR:
b4ac57ab 1954 case NON_LVALUE_EXPR:
28d81abb
RK
1955 /* Don't warn about values cast to void. */
1956 if (TREE_TYPE (exp) == void_type_node)
1957 return 0;
1958 /* Don't warn about conversions not explicit in the user's program. */
1959 if (TREE_NO_UNUSED_WARNING (exp))
1960 return 0;
1961 /* Assignment to a cast usually results in a cast of a modify.
55cd1c09
JW
1962 Don't complain about that. There can be an arbitrary number of
1963 casts before the modify, so we must loop until we find the first
1964 non-cast expression and then test to see if that is a modify. */
1965 {
1966 tree tem = TREE_OPERAND (exp, 0);
1967
1968 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1969 tem = TREE_OPERAND (tem, 0);
1970
de73f171
RK
1971 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1972 || TREE_CODE (tem) == CALL_EXPR)
55cd1c09
JW
1973 return 0;
1974 }
d1e1adfb 1975 goto warn;
28d81abb 1976
d1e1adfb
JM
1977 case INDIRECT_REF:
1978 /* Don't warn about automatic dereferencing of references, since
1979 the user cannot control it. */
1980 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1981 return warn_if_unused_value (TREE_OPERAND (exp, 0));
0f41302f 1982 /* ... fall through ... */
d1e1adfb 1983
28d81abb 1984 default:
ddbe9812
RS
1985 /* Referencing a volatile value is a side effect, so don't warn. */
1986 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1987 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1988 && TREE_THIS_VOLATILE (exp))
1989 return 0;
d1e1adfb 1990 warn:
28d81abb
RK
1991 warning_with_file_and_line (emit_filename, emit_lineno,
1992 "value computed is not used");
1993 return 1;
1994 }
1995}
1996
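/* For example (user-level C, purely illustrative): with -Wunused the
   tests above warn for

	(foo (), x + 1);	// warns: value computed is not used

   but stay silent for

	(void) (x + 1);		// explicit cast to void
	(foo (), 0);		// constant second operand

   assuming X is a non-volatile int and FOO has side effects.  */
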
1997/* Clear out the memory of the last expression evaluated. */
1998
1999void
2000clear_last_expr ()
2001{
2002 last_expr_type = 0;
2003}
2004
2005/* Begin a statement which will return a value.
2006 Return the RTL_EXPR for this statement expr.
2007 The caller must save that value and pass it to expand_end_stmt_expr. */
2008
2009tree
2010expand_start_stmt_expr ()
2011{
ca695ac9
JB
2012 int momentary;
2013 tree t;
2014
28d81abb
RK
2015 /* Make the RTL_EXPR node temporary, not momentary,
2016 so that rtl_expr_chain doesn't become garbage. */
ca695ac9
JB
2017 momentary = suspend_momentary ();
2018 t = make_node (RTL_EXPR);
28d81abb 2019 resume_momentary (momentary);
33c6ab80 2020 do_pending_stack_adjust ();
e922dbad 2021 start_sequence_for_rtl_expr (t);
28d81abb
RK
2022 NO_DEFER_POP;
2023 expr_stmts_for_value++;
2024 return t;
2025}
2026
2027/* Restore the previous state at the end of a statement that returns a value.
2028 Returns a tree node representing the statement's value and the
2029 insns to compute the value.
2030
2031 The nodes of that expression have been freed by now, so we cannot use them.
2032 But we don't want to do that anyway; the expression has already been
2033 evaluated and now we just want to use the value. So generate a RTL_EXPR
2034 with the proper type and RTL value.
2035
2036 If the last substatement was not an expression,
2037 return something with type `void'. */
2038
2039tree
2040expand_end_stmt_expr (t)
2041 tree t;
2042{
2043 OK_DEFER_POP;
2044
2045 if (last_expr_type == 0)
2046 {
2047 last_expr_type = void_type_node;
2048 last_expr_value = const0_rtx;
2049 }
2050 else if (last_expr_value == 0)
2051 /* There are some cases where this can happen, such as when the
2052 statement has void type. */
2053 last_expr_value = const0_rtx;
2054 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2055 /* Remove any possible QUEUED. */
2056 last_expr_value = protect_from_queue (last_expr_value, 0);
2057
2058 emit_queue ();
2059
2060 TREE_TYPE (t) = last_expr_type;
2061 RTL_EXPR_RTL (t) = last_expr_value;
2062 RTL_EXPR_SEQUENCE (t) = get_insns ();
2063
2064 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2065
2066 end_sequence ();
2067
2068 /* Don't consider deleting this expr or containing exprs at tree level. */
2069 TREE_SIDE_EFFECTS (t) = 1;
2070 /* Propagate volatility of the actual RTL expr. */
2071 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2072
2073 last_expr_type = 0;
2074 expr_stmts_for_value--;
2075
2076 return t;
2077}
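
/* To illustrate the pair of functions above (a sketch; the real call
   sites are in the front ends): for the GNU statement expression

	y = ({ int t = f (); t + 1; });

   the parser calls expand_start_stmt_expr before expanding the
   enclosed statements, then passes the tree it returned to
   expand_end_stmt_expr at the closing brace.  The resulting RTL_EXPR
   carries both the emitted insns and the value of `t + 1'.  */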
2078\f
28d81abb
RK
2079/* Generate RTL for the start of an if-then. COND is the expression
2080 whose truth should be tested.
2081
2082 If EXITFLAG is nonzero, this conditional is visible to
2083 `exit_something'. */
2084
2085void
2086expand_start_cond (cond, exitflag)
2087 tree cond;
2088 int exitflag;
2089{
2090 struct nesting *thiscond = ALLOC_NESTING ();
2091
2092 /* Make an entry on cond_stack for the cond we are entering. */
2093
2094 thiscond->next = cond_stack;
2095 thiscond->all = nesting_stack;
2096 thiscond->depth = ++nesting_depth;
2097 thiscond->data.cond.next_label = gen_label_rtx ();
2098 /* Before we encounter an `else', we don't need a separate exit label
2099 unless there are supposed to be exit statements
2100 to exit this conditional. */
2101 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2102 thiscond->data.cond.endif_label = thiscond->exit_label;
2103 cond_stack = thiscond;
2104 nesting_stack = thiscond;
2105
b93a436e 2106 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
2107}
2108
2109/* Generate RTL between then-clause and the elseif-clause
2110 of an if-then-elseif-.... */
2111
2112void
2113expand_start_elseif (cond)
2114 tree cond;
2115{
2116 if (cond_stack->data.cond.endif_label == 0)
2117 cond_stack->data.cond.endif_label = gen_label_rtx ();
2118 emit_jump (cond_stack->data.cond.endif_label);
2119 emit_label (cond_stack->data.cond.next_label);
2120 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2121 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
2122}
2123
2124/* Generate RTL between the then-clause and the else-clause
2125 of an if-then-else. */
2126
2127void
2128expand_start_else ()
2129{
2130 if (cond_stack->data.cond.endif_label == 0)
2131 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2132
28d81abb
RK
2133 emit_jump (cond_stack->data.cond.endif_label);
2134 emit_label (cond_stack->data.cond.next_label);
0f41302f 2135 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
2136}
2137
d947ba59
RK
2138/* After calling expand_start_else, turn this "else" into an "else if"
2139 by providing another condition. */
2140
2141void
2142expand_elseif (cond)
2143 tree cond;
2144{
2145 cond_stack->data.cond.next_label = gen_label_rtx ();
2146 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2147}
2148
28d81abb
RK
2149/* Generate RTL for the end of an if-then.
2150 Pop the record for it off of cond_stack. */
2151
2152void
2153expand_end_cond ()
2154{
2155 struct nesting *thiscond = cond_stack;
2156
b93a436e
JL
2157 do_pending_stack_adjust ();
2158 if (thiscond->data.cond.next_label)
2159 emit_label (thiscond->data.cond.next_label);
2160 if (thiscond->data.cond.endif_label)
2161 emit_label (thiscond->data.cond.endif_label);
28d81abb
RK
2162
2163 POPSTACK (cond_stack);
2164 last_expr_type = 0;
2165}
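
/* A sketch of the calling sequence for the conditional functions above
   (illustrative; the real call sites are in the parsers).  For

	if (a) s1; else if (b) s2; else s3;

   the expected sequence is roughly

	expand_start_cond (a, 0);
	... expand s1 ...
	expand_start_elseif (b);
	... expand s2 ...
	expand_start_else ();
	... expand s3 ...
	expand_end_cond ();  */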
ca695ac9
JB
2166
2167
28d81abb
RK
2168\f
2169/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2170 loop should be exited by `exit_something'. This is a loop for which
2171 `expand_continue' will jump to the top of the loop.
2172
2173 Make an entry on loop_stack to record the labels associated with
2174 this loop. */
2175
2176struct nesting *
2177expand_start_loop (exit_flag)
2178 int exit_flag;
2179{
2180 register struct nesting *thisloop = ALLOC_NESTING ();
2181
2182 /* Make an entry on loop_stack for the loop we are entering. */
2183
2184 thisloop->next = loop_stack;
2185 thisloop->all = nesting_stack;
2186 thisloop->depth = ++nesting_depth;
2187 thisloop->data.loop.start_label = gen_label_rtx ();
2188 thisloop->data.loop.end_label = gen_label_rtx ();
8afad312 2189 thisloop->data.loop.alt_end_label = 0;
28d81abb
RK
2190 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2191 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2192 loop_stack = thisloop;
2193 nesting_stack = thisloop;
2194
2195 do_pending_stack_adjust ();
2196 emit_queue ();
37366632 2197 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
28d81abb
RK
2198 emit_label (thisloop->data.loop.start_label);
2199
2200 return thisloop;
2201}
2202
2203/* Like expand_start_loop but for a loop where the continuation point
2204 (for expand_continue_loop) will be specified explicitly. */
2205
2206struct nesting *
2207expand_start_loop_continue_elsewhere (exit_flag)
2208 int exit_flag;
2209{
2210 struct nesting *thisloop = expand_start_loop (exit_flag);
2211 loop_stack->data.loop.continue_label = gen_label_rtx ();
2212 return thisloop;
2213}
2214
2215/* Specify the continuation point for a loop started with
2216 expand_start_loop_continue_elsewhere.
2217 Use this at the point in the code to which a continue statement
2218 should jump. */
2219
2220void
2221expand_loop_continue_here ()
2222{
2223 do_pending_stack_adjust ();
37366632 2224 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
28d81abb
RK
2225 emit_label (loop_stack->data.loop.continue_label);
2226}
2227
2228/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2229 Pop the block off of loop_stack. */
2230
2231void
2232expand_end_loop ()
2233{
0720f6fb
MM
2234 rtx start_label = loop_stack->data.loop.start_label;
2235 rtx insn = get_last_insn ();
a7d308f7 2236 int needs_end_jump = 1;
28d81abb
RK
2237
2238 /* Mark the continue-point at the top of the loop if none elsewhere. */
2239 if (start_label == loop_stack->data.loop.continue_label)
2240 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2241
2242 do_pending_stack_adjust ();
2243
a7d308f7
R
2244 /* If optimizing, perhaps reorder the loop.
2245 First, try to use a condjump near the end.
2246 expand_exit_loop_if_false ends loops with unconditional jumps,
2247 like this:
2248
2249 if (test) goto label;
2250 optional: cleanup
2251 goto loop_stack->data.loop.end_label
2252 barrier
2253 label:
2254
2255 If we find such a pattern, we can end the loop earlier. */
2256
2257 if (optimize
2258 && GET_CODE (insn) == CODE_LABEL
2259 && LABEL_NAME (insn) == NULL
2260 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2261 {
2262 rtx label = insn;
2263 rtx jump = PREV_INSN (PREV_INSN (label));
2264
2265 if (GET_CODE (jump) == JUMP_INSN
2266 && GET_CODE (PATTERN (jump)) == SET
2267 && SET_DEST (PATTERN (jump)) == pc_rtx
2268 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2269 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2270 == loop_stack->data.loop.end_label))
2271 {
2272 rtx prev;
2273
2274 /* The test might be complex and reference LABEL multiple times,
2275 like the loop in loop_iterations to set vtop. To handle this,
2276 we move LABEL. */
2277 insn = PREV_INSN (label);
2278 reorder_insns (label, label, start_label);
2279
2280 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2281 {
2282 /* We ignore line number notes, but if we see any other note,
2283 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2284 NOTE_INSN_LOOP_*, we disable this optimization. */
2285 if (GET_CODE (prev) == NOTE)
2286 {
2287 if (NOTE_LINE_NUMBER (prev) < 0)
2288 break;
2289 continue;
2290 }
2291 if (GET_CODE (prev) == CODE_LABEL)
2292 break;
2293 if (GET_CODE (prev) == JUMP_INSN)
2294 {
2295 if (GET_CODE (PATTERN (prev)) == SET
2296 && SET_DEST (PATTERN (prev)) == pc_rtx
2297 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2298 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2299 == LABEL_REF)
2300 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2301 {
2302 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2303 = start_label;
2304 emit_note_after (NOTE_INSN_LOOP_END, prev);
2305 needs_end_jump = 0;
2306 }
2307 break;
2308 }
2309 }
2310 }
2311 }
2312
2313 /* If the loop starts with a loop exit, roll that to the end where
2314 it will optimize together with the jump back.
93de5c31
MM
2315
2316 We look for the conditional branch to the exit, except that once
2317 we find such a branch, we don't look past 30 instructions.
2318
2319 In more detail, if the loop presently looks like this (in pseudo-C):
2320
2321 start_label:
2322 if (test) goto end_label;
2323 body;
2324 goto start_label;
0720f6fb 2325 end_label:
93de5c31
MM
2326
2327 transform it to look like:
2328
2329 goto start_label;
2330 newstart_label:
2331 body;
2332 start_label:
2333 if (test) goto end_label;
2334 goto newstart_label;
0720f6fb 2335 end_label:
93de5c31
MM
2336
2337 Here, the `test' may actually consist of some reasonably complex
2338 code, terminating in a test. */
0720f6fb 2339
28d81abb 2340 if (optimize
a7d308f7 2341 && needs_end_jump
28d81abb
RK
2342 &&
2343 ! (GET_CODE (insn) == JUMP_INSN
2344 && GET_CODE (PATTERN (insn)) == SET
2345 && SET_DEST (PATTERN (insn)) == pc_rtx
2346 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2347 {
93de5c31 2348 int eh_regions = 0;
0720f6fb
MM
2349 int num_insns = 0;
2350 rtx last_test_insn = NULL_RTX;
93de5c31 2351
28d81abb
RK
2352 /* Scan insns from the top of the loop looking for a qualified
2353 conditional exit. */
2354 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2355 insn = NEXT_INSN (insn))
2356 {
93de5c31
MM
2357 if (GET_CODE (insn) == NOTE)
2358 {
2359 if (optimize < 2
2360 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2361 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2362 /* The code that actually moves the exit test will
2363 carefully leave BLOCK notes in their original
2364 location. That means, however, that we can't debug
2365 the exit test itself. So, we refuse to move code
2366 containing BLOCK notes at low optimization levels. */
2367 break;
2368
2369 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2370 ++eh_regions;
2371 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2372 {
2373 --eh_regions;
2374 if (eh_regions < 0)
2375 /* We've come to the end of an EH region, but
2376 never saw the beginning of that region. That
2377 means that an EH region begins before the top
2378 of the loop, and ends in the middle of it. The
2379 existence of such a situation violates a basic
2380 assumption in this code, since that would imply
2381 that even when EH_REGIONS is zero, we might
2382 move code out of an exception region. */
2383 abort ();
2384 }
28d81abb 2385
f114df20
JL
2386 /* We must not walk into a nested loop. */
2387 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2388 break;
2389
93de5c31
MM
2390 /* We already know this INSN is a NOTE, so there's no
2391 point in looking at it to see if it's a JUMP. */
2392 continue;
2393 }
28d81abb
RK
2394
2395 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2396 num_insns++;
2397
2398 if (last_test_insn && num_insns > 30)
2399 break;
2400
93de5c31
MM
2401 if (eh_regions > 0)
2402 /* We don't want to move a partial EH region. Consider:
2403
2404 while ( ( { try {
2405 if (cond ()) 0;
2406 else {
2407 bar();
2408 1;
2409 }
2410 } catch (...) {
2411 1;
2412 } )) {
2413 body;
2414 }
2415
2416 This isn't legal C++, but here's what it's supposed to
2417 mean: if cond() is true, stop looping. Otherwise,
2418 call bar, and keep looping. In addition, if cond
2419 throws an exception, catch it and keep looping. Such
2420 constructs are certainly legal in LISP.
2421
2422 We should not move the `if (cond()) 0' test since then
2423 the EH-region for the try-block would be broken up.
2424 (In this case we would move the EH_BEG note for the `try'
2425 and `if cond()' but not the call to bar() or the
2426 EH_END note.)
2427
2428 So we don't look for tests within an EH region. */
2429 continue;
2430
0720f6fb 2431 if (GET_CODE (insn) == JUMP_INSN
28d81abb 2432 && GET_CODE (PATTERN (insn)) == SET
0720f6fb
MM
2433 && SET_DEST (PATTERN (insn)) == pc_rtx)
2434 {
2435 /* This is indeed a jump. */
2436 rtx dest1 = NULL_RTX;
2437 rtx dest2 = NULL_RTX;
2438 rtx potential_last_test;
2439 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2440 {
2441 /* A conditional jump. */
2442 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2443 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2444 potential_last_test = insn;
2445 }
2446 else
2447 {
2448 /* An unconditional jump. */
2449 dest1 = SET_SRC (PATTERN (insn));
2450 /* Include the BARRIER after the JUMP. */
2451 potential_last_test = NEXT_INSN (insn);
2452 }
2453
2454 do {
2455 if (dest1 && GET_CODE (dest1) == LABEL_REF
2456 && ((XEXP (dest1, 0)
2457 == loop_stack->data.loop.alt_end_label)
2458 || (XEXP (dest1, 0)
2459 == loop_stack->data.loop.end_label)))
2460 {
2461 last_test_insn = potential_last_test;
2462 break;
2463 }
2464
2465 /* If this was a conditional jump, there may be
2466 another label at which we should look. */
2467 dest1 = dest2;
2468 dest2 = NULL_RTX;
2469 } while (dest1);
2470 }
28d81abb
RK
2471 }
2472
2473 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2474 {
2475 /* We found one. Move everything from there up
2476 to the end of the loop, and add a jump into the loop
2477 to jump there. */
2478 register rtx newstart_label = gen_label_rtx ();
2479 register rtx start_move = start_label;
93de5c31 2480 rtx next_insn;
28d81abb 2481
b4ac57ab 2482 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
28d81abb
RK
2483 then we want to move this note also. */
2484 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2485 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2486 == NOTE_INSN_LOOP_CONT))
2487 start_move = PREV_INSN (start_move);
2488
2489 emit_label_after (newstart_label, PREV_INSN (start_move));
93de5c31
MM
2490
2491 /* Actually move the insns. Start at the beginning, and
2492 keep copying insns until we've copied the
2493 last_test_insn. */
2494 for (insn = start_move; insn; insn = next_insn)
2495 {
2496 /* Figure out which insn comes after this one. We have
2497 to do this before we move INSN. */
2498 if (insn == last_test_insn)
2499 /* We've moved all the insns. */
2500 next_insn = NULL_RTX;
2501 else
2502 next_insn = NEXT_INSN (insn);
2503
2504 if (GET_CODE (insn) == NOTE
2505 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2506 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2507 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2508 NOTE_INSN_BLOCK_ENDs because the correct generation
2509 of debugging information depends on these appearing
2510 in the same order in the RTL and in the tree
2511 structure, where they are represented as BLOCKs.
2512 So, we don't move block notes. Of course, moving
2513 the code inside the block is likely to make it
2514 impossible to debug the instructions in the exit
2515 test, but such is the price of optimization. */
2516 continue;
2517
2518 /* Move the INSN. */
2519 reorder_insns (insn, insn, get_last_insn ());
2520 }
2521
28d81abb
RK
2522 emit_jump_insn_after (gen_jump (start_label),
2523 PREV_INSN (newstart_label));
2524 emit_barrier_after (PREV_INSN (newstart_label));
2525 start_label = newstart_label;
2526 }
2527 }
2528
a7d308f7
R
2529 if (needs_end_jump)
2530 {
2531 emit_jump (start_label);
2532 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2533 }
28d81abb
RK
2534 emit_label (loop_stack->data.loop.end_label);
2535
2536 POPSTACK (loop_stack);
2537
2538 last_expr_type = 0;
2539}
2540
2541/* Generate a jump to the current loop's continue-point.
2542 This is usually the top of the loop, but may be specified
2543 explicitly elsewhere. If not currently inside a loop,
2544 return 0 and do nothing; caller will print an error message. */
2545
2546int
2547expand_continue_loop (whichloop)
2548 struct nesting *whichloop;
2549{
2550 last_expr_type = 0;
2551 if (whichloop == 0)
2552 whichloop = loop_stack;
2553 if (whichloop == 0)
2554 return 0;
37366632
RK
2555 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2556 NULL_RTX);
28d81abb
RK
2557 return 1;
2558}
2559
2560/* Generate a jump to exit the current loop. If not currently inside a loop,
2561 return 0 and do nothing; caller will print an error message. */
2562
2563int
2564expand_exit_loop (whichloop)
2565 struct nesting *whichloop;
2566{
2567 last_expr_type = 0;
2568 if (whichloop == 0)
2569 whichloop = loop_stack;
2570 if (whichloop == 0)
2571 return 0;
37366632 2572 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
28d81abb
RK
2573 return 1;
2574}
2575
2576/* Generate a conditional jump to exit the current loop if COND
2577 evaluates to zero. If not currently inside a loop,
2578 return 0 and do nothing; caller will print an error message. */
2579
2580int
2581expand_exit_loop_if_false (whichloop, cond)
2582 struct nesting *whichloop;
2583 tree cond;
2584{
b93a436e
JL
2585 rtx label = gen_label_rtx ();
2586 rtx last_insn;
28d81abb 2587 last_expr_type = 0;
b93a436e 2588
28d81abb
RK
2589 if (whichloop == 0)
2590 whichloop = loop_stack;
2591 if (whichloop == 0)
2592 return 0;
b93a436e
JL
2593 /* In order to handle fixups, we actually create a conditional jump
2594 around an unconditional branch to exit the loop. If fixups are
2595 necessary, they go before the unconditional branch. */
d902c7ea 2596
b93a436e
JL
2597
2598 do_jump (cond, NULL_RTX, label);
2599 last_insn = get_last_insn ();
2600 if (GET_CODE (last_insn) == CODE_LABEL)
2601 whichloop->data.loop.alt_end_label = last_insn;
2602 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2603 NULL_RTX);
2604 emit_label (label);
ca695ac9 2605
28d81abb
RK
2606 return 1;
2607}
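
/* A sketch of how a front end uses the loop functions above to expand
   a simple `while' statement (illustrative only):

	while (test) body;

   becomes, in terms of the calls defined in this file,

	expand_start_loop (1);
	expand_exit_loop_if_false (0, test);
	... expand body ...
	expand_end_loop ();

   A null WHICHLOOP means the innermost loop on loop_stack, as the code
   above shows.  */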
2608
4a050cc2
JL
2609/* Return nonzero if the loop nest is empty. Else return zero. */
2610
2611int
2612stmt_loop_nest_empty ()
2613{
2614 return (loop_stack == NULL);
2615}
2616
28d81abb
RK
2617/* Return non-zero if we should preserve sub-expressions as separate
2618 pseudos. We never do so if we aren't optimizing. We always do so
2619 if -fexpensive-optimizations.
2620
2621 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2622 the loop may still be a small one. */
2623
2624int
2625preserve_subexpressions_p ()
2626{
2627 rtx insn;
2628
2629 if (flag_expensive_optimizations)
2630 return 1;
2631
01d939e8 2632 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
28d81abb
RK
2633 return 0;
2634
2635 insn = get_last_insn_anywhere ();
2636
2637 return (insn
2638 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2639 < n_non_fixed_regs * 3));
2640
2641}
2642
2643/* Generate a jump to exit the current loop, conditional, binding contour
2644 or case statement. Not all such constructs are visible to this function,
2645 only those started with EXIT_FLAG nonzero. Individual languages use
2646 the EXIT_FLAG parameter to control which kinds of constructs you can
2647 exit this way.
2648
2649 If not currently inside anything that can be exited,
2650 return 0 and do nothing; caller will print an error message. */
2651
2652int
2653expand_exit_something ()
2654{
2655 struct nesting *n;
2656 last_expr_type = 0;
2657 for (n = nesting_stack; n; n = n->all)
2658 if (n->exit_label != 0)
2659 {
37366632 2660 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
28d81abb
RK
2661 return 1;
2662 }
2663
2664 return 0;
2665}
2666\f
2667/* Generate RTL to return from the current function, with no value.
2668 (That is, we do not do anything about returning any value.) */
2669
2670void
2671expand_null_return ()
2672{
2673 struct nesting *block = block_stack;
bd695e1e
RH
2674 rtx last_insn = get_last_insn ();
2675
2676 /* If this function was declared to return a value, but we
2677 didn't, clobber the return registers so that they are not
2678 propagated live to the rest of the function. */
2679
2680 diddle_return_value (CLOBBER);
28d81abb
RK
2681
2682 /* Does any pending block have cleanups? */
2683
2684 while (block && block->data.block.cleanups == 0)
2685 block = block->next;
2686
2687 /* If yes, use a goto to return, since that runs cleanups. */
2688
2689 expand_null_return_1 (last_insn, block != 0);
2690}
2691
2692/* Generate RTL to return from the current function, with value VAL. */
2693
8d800403 2694static void
28d81abb
RK
2695expand_value_return (val)
2696 rtx val;
2697{
2698 struct nesting *block = block_stack;
2699 rtx last_insn = get_last_insn ();
2700 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2701
2702 /* Copy the value to the return location
2703 unless it's already there. */
2704
2705 if (return_reg != val)
77636079 2706 {
77636079 2707 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
37877eb0 2708#ifdef PROMOTE_FUNCTION_RETURN
77636079 2709 int unsignedp = TREE_UNSIGNED (type);
14a774a9
RK
2710 enum machine_mode old_mode
2711 = DECL_MODE (DECL_RESULT (current_function_decl));
28612f9e 2712 enum machine_mode mode
14a774a9 2713 = promote_mode (type, old_mode, &unsignedp, 1);
77636079 2714
14a774a9
RK
2715 if (mode != old_mode)
2716 val = convert_modes (mode, old_mode, val, unsignedp);
77636079 2717#endif
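      /* E.g. (illustrative): for a function declared to return `short'
	 on a target whose PROMOTE_FUNCTION_RETURN widens sub-word
	 return values, VAL is converted here from HImode to the
	 promoted mode (typically SImode) before the move below.  */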
14a774a9
RK
2718 if (GET_CODE (return_reg) == PARALLEL)
2719 emit_group_load (return_reg, val, int_size_in_bytes (type),
2720 TYPE_ALIGN (type) / BITS_PER_UNIT);
2721 else
77636079
RS
2722 emit_move_insn (return_reg, val);
2723 }
14a774a9 2724
bd695e1e 2725 diddle_return_value (USE);
28d81abb
RK
2726
2727 /* Does any pending block have cleanups? */
2728
2729 while (block && block->data.block.cleanups == 0)
2730 block = block->next;
2731
2732 /* If yes, use a goto to return, since that runs cleanups.
2733 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2734
2735 expand_null_return_1 (last_insn, block != 0);
2736}
2737
2738/* Output a return with no value. If LAST_INSN is nonzero,
2739 pretend that the return takes place after LAST_INSN.
2740 If USE_GOTO is nonzero then don't use a return instruction;
2741 go to the return label instead. This causes any cleanups
2742 of pending blocks to be executed normally. */
2743
2744static void
2745expand_null_return_1 (last_insn, use_goto)
2746 rtx last_insn;
2747 int use_goto;
2748{
2749 rtx end_label = cleanup_label ? cleanup_label : return_label;
2750
2751 clear_pending_stack_adjust ();
2752 do_pending_stack_adjust ();
2753 last_expr_type = 0;
2754
2755 /* PCC-struct return always uses an epilogue. */
2756 if (current_function_returns_pcc_struct || use_goto)
2757 {
2758 if (end_label == 0)
2759 end_label = return_label = gen_label_rtx ();
37366632 2760 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2761 return;
2762 }
2763
2764 /* Otherwise output a simple return-insn if one is available,
2765 unless it won't do the job. */
2766#ifdef HAVE_return
2767 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2768 {
2769 emit_jump_insn (gen_return ());
2770 emit_barrier ();
2771 return;
2772 }
2773#endif
2774
2775 /* Otherwise jump to the epilogue. */
37366632 2776 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2777}
2778\f
2779/* Generate RTL to evaluate the expression RETVAL and return it
2780 from the current function. */
2781
2782void
2783expand_return (retval)
2784 tree retval;
2785{
2786 /* If there are any cleanups to be performed, then they will
2787 be inserted following LAST_INSN. It is desirable
2788 that the last_insn, for such purposes, should be the
2789 last insn before computing the return value. Otherwise, cleanups
2790 which call functions can clobber the return value. */
2791 /* ??? rms: I think that is erroneous, because in C++ it would
2792 run destructors on variables that might be used in the subsequent
2793 computation of the return value. */
2794 rtx last_insn = 0;
14a774a9 2795 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
28d81abb 2796 register rtx val = 0;
7bdb32b9 2797#ifdef HAVE_return
28d81abb 2798 register rtx op0;
7bdb32b9 2799#endif
28d81abb
RK
2800 tree retval_rhs;
2801 int cleanups;
28d81abb
RK
2802
2803 /* If function wants no value, give it none. */
2804 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2805 {
37366632 2806 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2807 emit_queue ();
28d81abb
RK
2808 expand_null_return ();
2809 return;
2810 }
2811
2812 /* Are any cleanups needed? E.g. C++ destructors to be run? */
7a9a00be
MS
2813 /* This is not sufficient. We also need to watch for cleanups of the
2814 expression we are about to expand. Unfortunately, we cannot know
2815 if it has cleanups until we expand it, and we want to change how we
2816 expand it depending upon if we need cleanups. We can't win. */
2817#if 0
28d81abb 2818 cleanups = any_pending_cleanups (1);
7a9a00be
MS
2819#else
2820 cleanups = 1;
2821#endif
28d81abb
RK
2822
2823 if (TREE_CODE (retval) == RESULT_DECL)
2824 retval_rhs = retval;
2825 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2826 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2827 retval_rhs = TREE_OPERAND (retval, 1);
2828 else if (TREE_TYPE (retval) == void_type_node)
2829 /* Recognize tail-recursive call to void function. */
2830 retval_rhs = retval;
2831 else
2832 retval_rhs = NULL_TREE;
2833
2834 /* Only use `last_insn' if there are cleanups which must be run. */
2835 if (cleanups || cleanup_label != 0)
2836 last_insn = get_last_insn ();
2837
2838 /* Distribute return down conditional expr if either of the sides
2839 may involve tail recursion (see test below). This enhances the number
2840 of tail recursions we see. Don't do this always since it can produce
2841 sub-optimal code in some cases and we distribute assignments into
2842 conditional expressions when it would help. */
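  /* For instance (an illustration of the transformation done below):

	return cond ? f (x) : g (x);

     is expanded as if it had been written

	if (cond) return f (x); else return g (x);

     so that each arm can be recognized as a tail-recursive call on
     its own.  */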
2843
2844 if (optimize && retval_rhs != 0
2845 && frame_offset == 0
2846 && TREE_CODE (retval_rhs) == COND_EXPR
2847 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2848 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2849 {
2850 rtx label = gen_label_rtx ();
a0a34f94
RK
2851 tree expr;
2852
37366632 2853 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
1483bddb 2854 start_cleanup_deferral ();
dd98f85c 2855 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
a0a34f94
RK
2856 DECL_RESULT (current_function_decl),
2857 TREE_OPERAND (retval_rhs, 1));
2858 TREE_SIDE_EFFECTS (expr) = 1;
2859 expand_return (expr);
28d81abb 2860 emit_label (label);
a0a34f94 2861
dd98f85c 2862 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
a0a34f94
RK
2863 DECL_RESULT (current_function_decl),
2864 TREE_OPERAND (retval_rhs, 2));
2865 TREE_SIDE_EFFECTS (expr) = 1;
2866 expand_return (expr);
1483bddb 2867 end_cleanup_deferral ();
28d81abb
RK
2868 return;
2869 }
2870
642cac7b 2871 /* Attempt to optimize the call if it is tail recursive. */
564ea051
JW
2872 if (optimize_tail_recursion (retval_rhs, last_insn))
2873 return;
642cac7b 2874
28d81abb
RK
2875#ifdef HAVE_return
2876 /* This optimization is safe if there are local cleanups
2877 because expand_null_return takes care of them.
2878 ??? I think it should also be safe when there is a cleanup label,
2879 because expand_null_return takes care of them, too.
2880 Any reason why not? */
2881 if (HAVE_return && cleanup_label == 0
5eb94e4e
RK
2882 && ! current_function_returns_pcc_struct
2883 && BRANCH_COST <= 1)
28d81abb
RK
2884 {
2885 /* If this is return x == y; then generate
2886 if (x == y) return 1; else return 0;
3f8b69de
TG
2887 if we can do it with explicit return insns and branches are cheap,
2888 but not if we have the corresponding scc insn. */
2889 int has_scc = 0;
28d81abb
RK
2890 if (retval_rhs)
2891 switch (TREE_CODE (retval_rhs))
2892 {
2893 case EQ_EXPR:
3f8b69de
TG
2894#ifdef HAVE_seq
2895 has_scc = HAVE_seq;
2896#endif
28d81abb 2897 case NE_EXPR:
3f8b69de
TG
2898#ifdef HAVE_sne
2899 has_scc = HAVE_sne;
2900#endif
28d81abb 2901 case GT_EXPR:
3f8b69de
TG
2902#ifdef HAVE_sgt
2903 has_scc = HAVE_sgt;
2904#endif
28d81abb 2905 case GE_EXPR:
3f8b69de
TG
2906#ifdef HAVE_sge
2907 has_scc = HAVE_sge;
2908#endif
28d81abb 2909 case LT_EXPR:
3f8b69de
TG
2910#ifdef HAVE_slt
2911 has_scc = HAVE_slt;
2912#endif
28d81abb 2913 case LE_EXPR:
3f8b69de
TG
2914#ifdef HAVE_sle
2915 has_scc = HAVE_sle;
2916#endif
28d81abb
RK
2917 case TRUTH_ANDIF_EXPR:
2918 case TRUTH_ORIF_EXPR:
2919 case TRUTH_AND_EXPR:
2920 case TRUTH_OR_EXPR:
2921 case TRUTH_NOT_EXPR:
94ed3915 2922 case TRUTH_XOR_EXPR:
3f8b69de
TG
2923 if (! has_scc)
2924 {
2925 op0 = gen_label_rtx ();
2926 jumpifnot (retval_rhs, op0);
2927 expand_value_return (const1_rtx);
2928 emit_label (op0);
2929 expand_value_return (const0_rtx);
2930 return;
2931 }
e9a25f70
JL
2932 break;
2933
2934 default:
2935 break;
28d81abb
RK
2936 }
2937 }
2938#endif /* HAVE_return */
2939
4c485b63
JL
2940 /* If the result is an aggregate that is being returned in one (or more)
2941 registers, load the registers here. The compiler currently can't handle
2942 copying a BLKmode value into registers. We could put this code in a
2943 more general area (for use by everyone instead of just function
2944 call/return), but until this feature is generally usable it is kept here
3ffeb8f1
JW
2945 (and in expand_call). The value must go into a pseudo in case there
2946 are cleanups that will clobber the real return register. */
4c485b63
JL
2947
2948 if (retval_rhs != 0
2949 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
14a774a9 2950 && GET_CODE (result_rtl) == REG)
4c485b63 2951 {
a7f875d7 2952 int i, bitpos, xbitpos;
4c485b63
JL
2953 int big_endian_correction = 0;
2954 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2955 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
c84e2712
KG
2956 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2957 (unsigned int)BITS_PER_WORD);
4c485b63 2958 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
c16ddde3 2959 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
4c485b63 2960 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 2961 enum machine_mode tmpmode, result_reg_mode;
4c485b63 2962
a7f875d7
RK
2963 /* Structures whose size is not a multiple of a word are aligned
2964 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2965 machine, this means we must skip the empty high order bytes when
2966 calculating the bit offset. */
2967 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2968 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2969 * BITS_PER_UNIT));
2970
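      /* Worked example (illustrative): with 32-bit words, 4-byte
	 UNITS_PER_WORD and 8-bit BITS_PER_UNIT, a 6-byte structure
	 gives

	    big_endian_correction = 32 - (6 % 4) * 8 = 16,

	 so the first destination word is filled starting 16 bits in,
	 right-justifying the value on a big-endian machine.  */
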
2971 /* Copy the structure BITSIZE bits at a time. */
2972 for (bitpos = 0, xbitpos = big_endian_correction;
2973 bitpos < bytes * BITS_PER_UNIT;
2974 bitpos += bitsize, xbitpos += bitsize)
4c485b63 2975 {
a7f875d7 2976 /* We need a new destination pseudo each time xbitpos is
abc95ed3 2977 on a word boundary and when xbitpos == big_endian_correction
a7f875d7
RK
2978 (the first time through). */
2979 if (xbitpos % BITS_PER_WORD == 0
2980 || xbitpos == big_endian_correction)
4c485b63 2981 {
a7f875d7
RK
2982 /* Generate an appropriate register. */
2983 dst = gen_reg_rtx (word_mode);
2984 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2985
2986 /* Clobber the destination before we move anything into it. */
38a448ca 2987 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
4c485b63 2988 }
a7f875d7
RK
2989
2990 /* We need a new source operand each time bitpos is on a word
2991 boundary. */
2992 if (bitpos % BITS_PER_WORD == 0)
2993 src = operand_subword_force (result_val,
2994 bitpos / BITS_PER_WORD,
2995 BLKmode);
2996
2997 /* Use bitpos for the source extraction (left justified) and
2998 xbitpos for the destination store (right justified). */
2999 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3000 extract_bit_field (src, bitsize,
3001 bitpos % BITS_PER_WORD, 1,
3002 NULL_RTX, word_mode,
3003 word_mode,
3004 bitsize / BITS_PER_UNIT,
3005 BITS_PER_WORD),
3006 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
4c485b63
JL
3007 }
3008
4c485b63
JL
3009 /* Find the smallest integer mode large enough to hold the
3010 entire structure and use that mode instead of BLKmode
3011 on the USE insn for the return register. */
3012 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3013 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
0c61f541 3014 tmpmode != VOIDmode;
4c485b63 3015 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3ffeb8f1
JW
3016 {
3017 /* Have we found a large enough mode? */
3018 if (GET_MODE_SIZE (tmpmode) >= bytes)
3019 break;
3020 }
4c485b63
JL
3021
3022 /* No suitable mode found. */
0c61f541 3023 if (tmpmode == VOIDmode)
3ffeb8f1 3024 abort ();
4c485b63 3025
14a774a9 3026 PUT_MODE (result_rtl, tmpmode);
3ffeb8f1 3027
af55da56
JW
3028 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3029 result_reg_mode = word_mode;
3030 else
3031 result_reg_mode = tmpmode;
3032 result_reg = gen_reg_rtx (result_reg_mode);
3033
3ffeb8f1 3034 emit_queue ();
3ffeb8f1 3035 for (i = 0; i < n_regs; i++)
af55da56 3036 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 3037 result_pseudos[i]);
4c485b63 3038
af55da56
JW
3039 if (tmpmode != result_reg_mode)
3040 result_reg = gen_lowpart (tmpmode, result_reg);
3041
4c485b63
JL
3042 expand_value_return (result_reg);
3043 }
3044 else if (cleanups
28d81abb
RK
3045 && retval_rhs != 0
3046 && TREE_TYPE (retval_rhs) != void_type_node
14a774a9
RK
3047 && (GET_CODE (result_rtl) == REG
3048 || (GET_CODE (result_rtl) == PARALLEL)))
28d81abb 3049 {
14a774a9
RK
3050 /* Calculate the return value into a temporary (usually a pseudo
3051 reg). */
3052 val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
3053 0, 0, 1);
dd98f85c
JM
3054 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3055 val = force_not_mem (val);
28d81abb 3056 emit_queue ();
28d81abb
RK
3057 /* Return the calculated value, doing cleanups first. */
3058 expand_value_return (val);
3059 }
3060 else
3061 {
3062 /* No cleanups or no hard reg used;
3063 calculate value into hard return reg. */
cba389cd 3064 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb 3065 emit_queue ();
14a774a9 3066 expand_value_return (result_rtl);
28d81abb
RK
3067 }
3068}
3069
3070/* Return 1 if the end of the generated RTX is not a barrier.
3071 This means code already compiled can drop through. */
3072
3073int
3074drop_through_at_end_p ()
3075{
3076 rtx insn = get_last_insn ();
3077 while (insn && GET_CODE (insn) == NOTE)
3078 insn = PREV_INSN (insn);
3079 return insn && GET_CODE (insn) != BARRIER;
3080}
3081\f
642cac7b
JL
3082/* Test CALL_EXPR to determine if it is a potential tail recursion call
3083 and emit code to optimize the tail recursion. LAST_INSN indicates where
564ea051
JW
3084 to place the jump to the tail recursion label. Return TRUE if the
3085 call was optimized into a goto.
642cac7b
JL
3086
3087 This is only used by expand_return, but expand_call is expected to
3088 use it soon. */
3089
564ea051 3090int
642cac7b
JL
3091optimize_tail_recursion (call_expr, last_insn)
3092 tree call_expr;
3093 rtx last_insn;
3094{
3095 /* For tail-recursive call to current function,
3096 just jump back to the beginning.
3097 It's unsafe if any auto variable in this function
3098 has its address taken; for simplicity,
3099 require stack frame to be empty. */
3100 if (optimize && call_expr != 0
3101 && frame_offset == 0
3102 && TREE_CODE (call_expr) == CALL_EXPR
3103 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
3104 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
3105 /* Finish checking validity, and if valid emit code
3106 to set the argument variables for the new call. */
3107 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
3108 DECL_ARGUMENTS (current_function_decl)))
3109 {
3110 if (tail_recursion_label == 0)
3111 {
3112 tail_recursion_label = gen_label_rtx ();
3113 emit_label_after (tail_recursion_label,
3114 tail_recursion_reentry);
3115 }
3116 emit_queue ();
3117 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3118 emit_barrier ();
564ea051 3119 return 1;
642cac7b 3120 }
564ea051
JW
3121
3122 return 0;
642cac7b
JL
3123}
3124
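/* For example (source-level view, purely illustrative): in

	int sum (int n, int acc)
	{
	  if (n == 0)
	    return acc;
	  return sum (n - 1, acc + n);
	}

   the recursive `return sum (...)' passes the tests above when the
   frame is empty, so the parameter registers are simply reloaded with
   the new argument values and control jumps back to
   tail_recursion_label instead of emitting a real call.  */
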
28d81abb
RK
3125/* Emit code to alter this function's formal parms for a tail-recursive call.
3126 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3127 FORMALS is the chain of decls of formals.
3128 Return 1 if this can be done;
3129 otherwise return 0 and do not emit any code. */
3130
3131static int
3132tail_recursion_args (actuals, formals)
3133 tree actuals, formals;
3134{
3135 register tree a = actuals, f = formals;
3136 register int i;
3137 register rtx *argvec;
3138
3139 /* Check that number and types of actuals are compatible
3140 with the formals. This is not always true in valid C code.
3141 Also check that no formal needs to be addressable
3142 and that all formals are scalars. */
3143
3144 /* Also count the args. */
3145
3146 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3147 {
5c7fe359
RK
3148 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3149 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
28d81abb
RK
3150 return 0;
3151 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3152 return 0;
3153 }
3154 if (a != 0 || f != 0)
3155 return 0;
3156
3157 /* Compute all the actuals. */
3158
3159 argvec = (rtx *) alloca (i * sizeof (rtx));
3160
3161 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 3162 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3163
3164 /* Find which actual values refer to current values of previous formals.
3165 Copy each of them now, before any formal is changed. */
3166
3167 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3168 {
3169 int copy = 0;
3170 register int j;
3171 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3172 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3173 { copy = 1; break; }
3174 if (copy)
3175 argvec[i] = copy_to_reg (argvec[i]);
3176 }
3177
3178 /* Store the values of the actuals into the formals. */
3179
3180 for (f = formals, a = actuals, i = 0; f;
3181 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3182 {
98f3b471 3183 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
28d81abb
RK
3184 emit_move_insn (DECL_RTL (f), argvec[i]);
3185 else
3186 convert_move (DECL_RTL (f), argvec[i],
3187 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3188 }
3189
3190 free_temp_slots ();
3191 return 1;
3192}
3193\f
3194/* Generate the RTL code for entering a binding contour.
3195 The variables are declared one by one, by calls to `expand_decl'.
3196
8e91754e
MM
3197 FLAGS is a bitwise or of the following flags:
3198
3199 1 - Nonzero if this construct should be visible to
3200 `exit_something'.
3201
3202 2 - Nonzero if this contour does not require a
3203 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3204 language-independent code should set this flag because they
3205 will not create corresponding BLOCK nodes. (There should be
3206 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3207 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
a97901e6
MM
3208 when expand_end_bindings is called.
3209
3210 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3211 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3212 note. */
28d81abb
RK
3213
3214void
a97901e6 3215expand_start_bindings_and_block (flags, block)
8e91754e 3216 int flags;
a97901e6 3217 tree block;
28d81abb
RK
3218{
3219 struct nesting *thisblock = ALLOC_NESTING ();
8e91754e
MM
3220 rtx note;
3221 int exit_flag = ((flags & 1) != 0);
3222 int block_flag = ((flags & 2) == 0);
a97901e6
MM
3223
3224 /* If a BLOCK is supplied, then the caller should be requesting a
3225 NOTE_INSN_BLOCK_BEG note. */
3226 if (!block_flag && block)
3227 abort ();
8e91754e 3228
a97901e6
MM
3229 /* Create a note to mark the beginning of the block. */
3230 if (block_flag)
3231 {
3232 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3233 NOTE_BLOCK (note) = block;
3234 }
3235 else
3236 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3237
28d81abb
RK
3238 /* Make an entry on block_stack for the block we are entering. */
3239
3240 thisblock->next = block_stack;
3241 thisblock->all = nesting_stack;
3242 thisblock->depth = ++nesting_depth;
3243 thisblock->data.block.stack_level = 0;
3244 thisblock->data.block.cleanups = 0;
3f1d071b 3245 thisblock->data.block.n_function_calls = 0;
e976b8b2 3246 thisblock->data.block.exception_region = 0;
3f1d071b 3247 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
e976b8b2
MS
3248
3249 thisblock->data.block.conditional_code = 0;
3250 thisblock->data.block.last_unconditional_cleanup = note;
a571f7a0
MM
3251 /* When we insert instructions after the last unconditional cleanup,
3252 we don't adjust last_insn. That means that a later add_insn will
3253 clobber the instructions we've just added. The easiest way to
3254 fix this is to just insert another instruction here, so that the
3255 instructions inserted after the last unconditional cleanup are
3256 never the last instruction. */
3257 emit_note (NULL_PTR, NOTE_INSN_DELETED);
e976b8b2
MS
3258 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3259
28d81abb
RK
3260 if (block_stack
3261 && !(block_stack->data.block.cleanups == NULL_TREE
3262 && block_stack->data.block.outer_cleanups == NULL_TREE))
3263 thisblock->data.block.outer_cleanups
3264 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3265 block_stack->data.block.outer_cleanups);
3266 else
3267 thisblock->data.block.outer_cleanups = 0;
3268 thisblock->data.block.label_chain = 0;
3269 thisblock->data.block.innermost_stack_block = stack_block_stack;
3270 thisblock->data.block.first_insn = note;
3271 thisblock->data.block.block_start_count = ++current_block_start_count;
3272 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3273 block_stack = thisblock;
3274 nesting_stack = thisblock;
3275
3276 /* Make a new level for allocating stack slots. */
3277 push_temp_slots ();
3278}
3279
3280/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3281 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3282 expand_expr are made. After we end the region, we know that all
3283 space for all temporaries that were created by TARGET_EXPRs will be
3284 destroyed and their space freed for reuse. */
3285
3286void
3287expand_start_target_temps ()
3288{
3289 /* This is so that even if the result is preserved, the space
3290 allocated will be freed, as we know that it is no longer in use. */
3291 push_temp_slots ();
3292
3293 /* Start a new binding layer that will keep track of all cleanup
3294 actions to be performed. */
3295 expand_start_bindings (2);
3296
3297 target_temp_slot_level = temp_slot_level;
3298}
3299
3300void
3301expand_end_target_temps ()
3302{
3303 expand_end_bindings (NULL_TREE, 0, 0);
3304
3305 /* This is so that even if the result is preserved, the space
3306 allocated will be freed, as we know that it is no longer in use. */
3307 pop_temp_slots ();
3308}
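/* Illustrative usage sketch (hypothetical caller, not code from this
   file): a front end expanding one full statement might write

     expand_start_target_temps ();
     expand_expr_stmt (stmt);
     expand_end_target_temps ();

   so that stack slots for temporaries created by TARGET_EXPRs while
   expanding STMT are known to be dead, and reusable, afterwards.  */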
3309
3310/* Mark top block of block_stack as an implicit binding for an
3311 exception region. This is used to prevent infinite recursion when
3312 ending a binding with expand_end_bindings. It is only ever called
3313 by expand_eh_region_start, as that is the only way to create a
3314 block stack for an exception region. */
3315
3316void
3317mark_block_as_eh_region ()
3318{
3319 block_stack->data.block.exception_region = 1;
3320 if (block_stack->next
3321 && block_stack->next->data.block.conditional_code)
3322 {
3323 block_stack->data.block.conditional_code
3324 = block_stack->next->data.block.conditional_code;
3325 block_stack->data.block.last_unconditional_cleanup
3326 = block_stack->next->data.block.last_unconditional_cleanup;
3327 block_stack->data.block.cleanup_ptr
3328 = block_stack->next->data.block.cleanup_ptr;
3329 }
3330}
3331
3332/* True if we are currently emitting insns in an area of output code
3333 that is controlled by a conditional expression. This is used by
3334 the cleanup handling code to generate conditional cleanup actions. */
3335
3336int
3337conditional_context ()
3338{
3339 return block_stack && block_stack->data.block.conditional_code;
3340}
3341
3342/* Mark top block of block_stack as not for an implicit binding for an
3343 exception region. This is only ever done by expand_eh_region_end
3344 to let expand_end_bindings know that it is being called explicitly
3345 to end just the binding layer associated with the exception
3346 region; otherwise expand_end_bindings would try to end all
3347 implicit binding layers for exception regions, and then one
3348 normal binding layer. */
3349
3350void
3351mark_block_as_not_eh_region ()
3352{
3353 block_stack->data.block.exception_region = 0;
3354}
3355
3356/* True if the top block of block_stack was marked as for an exception
3357 region by mark_block_as_eh_region. */
3358
3359int
3360is_eh_region ()
3361{
3362 return cfun && block_stack && block_stack->data.block.exception_region;
3363}
3364
3365/* Emit a handler label for a nonlocal goto handler.
3366 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3367
3368static rtx
3369expand_nl_handler_label (slot, before_insn)
3370 rtx slot, before_insn;
3371{
3372 rtx insns;
3373 rtx handler_label = gen_label_rtx ();
3374
3375 /* Don't let jump_optimize delete the handler. */
3376 LABEL_PRESERVE_P (handler_label) = 1;
3377
3378 start_sequence ();
3379 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3380 insns = get_insns ();
3381 end_sequence ();
3382 emit_insns_before (insns, before_insn);
3383
3384 emit_label (handler_label);
3385
3386 return handler_label;
3387}
3388
3389/* Emit code to restore vital registers at the beginning of a nonlocal goto
3390 handler. */
3391static void
3392expand_nl_goto_receiver ()
3393{
3394#ifdef HAVE_nonlocal_goto
3395 if (! HAVE_nonlocal_goto)
3396#endif
3397 /* First adjust our frame pointer to its actual value. It was
3398 previously set to the start of the virtual area corresponding to
3399 the stacked variables when we branched here and now needs to be
3400 adjusted to the actual hardware fp value.
3401
3402 Assignments to virtual registers are converted by
3403 instantiate_virtual_regs into the corresponding assignment
3404 to the underlying register (fp in this case) that makes
3405 the original assignment true.
3406 So the following insn will actually be
3407 decrementing fp by STARTING_FRAME_OFFSET. */
3408 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3409
3410#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3411 if (fixed_regs[ARG_POINTER_REGNUM])
3412 {
3413#ifdef ELIMINABLE_REGS
3414 /* If the argument pointer can be eliminated in favor of the
3415 frame pointer, we don't need to restore it. We assume here
3416 that if such an elimination is present, it can always be used.
3417 This is the case on all known machines; if we don't make this
3418 assumption, we do unnecessary saving on many machines. */
3419 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3420 size_t i;
3421
3422 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3423 if (elim_regs[i].from == ARG_POINTER_REGNUM
3424 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3425 break;
3426
3427 if (i == sizeof elim_regs / sizeof elim_regs [0])
3428#endif
3429 {
3430 /* Now restore our arg pointer from the address at which it
3431 was saved in our stack frame.
3432 If there hasn't been space allocated for it yet, make
3433 some now. */
3434 if (arg_pointer_save_area == 0)
3435 arg_pointer_save_area
3436 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3437 emit_move_insn (virtual_incoming_args_rtx,
3438 /* We need a pseudo here, or else
3439 instantiate_virtual_regs_1 complains. */
3440 copy_to_reg (arg_pointer_save_area));
3441 }
3442 }
3443#endif
3444
3445#ifdef HAVE_nonlocal_goto_receiver
3446 if (HAVE_nonlocal_goto_receiver)
3447 emit_insn (gen_nonlocal_goto_receiver ());
3448#endif
3449}
3450
3451/* Make handlers for nonlocal gotos taking place in the function calls in
3452 block THISBLOCK. */
3453
3454static void
3455expand_nl_goto_receivers (thisblock)
3456 struct nesting *thisblock;
3457{
3458 tree link;
3459 rtx afterward = gen_label_rtx ();
3460 rtx insns, slot;
3461 rtx label_list;
3462 int any_invalid;
3463
3464 /* Record the handler address in the stack slot for that purpose,
3465 during this block, saving and restoring the outer value. */
3466 if (thisblock->next != 0)
3467 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3468 {
3469 rtx save_receiver = gen_reg_rtx (Pmode);
3470 emit_move_insn (XEXP (slot, 0), save_receiver);
3471
3472 start_sequence ();
3473 emit_move_insn (save_receiver, XEXP (slot, 0));
3474 insns = get_insns ();
3475 end_sequence ();
3476 emit_insns_before (insns, thisblock->data.block.first_insn);
3477 }
3478
3479 /* Jump around the handlers; they run only when specially invoked. */
3480 emit_jump (afterward);
3481
3482 /* Make a separate handler for each label. */
3483 link = nonlocal_labels;
3484 slot = nonlocal_goto_handler_slots;
3485 label_list = NULL_RTX;
3486 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3487 /* Skip any labels we shouldn't be able to jump to from here;
3488 we generate one special handler for all of them below, which just
3489 calls abort. */
3490 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3491 {
3492 rtx lab;
3493 lab = expand_nl_handler_label (XEXP (slot, 0),
3494 thisblock->data.block.first_insn);
3495 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3496
3497 expand_nl_goto_receiver ();
3498
3499 /* Jump to the "real" nonlocal label. */
3500 expand_goto (TREE_VALUE (link));
3501 }
3502
3503 /* A second pass over all nonlocal labels; this time we handle those
3504 we should not be able to jump to at this point. */
3505 link = nonlocal_labels;
3506 slot = nonlocal_goto_handler_slots;
3507 any_invalid = 0;
3508 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3509 if (DECL_TOO_LATE (TREE_VALUE (link)))
3510 {
3511 rtx lab;
3512 lab = expand_nl_handler_label (XEXP (slot, 0),
3513 thisblock->data.block.first_insn);
3514 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3515 any_invalid = 1;
3516 }
3517
3518 if (any_invalid)
3519 {
3520 expand_nl_goto_receiver ();
3521 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3522 VOIDmode, 0);
3523 emit_barrier ();
3524 }
3525
3526 nonlocal_goto_handler_labels = label_list;
3527 emit_label (afterward);
3528}
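/* Example (GNU C, illustrative only): the receivers built above are
   the targets used when a nested function jumps to a label of its
   containing function:

     void parent (void)
     {
       __label__ failed;
       void child (int x) { if (x < 0) goto failed; }
       child (-1);
       return;
     failed:
       handle_failure ();
     }

   The nonlocal goto in CHILD transfers to PARENT's handler, which
   restores the frame and argument pointers as in
   expand_nl_goto_receiver and then jumps to the real FAILED label.
   HANDLE_FAILURE is hypothetical.  */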
3529
3530/* Warn about any unused VARS (which may contain nodes other than
3531 VAR_DECLs, but such nodes are ignored). The nodes are connected
3532 via the TREE_CHAIN field. */
3533
3534void
3535warn_about_unused_variables (vars)
3536 tree vars;
3537{
3538 tree decl;
3539
3540 if (warn_unused)
3541 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3542 if (TREE_CODE (decl) == VAR_DECL
3543 && ! TREE_USED (decl)
3544 && ! DECL_IN_SYSTEM_HEADER (decl)
3545 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3546 warning_with_decl (decl, "unused variable `%s'");
3547}
3548
3549/* Generate RTL code to terminate a binding contour.
3550
3551 VARS is the chain of VAR_DECL nodes for the variables bound in this
3552 contour. There may actually be other nodes in this chain, but any
3553 nodes other than VAR_DECLS are ignored.
3554
3555 MARK_ENDS is nonzero if we should put a note at the beginning
3556 and end of this binding contour.
3557
3558 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3559 (That is true automatically if the contour has a saved stack level.) */
3560
3561void
3562expand_end_bindings (vars, mark_ends, dont_jump_in)
3563 tree vars;
3564 int mark_ends;
3565 int dont_jump_in;
3566{
3567 register struct nesting *thisblock;
3568 register tree decl;
3569
3570 while (block_stack->data.block.exception_region)
3571 {
3572 /* Because we don't need or want a new temporary level and
3573 because we didn't create one in expand_eh_region_start,
3574 create a fake one now to avoid removing one in
3575 expand_end_bindings. */
3576 push_temp_slots ();
3577
3578 block_stack->data.block.exception_region = 0;
3579
3580 expand_end_bindings (NULL_TREE, 0, 0);
3581 }
3582
3583 /* Since expand_eh_region_start does an expand_start_bindings, we
3584 have to first end all the bindings that were created by
3585 expand_eh_region_start. */
3586
3587 thisblock = block_stack;
3588
3589 /* If any of the variables in this scope were not used, warn the
3590 user. */
3591 warn_about_unused_variables (vars);
3592
3593 if (thisblock->exit_label)
3594 {
3595 do_pending_stack_adjust ();
3596 emit_label (thisblock->exit_label);
3597 }
3598
3599 /* If necessary, make handlers for nonlocal gotos taking
3600 place in the function calls in this block. */
3601 if (function_call_count != thisblock->data.block.n_function_calls
3602 && nonlocal_labels
3603 /* Make handler for outermost block
3604 if there were any nonlocal gotos to this function. */
3605 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3606 /* Make handler for inner block if it has something
3607 special to do when you jump out of it. */
3608 : (thisblock->data.block.cleanups != 0
3609 || thisblock->data.block.stack_level != 0)))
3610 expand_nl_goto_receivers (thisblock);
3611
3612 /* Don't allow jumping into a block that has a stack level.
3613 Cleanups are allowed, though. */
3614 if (dont_jump_in
3615 || thisblock->data.block.stack_level != 0)
3616 {
3617 struct label_chain *chain;
3618
3619 /* Any labels in this block are no longer valid to go to.
3620 Mark them to cause an error message. */
3621 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3622 {
3623 DECL_TOO_LATE (chain->label) = 1;
3624 /* If any goto without a fixup came to this label,
3625 that must be an error, because gotos without fixups
3626 come from outside all saved stack-levels. */
3627 if (TREE_ADDRESSABLE (chain->label))
3628 error_with_decl (chain->label,
3629 "label `%s' used before containing binding contour");
3630 }
3631 }
3632
3633 /* Restore stack level in effect before the block
3634 (only if variable-size objects allocated). */
3635 /* Perform any cleanups associated with the block. */
3636
3637 if (thisblock->data.block.stack_level != 0
3638 || thisblock->data.block.cleanups != 0)
3639 {
3640 /* Only clean up here if this point can actually be reached. */
3641 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3642
3643 /* Don't let cleanups affect ({...}) constructs. */
3644 int old_expr_stmts_for_value = expr_stmts_for_value;
3645 rtx old_last_expr_value = last_expr_value;
3646 tree old_last_expr_type = last_expr_type;
3647 expr_stmts_for_value = 0;
3648
3649 /* Do the cleanups. */
3650 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3651 if (reachable)
3652 do_pending_stack_adjust ();
3653
3654 expr_stmts_for_value = old_expr_stmts_for_value;
3655 last_expr_value = old_last_expr_value;
3656 last_expr_type = old_last_expr_type;
3657
3658 /* Restore the stack level. */
3659
3660 if (reachable && thisblock->data.block.stack_level != 0)
3661 {
3662 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3663 thisblock->data.block.stack_level, NULL_RTX);
3664 if (nonlocal_goto_handler_slots != 0)
3665 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3666 NULL_RTX);
3667 }
3668
3669 /* Any gotos out of this block must also do these things.
3670 Also report any gotos with fixups that came to labels in this
3671 level. */
3672 fixup_gotos (thisblock,
3673 thisblock->data.block.stack_level,
3674 thisblock->data.block.cleanups,
3675 thisblock->data.block.first_insn,
3676 dont_jump_in);
3677 }
3678
3679 /* Mark the beginning and end of the scope if requested.
3680 We do this now, after running cleanups on the variables
3681 just going out of scope, so they are in scope for their cleanups. */
3682
3683 if (mark_ends)
3684 {
3685 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3686 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3687 }
3688 else
3689 /* Get rid of the beginning-mark if we don't make an end-mark. */
3690 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3691
3692 /* If doing stupid register allocation, make sure lives of all
3693 register variables declared here extend thru end of scope. */
3694
3695 if (obey_regdecls)
3696 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3697 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3698 use_variable (DECL_RTL (decl));
3699
3700 /* Restore the temporary level of TARGET_EXPRs. */
3701 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3702
3703 /* Restore block_stack level for containing block. */
3704
3705 stack_block_stack = thisblock->data.block.innermost_stack_block;
3706 POPSTACK (block_stack);
3707
3708 /* Pop the stack slot nesting and free any slots at this level. */
3709 pop_temp_slots ();
3710}
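/* Example (illustrative): for C source such as

     void f (int n)
     {
       {
	 char buf[n];
	 use (buf);
       }
       more_code ();
     }

   the inner contour has a saved stack level because BUF has dynamic
   size, so ending it goes through the emit_stack_restore path above,
   giving the space back before MORE_CODE is expanded.  USE and
   MORE_CODE are hypothetical.  */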
3711\f
3712/* Generate RTL for the automatic variable declaration DECL.
3713 (Other kinds of declarations are simply ignored if seen here.) */
3714
3715void
3716expand_decl (decl)
3717 register tree decl;
3718{
3719 struct nesting *thisblock;
3720 tree type;
3721
3722 type = TREE_TYPE (decl);
3723
3724 /* Only automatic variables need any expansion done.
3725 Static and external variables, and external functions,
3726 will be handled by `assemble_variable' (called from finish_decl).
3727 TYPE_DECL and CONST_DECL require nothing.
3728 PARM_DECLs are handled in `assign_parms'. */
3729
3730 if (TREE_CODE (decl) != VAR_DECL)
3731 return;
3732 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3733 return;
3734
3735 thisblock = block_stack;
3736
3737 /* Create the RTL representation for the variable. */
3738
3739 if (type == error_mark_node)
3740 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3741 else if (DECL_SIZE (decl) == 0)
3742 /* Variable with incomplete type. */
3743 {
3744 if (DECL_INITIAL (decl) == 0)
3745 /* Error message was already done; now avoid a crash. */
3746 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3747 else
3748 /* An initializer is going to decide the size of this array.
3749 Until we know the size, represent its address with a reg. */
3750 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3751 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3752 }
3753 else if (DECL_MODE (decl) != BLKmode
3754 /* If -ffloat-store, don't put explicit float vars
3755 into regs. */
3756 && !(flag_float_store
3757 && TREE_CODE (type) == REAL_TYPE)
3758 && ! TREE_THIS_VOLATILE (decl)
3759 && ! TREE_ADDRESSABLE (decl)
3760 && (DECL_REGISTER (decl) || ! obey_regdecls)
3761 /* if -fcheck-memory-usage, check all variables. */
3762 && ! current_function_check_memory_usage)
3763 {
3764 /* Automatic variable that can go in a register. */
3765 int unsignedp = TREE_UNSIGNED (type);
3766 enum machine_mode reg_mode
3767 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3768
3769 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3770 mark_user_reg (DECL_RTL (decl));
3771
3772 if (POINTER_TYPE_P (type))
3773 mark_reg_pointer (DECL_RTL (decl),
3774 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3775 / BITS_PER_UNIT));
3776 }
3777
3778 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3779 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3780 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3781 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3782 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3783 {
3784 /* Variable of fixed size that goes on the stack. */
3785 rtx oldaddr = 0;
3786 rtx addr;
3787
3788 /* If we previously made RTL for this decl, it must be an array
3789 whose size was determined by the initializer.
3790 The old address was a register; set that register now
3791 to the proper address. */
3792 if (DECL_RTL (decl) != 0)
3793 {
3794 if (GET_CODE (DECL_RTL (decl)) != MEM
3795 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3796 abort ();
3797 oldaddr = XEXP (DECL_RTL (decl), 0);
3798 }
3799
3800 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3801 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3802 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3803
3804 /* Set alignment we actually gave this decl. */
3805 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3806 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3807
3808 if (oldaddr)
3809 {
3810 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3811 if (addr != oldaddr)
3812 emit_move_insn (oldaddr, addr);
3813 }
3814
3815 /* If this is a memory ref that contains aggregate components,
3816 mark it as such for cse and loop optimize. */
3817 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3818 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3819#if 0
3820 /* If this is in memory because of -ffloat-store,
3821 set the volatile bit, to prevent optimizations from
3822 undoing the effects. */
3823 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3824 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3825#endif
3826
3827 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3828 }
3829 else
3830 /* Dynamic-size object: must push space on the stack. */
3831 {
3832 rtx address, size;
3833
3834 /* Record the stack pointer on entry to block, if have
3835 not already done so. */
3836 if (thisblock->data.block.stack_level == 0)
3837 {
3838 do_pending_stack_adjust ();
3839 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3840 &thisblock->data.block.stack_level,
3841 thisblock->data.block.first_insn);
3842 stack_block_stack = thisblock;
3843 }
3844
3845 /* In function-at-a-time mode, variable_size doesn't expand this,
3846 so do it now. */
3847 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3848 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3849 const0_rtx, VOIDmode, 0);
3850
3851 /* Compute the variable's size, in bytes. */
3852 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3853 DECL_SIZE (decl),
3854 size_int (BITS_PER_UNIT)),
3855 NULL_RTX, VOIDmode, 0);
3856 free_temp_slots ();
3857
3858 /* Allocate space on the stack for the variable. Note that
3859 DECL_ALIGN says how the variable is to be aligned and we
3860 cannot use it to conclude anything about the alignment of
3861 the size. */
3862 address = allocate_dynamic_stack_space (size, NULL_RTX,
3863 TYPE_ALIGN (TREE_TYPE (decl)));
3864
3865 /* Reference the variable indirectly through that rtx. */
3866 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3867
3868 /* If this is a memory ref that contains aggregate components,
3869 mark it as such for cse and loop optimize. */
3870 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3871 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3872
3873 /* Indicate the alignment we actually gave this variable. */
3874#ifdef STACK_BOUNDARY
3875 DECL_ALIGN (decl) = STACK_BOUNDARY;
3876#else
3877 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3878#endif
3879 }
3880
3881 if (TREE_THIS_VOLATILE (decl))
3882 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3883#if 0 /* A variable is not necessarily unchanging
3884 just because it is const. RTX_UNCHANGING_P
3885 means no change in the function,
3886 not merely no change in the variable's scope.
3887 It is correct to set RTX_UNCHANGING_P if the variable's scope
3888 is the whole function. There's no convenient way to test that. */
3889 if (TREE_READONLY (decl))
3890 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3891#endif
3892
3893 /* If doing stupid register allocation, make sure life of any
3894 register variable starts here, at the start of its scope. */
3895
3896 if (obey_regdecls)
3897 use_variable (DECL_RTL (decl));
3898}
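/* Illustrative mapping of the main cases above to C declarations:

     register int i;     candidate for a promoted-mode pseudo register
     char buf[100];      fixed size; stack slot from assign_temp
     char vla[n];        dynamic size; allocate_dynamic_stack_space

   (A sketch only; which branch is taken also depends on flags such
   as -ffloat-store, volatility, addressability, and stack checking.)  */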
3899
3900
3901\f
3902/* Emit code to perform the initialization of a declaration DECL. */
3903
3904void
3905expand_decl_init (decl)
3906 tree decl;
3907{
3908 int was_used = TREE_USED (decl);
3909
3910 /* If this is a CONST_DECL, we don't have to generate any code, but
3911 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3912 to be set while in the obstack containing the constant. If we don't
3913 do this, we can lose if we have functions nested three deep and the middle
3914 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3915 the innermost function is the first to expand that STRING_CST. */
3916 if (TREE_CODE (decl) == CONST_DECL)
3917 {
3918 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3919 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3920 EXPAND_INITIALIZER);
3921 return;
3922 }
3923
3924 if (TREE_STATIC (decl))
3925 return;
3926
3927 /* Compute and store the initial value now. */
3928
3929 if (DECL_INITIAL (decl) == error_mark_node)
3930 {
3931 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3932
3933 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3934 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3935 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3936 0, 0);
3937 emit_queue ();
3938 }
3939 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3940 {
3941 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3942 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3943 emit_queue ();
3944 }
3945
3946 /* Don't let the initialization count as "using" the variable. */
3947 TREE_USED (decl) = was_used;
3948
3949 /* Free any temporaries we made while initializing the decl. */
3950 preserve_temp_slots (NULL_RTX);
3951 free_temp_slots ();
3952}
3953
3954/* CLEANUP is an expression to be executed at exit from this binding contour;
3955 for example, in C++, it might call the destructor for this variable.
3956
3957 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3958 CLEANUP multiple times, and have the correct semantics. This
3959 happens in exception handling and for gotos, returns, and breaks
3960 that leave the current scope.
3961
3962 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3963 that is not associated with any particular variable. */
3964
3965int
3966expand_decl_cleanup (decl, cleanup)
3967 tree decl, cleanup;
3968{
3969 struct nesting *thisblock;
3970
3971 /* Error if we are not in any block. */
3972 if (cfun == 0 || block_stack == 0)
3973 return 0;
3974
3975 thisblock = block_stack;
3976
3977 /* Record the cleanup if there is one. */
3978
3979 if (cleanup != 0)
3980 {
3981 tree t;
3982 rtx seq;
3983 tree *cleanups = &thisblock->data.block.cleanups;
3984 int cond_context = conditional_context ();
3985
3986 if (cond_context)
3987 {
3988 rtx flag = gen_reg_rtx (word_mode);
3989 rtx set_flag_0;
3990 tree cond;
3991
3992 start_sequence ();
3993 emit_move_insn (flag, const0_rtx);
3994 set_flag_0 = get_insns ();
3995 end_sequence ();
3996
3997 thisblock->data.block.last_unconditional_cleanup
3998 = emit_insns_after (set_flag_0,
3999 thisblock->data.block.last_unconditional_cleanup);
4000
4001 emit_move_insn (flag, const1_rtx);
4002
4003 /* All cleanups must be on the function_obstack. */
4004 push_obstacks_nochange ();
4005 resume_temporary_allocation ();
4006
4007 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4008 DECL_RTL (cond) = flag;
4009
4010 /* Conditionalize the cleanup. */
4011 cleanup = build (COND_EXPR, void_type_node,
4012 truthvalue_conversion (cond),
4013 cleanup, integer_zero_node);
4014 cleanup = fold (cleanup);
4015
4016 pop_obstacks ();
4017
4018 cleanups = thisblock->data.block.cleanup_ptr;
4019 }
4020
4021 /* All cleanups must be on the function_obstack. */
4022 push_obstacks_nochange ();
4023 resume_temporary_allocation ();
4024 cleanup = unsave_expr (cleanup);
4025 pop_obstacks ();
4026
4027 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
4028
4029 if (! cond_context)
4030 /* If this block has a cleanup, it belongs in stack_block_stack. */
4031 stack_block_stack = thisblock;
4032
4033 if (cond_context)
4034 {
4035 start_sequence ();
4036 }
4037
4038 /* If this was optimized so that there is no exception region for the
4039 cleanup, then mark the TREE_LIST node, so that we can later tell
4040 if we need to call expand_eh_region_end. */
4041 if (! using_eh_for_cleanups_p
4042 || expand_eh_region_start_tree (decl, cleanup))
4043 TREE_ADDRESSABLE (t) = 1;
4044 /* If that started a new EH region, we're in a new block. */
4045 thisblock = block_stack;
4046
4047 if (cond_context)
4048 {
4049 seq = get_insns ();
4050 end_sequence ();
4051 if (seq)
4052 thisblock->data.block.last_unconditional_cleanup
4053 = emit_insns_after (seq,
4054 thisblock->data.block.last_unconditional_cleanup);
4055 }
4056 else
4057 {
4058 thisblock->data.block.last_unconditional_cleanup
4059 = get_last_insn ();
4060 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4061 }
4062 }
4063 return 1;
4064}
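/* Example (C++, illustrative): in a conditional expression such as

     extern T make ();
     int r = p ? consume (make ()) : 0;

   the temporary from MAKE must be destroyed only on the path where
   it was constructed.  That is what the FLAG pseudo above
   implements: it is zeroed at the last unconditional point, set to 1
   where the temporary is built, and the recorded cleanup is folded
   into "flag ? run-cleanup : 0".  T, MAKE and CONSUME are
   hypothetical.  */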
4065
4066/* Like expand_decl_cleanup, but suppress generating an exception handler
4067 to perform the cleanup. */
4068
4069#if 0
4070int
4071expand_decl_cleanup_no_eh (decl, cleanup)
4072 tree decl, cleanup;
4073{
4074 int save_eh = using_eh_for_cleanups_p;
4075 int result;
4076
4077 using_eh_for_cleanups_p = 0;
4078 result = expand_decl_cleanup (decl, cleanup);
4079 using_eh_for_cleanups_p = save_eh;
4080
4081 return result;
4082}
4083#endif
4084
4085/* Arrange for the top element of the dynamic cleanup chain to be
4086 popped if we exit the current binding contour. DECL is the
4087 associated declaration, if any, otherwise NULL_TREE. If the
4088 current contour is left via an exception, then __sjthrow will pop
4089 the top element off the dynamic cleanup chain. The code that
4090 avoids re-doing the action we push onto the cleanup chain in the
4091 exceptional case is contained in expand_cleanups.
4092
4093 This routine is only used by expand_eh_region_start, and that is
4094 the only way in which an exception region should be started. This
4095 routine is only used when using the setjmp/longjmp codegen method
4096 for exception handling. */
4097
4098int
4099expand_dcc_cleanup (decl)
4100 tree decl;
4101{
4102 struct nesting *thisblock;
4103 tree cleanup;
4104
4105 /* Error if we are not in any block. */
4106 if (cfun == 0 || block_stack == 0)
4107 return 0;
4108 thisblock = block_stack;
4109
4110 /* Record the cleanup for the dynamic handler chain. */
4111
4112 /* All cleanups must be on the function_obstack. */
4113 push_obstacks_nochange ();
4114 resume_temporary_allocation ();
4115 cleanup = make_node (POPDCC_EXPR);
4116 pop_obstacks ();
4117
4118 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4119 thisblock->data.block.cleanups
4120 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4121
4122 /* If this block has a cleanup, it belongs in stack_block_stack. */
4123 stack_block_stack = thisblock;
4124 return 1;
4125}
4126
4127/* Arrange for the top element of the dynamic handler chain to be
4128 popped if we exit the current binding contour. DECL is the
4129 associated declaration, if any, otherwise NULL_TREE. If the current
4130 contour is left via an exception, then __sjthrow will pop the top
4131 element off the dynamic handler chain. The code that avoids
4132 re-doing the action we push onto the handler chain in the
4133 exceptional case is contained in expand_cleanups.
4134
4135 This routine is only used by expand_eh_region_start, and that is
4136 the only way in which an exception region should be started. This
4137 routine is only used when using the setjmp/longjmp codegen method
4138 for exception handling. */
4139
4140int
4141expand_dhc_cleanup (decl)
4142 tree decl;
4143{
4144 struct nesting *thisblock;
4145 tree cleanup;
4146
4147 /* Error if we are not in any block. */
4148 if (cfun == 0 || block_stack == 0)
4149 return 0;
4150 thisblock = block_stack;
4151
4152 /* Record the cleanup for the dynamic handler chain. */
4153
4154 /* All cleanups must be on the function_obstack. */
4155 push_obstacks_nochange ();
4156 resume_temporary_allocation ();
4157 cleanup = make_node (POPDHC_EXPR);
4158 pop_obstacks ();
4159
4160 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4161 thisblock->data.block.cleanups
4162 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4163
4164 /* If this block has a cleanup, it belongs in stack_block_stack. */
4165 stack_block_stack = thisblock;
4166 return 1;
4167}
4168\f
4169/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4170 DECL_ELTS is the list of elements that belong to DECL's type.
4171 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4172
4173void
4174expand_anon_union_decl (decl, cleanup, decl_elts)
4175 tree decl, cleanup, decl_elts;
4176{
4177 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4178 rtx x;
4179 tree t;
4180
4181 /* If any of the elements are addressable, so is the entire union. */
4182 for (t = decl_elts; t; t = TREE_CHAIN (t))
4183 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4184 {
4185 TREE_ADDRESSABLE (decl) = 1;
4186 break;
4187 }
4188
4189 expand_decl (decl);
4190 expand_decl_cleanup (decl, cleanup);
4191 x = DECL_RTL (decl);
4192
4193 /* Go through the elements, assigning RTL to each. */
4194 for (t = decl_elts; t; t = TREE_CHAIN (t))
4195 {
4196 tree decl_elt = TREE_VALUE (t);
4197 tree cleanup_elt = TREE_PURPOSE (t);
4198 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4199
4200 /* Propagate the union's alignment to the elements. */
4201 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4202
4203 /* If the element has BLKmode and the union doesn't, the union is
4204 aligned such that the element doesn't need to have BLKmode, so
4205 change the element's mode to the appropriate one for its size. */
4206 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4207 DECL_MODE (decl_elt) = mode
4208 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4209 MODE_INT, 1);
4210
4211 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4212 instead create a new MEM rtx with the proper mode. */
4213 if (GET_CODE (x) == MEM)
4214 {
4215 if (mode == GET_MODE (x))
4216 DECL_RTL (decl_elt) = x;
4217 else
4218 {
4219 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4220 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4221 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4222 }
4223 }
4224 else if (GET_CODE (x) == REG)
4225 {
4226 if (mode == GET_MODE (x))
4227 DECL_RTL (decl_elt) = x;
4228 else
4229 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4230 }
4231 else
4232 abort ();
4233
4234 /* Record the cleanup if there is one. */
4235
4236 if (cleanup != 0)
4237 thisblock->data.block.cleanups
4238 = temp_tree_cons (decl_elt, cleanup_elt,
4239 thisblock->data.block.cleanups);
4240 }
4241}
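/* Example (C++ or GNU C, illustrative): for an anonymous union

     union { int i; float f; };
     i = 3;

   the union DECL gets a single rtx X; each member decl then receives
   a view of X: either a MEM in the member's own machine mode, or a
   SUBREG when X is a register.  */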
4242\f
4243/* Expand a list of cleanups LIST.
4244 Elements may be expressions or may be nested lists.
4245
4246 If DONT_DO is nonnull, then any list-element
4247 whose TREE_PURPOSE matches DONT_DO is omitted.
4248 This is sometimes used to avoid a cleanup associated with
4249 a value that is being returned out of the scope.
4250
4251 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4252 goto and handle protection regions specially in that case.
4253
4254 If REACHABLE, we emit code, otherwise just inform the exception handling
4255 code about this finalization. */
4256
4257static void
4258expand_cleanups (list, dont_do, in_fixup, reachable)
4259 tree list;
4260 tree dont_do;
4261 int in_fixup;
4262 int reachable;
4263{
4264 tree tail;
4265 for (tail = list; tail; tail = TREE_CHAIN (tail))
4266 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4267 {
4268 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4269 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4270 else
4271 {
4272 if (! in_fixup)
4273 {
4274 tree cleanup = TREE_VALUE (tail);
4275
4276 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4277 if (TREE_CODE (cleanup) != POPDHC_EXPR
4278 && TREE_CODE (cleanup) != POPDCC_EXPR
4279 /* See expand_eh_region_start_tree for this case. */
4280 && ! TREE_ADDRESSABLE (tail))
4281 {
4282 cleanup = protect_with_terminate (cleanup);
4283 expand_eh_region_end (cleanup);
4284 }
4285 }
4286
4287 if (reachable)
4288 {
4289 /* Cleanups may be run multiple times. For example,
4290 when exiting a binding contour, we expand the
4291 cleanups associated with that contour. When a goto
4292 within that binding contour has a target outside that
4293 contour, it will expand all cleanups from its scope to
4294 the target. Though the cleanups are expanded multiple
4295 times, the control paths are non-overlapping so the
4296 cleanups will not be executed twice. */
4297
4298 /* We may need to protect fixups with rethrow regions. */
4299 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4300
4301 if (protect)
4302 expand_fixup_region_start ();
4303
4304 /* The cleanup might contain try-blocks, so we have to
4305 preserve our current queue. */
4306 push_ehqueue ();
4307 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4308 pop_ehqueue ();
4309 if (protect)
4310 expand_fixup_region_end (TREE_VALUE (tail));
4311 free_temp_slots ();
4312 }
4313 }
4314 }
4315}
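/* Example (C++, illustrative): given

     { T a; { T b; if (p) goto out; } }
   out: ;

   the goto path expands B's cleanup and then A's, while the normal
   fall-through path expands them again at each scope end.  As the
   comment above notes, the two control paths do not overlap, so each
   destructor still runs exactly once per execution.  T is
   hypothetical.  */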
4316
4317/* Mark the context we are emitting RTL for as a conditional
4318 context, so that any cleanup actions we register with
4319 expand_decl_init will be properly conditionalized when those
4320 cleanup actions are later performed. Must be called before any
4321 expression (tree) within a conditional context is expanded. */
4322
4323void
4324start_cleanup_deferral ()
4325{
4326 /* block_stack can be NULL if we are inside the parameter list. It is
4327 OK to do nothing, because cleanups aren't possible here. */
4328 if (block_stack)
4329 ++block_stack->data.block.conditional_code;
4330}
4331
4332/* Mark the end of a conditional region of code. Because cleanup
956d6950 4333 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
4334 after we end the currently deferred cleanups, only after we end all
4335 deferred cleanups, are we back in unconditional code. */
4336
4337void
4338end_cleanup_deferral ()
4339{
4340 /* block_stack can be NULL if we are inside the parameter list. It is
4341 OK to do nothing, because cleanups aren't possible here. */
4342 if (block_stack)
4343 --block_stack->data.block.conditional_code;
4344}
4345
4346/* Move all cleanups from the current block_stack
4347 to the containing block_stack, where they are assumed to
4348 have been created. If anything can cause a temporary to
4349 be created, but not expanded for more than one level of
4350 block_stacks, then this code will have to change. */
4351
4352void
4353move_cleanups_up ()
4354{
4355 struct nesting *block = block_stack;
4356 struct nesting *outer = block->next;
4357
4358 outer->data.block.cleanups
4359 = chainon (block->data.block.cleanups,
4360 outer->data.block.cleanups);
4361 block->data.block.cleanups = 0;
4362}
4363
4364tree
4365last_cleanup_this_contour ()
4366{
4367 if (block_stack == 0)
4368 return 0;
4369
4370 return block_stack->data.block.cleanups;
4371}
4372
4373/* Return 1 if there are any pending cleanups at this point.
4374 If THIS_CONTOUR is nonzero, check the current contour as well.
4375 Otherwise, look only at the contours that enclose this one. */
4376
4377int
4378any_pending_cleanups (this_contour)
4379 int this_contour;
4380{
4381 struct nesting *block;
4382
4383 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4384 return 0;
4385
4386 if (this_contour && block_stack->data.block.cleanups != NULL)
4387 return 1;
4388 if (block_stack->data.block.cleanups == 0
4389 && block_stack->data.block.outer_cleanups == 0)
4390 return 0;
4391
4392 for (block = block_stack->next; block; block = block->next)
4393 if (block->data.block.cleanups != 0)
4394 return 1;
4395
4396 return 0;
4397}
4398\f
4399/* Enter a case (Pascal) or switch (C) statement.
4400 Push a block onto case_stack and nesting_stack
4401 to accumulate the case-labels that are seen
4402 and to record the labels generated for the statement.
4403
4404 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4405 Otherwise, this construct is transparent for `exit_something'.
4406
4407 EXPR is the index-expression to be dispatched on.
4408 TYPE is its nominal type. We could simply convert EXPR to this type,
4409 but instead we take short cuts. */
4410
4411void
4412expand_start_case (exit_flag, expr, type, printname)
4413 int exit_flag;
4414 tree expr;
4415 tree type;
4416 const char *printname;
4417{
4418 register struct nesting *thiscase = ALLOC_NESTING ();
4419
4420 /* Make an entry on case_stack for the case we are entering. */
4421
4422 thiscase->next = case_stack;
4423 thiscase->all = nesting_stack;
4424 thiscase->depth = ++nesting_depth;
4425 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4426 thiscase->data.case_stmt.case_list = 0;
4427 thiscase->data.case_stmt.index_expr = expr;
4428 thiscase->data.case_stmt.nominal_type = type;
4429 thiscase->data.case_stmt.default_label = 0;
4430 thiscase->data.case_stmt.num_ranges = 0;
4431 thiscase->data.case_stmt.printname = printname;
4432 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4433 case_stack = thiscase;
4434 nesting_stack = thiscase;
4435
4436 do_pending_stack_adjust ();
4437
4438 /* Make sure case_stmt.start points to something that won't
4439 need any transformation before expand_end_case. */
4440 if (GET_CODE (get_last_insn ()) != NOTE)
4441 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4442
4443 thiscase->data.case_stmt.start = get_last_insn ();
4444
4445 start_cleanup_deferral ();
4446}
4447
4448
4449/* Start a "dummy case statement" within which case labels are invalid
4450 and are not connected to any larger real case statement.
4451 This can be used if you don't want to let a case statement jump
4452 into the middle of certain kinds of constructs. */
4453
4454void
4455expand_start_case_dummy ()
4456{
4457 register struct nesting *thiscase = ALLOC_NESTING ();
4458
4459 /* Make an entry on case_stack for the dummy. */
4460
4461 thiscase->next = case_stack;
4462 thiscase->all = nesting_stack;
4463 thiscase->depth = ++nesting_depth;
4464 thiscase->exit_label = 0;
4465 thiscase->data.case_stmt.case_list = 0;
4466 thiscase->data.case_stmt.start = 0;
4467 thiscase->data.case_stmt.nominal_type = 0;
4468 thiscase->data.case_stmt.default_label = 0;
4469 thiscase->data.case_stmt.num_ranges = 0;
4470 case_stack = thiscase;
4471 nesting_stack = thiscase;
4472 start_cleanup_deferral ();
4473}
4474
4475/* End a dummy case statement. */
4476
4477void
4478expand_end_case_dummy ()
4479{
4480 end_cleanup_deferral ();
4481 POPSTACK (case_stack);
4482}
4483
4484/* Return the data type of the index-expression
4485 of the innermost case statement, or null if none. */
4486
4487tree
4488case_index_expr_type ()
4489{
4490 if (case_stack)
4491 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4492 return 0;
4493}
4494\f
4495static void
4496check_seenlabel ()
4497{
4498 /* If this is the first label, warn if any insns have been emitted. */
4499 if (case_stack->data.case_stmt.line_number_status >= 0)
4500 {
4501 rtx insn;
4502
4503 restore_line_number_status
4504 (case_stack->data.case_stmt.line_number_status);
4505 case_stack->data.case_stmt.line_number_status = -1;
4506
4507 for (insn = case_stack->data.case_stmt.start;
4508 insn;
4509 insn = NEXT_INSN (insn))
4510 {
4511 if (GET_CODE (insn) == CODE_LABEL)
4512 break;
4513 if (GET_CODE (insn) != NOTE
4514 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4515 {
4516 do
4517 insn = PREV_INSN (insn);
4518 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4519
4520 /* If insn is zero, then there must have been a syntax error. */
4521 if (insn)
4522 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4523 NOTE_LINE_NUMBER(insn),
4524 "unreachable code at beginning of %s",
4525 case_stack->data.case_stmt.printname);
4526 break;
4527 }
4528 }
4529 }
4530}
4531
4532/* Accumulate one case or default label inside a case or switch statement.
4533 VALUE is the value of the case (a null pointer, for a default label).
4534 The function CONVERTER, when applied to arguments T and V,
4535 converts the value V to the type T.
4536
4537 If not currently inside a case or switch statement, return 1 and do
4538 nothing. The caller will print a language-specific error message.
4539 If VALUE is a duplicate or overlaps, return 2 and do nothing
4540 except store the (first) duplicate node in *DUPLICATE.
4541 If VALUE is out of range, return 3 and do nothing.
4542 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4543 Return 0 on success.
4544
4545 Extended to handle range statements. */
4546
4547int
4548pushcase (value, converter, label, duplicate)
4549 register tree value;
4550 tree (*converter) PARAMS ((tree, tree));
4551 register tree label;
4552 tree *duplicate;
4553{
4554 tree index_type;
4555 tree nominal_type;
4556
4557 /* Fail if not inside a real case statement. */
4558 if (! (case_stack && case_stack->data.case_stmt.start))
4559 return 1;
4560
4561 if (stack_block_stack
4562 && stack_block_stack->depth > case_stack->depth)
4563 return 5;
4564
4565 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4566 nominal_type = case_stack->data.case_stmt.nominal_type;
4567
4568 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4569 if (index_type == error_mark_node)
4570 return 0;
4571
4572 /* Convert VALUE to the type in which the comparisons are nominally done. */
4573 if (value != 0)
4574 value = (*converter) (nominal_type, value);
4575
4576 check_seenlabel ();
4577
4578 /* Fail if this value is out of range for the actual type of the index
4579 (which may be narrower than NOMINAL_TYPE). */
4580 if (value != 0
4581 && (TREE_CONSTANT_OVERFLOW (value)
4582 || ! int_fits_type_p (value, index_type)))
4583 return 3;
4584
4585 /* Fail if this is a duplicate or overlaps another entry. */
4586 if (value == 0)
4587 {
4588 if (case_stack->data.case_stmt.default_label != 0)
4589 {
4590 *duplicate = case_stack->data.case_stmt.default_label;
4591 return 2;
4592 }
4593 case_stack->data.case_stmt.default_label = label;
4594 }
4595 else
4596 return add_case_node (value, value, label, duplicate);
4597
4598 expand_label (label);
4599 return 0;
4600}
4601
4602/* Like pushcase but this case applies to all values between VALUE1 and
4603 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4604 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4605 starts at VALUE1 and ends at the highest value of the index type.
4606 If both are NULL, this case applies to all values.
4607
4608 The return value is the same as that of pushcase but there is one
4609 additional error code: 4 means the specified range was empty. */
4610
4611int
4612pushcase_range (value1, value2, converter, label, duplicate)
4613 register tree value1, value2;
4614 tree (*converter) PARAMS ((tree, tree));
4615 register tree label;
4616 tree *duplicate;
4617{
4618 tree index_type;
4619 tree nominal_type;
4620
4621 /* Fail if not inside a real case statement. */
4622 if (! (case_stack && case_stack->data.case_stmt.start))
4623 return 1;
4624
4625 if (stack_block_stack
4626 && stack_block_stack->depth > case_stack->depth)
4627 return 5;
4628
4629 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4630 nominal_type = case_stack->data.case_stmt.nominal_type;
4631
4632 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4633 if (index_type == error_mark_node)
4634 return 0;
4635
4636 check_seenlabel ();
4637
4638 /* Convert VALUEs to type in which the comparisons are nominally done
4639 and replace any unspecified value with the corresponding bound. */
4640 if (value1 == 0)
4641 value1 = TYPE_MIN_VALUE (index_type);
4642 if (value2 == 0)
4643 value2 = TYPE_MAX_VALUE (index_type);
4644
4645 /* Fail if the range is empty. Do this before any conversion since
4646 we want to allow out-of-range empty ranges. */
4647 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4648 return 4;
4649
4650 /* If the max was unbounded, use the max of the nominal_type we are
4651 converting to. Do this after the < check above to suppress false
4652 positives. */
4653 if (value2 == 0)
4654 value2 = TYPE_MAX_VALUE (nominal_type);
4655
4656 value1 = (*converter) (nominal_type, value1);
4657 value2 = (*converter) (nominal_type, value2);
4658
4659 /* Fail if these values are out of range. */
4660 if (TREE_CONSTANT_OVERFLOW (value1)
4661 || ! int_fits_type_p (value1, index_type))
4662 return 3;
4663
4664 if (TREE_CONSTANT_OVERFLOW (value2)
4665 || ! int_fits_type_p (value2, index_type))
4666 return 3;
4667
4668 return add_case_node (value1, value2, label, duplicate);
4669}
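/* Example (GNU C, illustrative): pushcase_range backs the case range
   extension,

     switch (c)
       {
       case 'a' ... 'z':
	 ...
       }

   which reaches this function with VALUE1 == 'a' and VALUE2 == 'z';
   a NULL bound is replaced by the corresponding extreme of the index
   type, as shown above.  */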
4670
4671/* Do the actual insertion of a case label for pushcase and pushcase_range
4672 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4673 slowdown for large switch statements. */
4674
4675static int
4676add_case_node (low, high, label, duplicate)
4677 tree low, high;
4678 tree label;
4679 tree *duplicate;
4680{
4681 struct case_node *p, **q, *r;
4682
4683 q = &case_stack->data.case_stmt.case_list;
4684 p = *q;
4685
4686 while ((r = *q))
4687 {
4688 p = r;
4689
4690 /* Keep going past elements distinctly greater than HIGH. */
4691 if (tree_int_cst_lt (high, p->low))
4692 q = &p->left;
4693
4694 /* or distinctly less than LOW. */
4695 else if (tree_int_cst_lt (p->high, low))
4696 q = &p->right;
4697
4698 else
4699 {
4700 /* We have an overlap; this is an error. */
4701 *duplicate = p->code_label;
28d81abb
RK
4702 return 2;
4703 }
4704 }
4705
4706 /* Add this label to the chain, and succeed.
4707 Copy LOW, HIGH so they are on temporary rather than momentary
4708 obstack and will thus survive till the end of the case statement. */
4709
4710 r = (struct case_node *) oballoc (sizeof (struct case_node));
4711 r->low = copy_node (low);
4712
4713 /* If the bounds are equal, turn this into the one-value case. */
4714
4715 if (tree_int_cst_equal (low, high))
4716 r->high = r->low;
4717 else
4718 {
4719 r->high = copy_node (high);
4720 case_stack->data.case_stmt.num_ranges++;
4721 }
4722
4723 r->code_label = label;
4724 expand_label (label);
4725
4726 *q = r;
4727 r->parent = p;
4728 r->left = 0;
4729 r->right = 0;
4730 r->balance = 0;
4731
4732 while (p)
4733 {
4734 struct case_node *s;
4735
4736 if (r == p->left)
4737 {
4738 int b;
4739
4740 if (! (b = p->balance))
4741 /* Growth propagation from left side. */
4742 p->balance = -1;
4743 else if (b < 0)
4744 {
4745 if (r->balance < 0)
4746 {
4747 /* R-Rotation */
4748 if ((p->left = s = r->right))
4749 s->parent = p;
4750
4751 r->right = p;
4752 p->balance = 0;
4753 r->balance = 0;
4754 s = p->parent;
4755 p->parent = r;
4756
4757 if ((r->parent = s))
4758 {
4759 if (s->left == p)
4760 s->left = r;
4761 else
4762 s->right = r;
4763 }
4764 else
4765 case_stack->data.case_stmt.case_list = r;
4766 }
4767 else
4768 /* r->balance == +1 */
4769 {
4770 /* LR-Rotation */
4771
4772 int b2;
4773 struct case_node *t = r->right;
4774
4775 if ((p->left = s = t->right))
4776 s->parent = p;
4777
4778 t->right = p;
4779 if ((r->right = s = t->left))
4780 s->parent = r;
4781
4782 t->left = r;
4783 b = t->balance;
4784 b2 = b < 0;
4785 p->balance = b2;
4786 b2 = -b2 - b;
4787 r->balance = b2;
4788 t->balance = 0;
4789 s = p->parent;
4790 p->parent = t;
4791 r->parent = t;
4792
4793 if ((t->parent = s))
4794 {
4795 if (s->left == p)
4796 s->left = t;
4797 else
4798 s->right = t;
4799 }
4800 else
4801 case_stack->data.case_stmt.case_list = t;
4802 }
4803 break;
4804 }
4805
4806 else
4807 {
4808 /* p->balance == +1; growth of left side balances the node. */
4809 p->balance = 0;
4810 break;
4811 }
4812 }
4813 else
4814 /* r == p->right */
4815 {
4816 int b;
4817
4818 if (! (b = p->balance))
4819 /* Growth propagation from right side. */
4820 p->balance++;
4821 else if (b > 0)
4822 {
4823 if (r->balance > 0)
4824 {
4825 /* L-Rotation */
4826
4827 if ((p->right = s = r->left))
4828 s->parent = p;
4829
4830 r->left = p;
4831 p->balance = 0;
4832 r->balance = 0;
4833 s = p->parent;
4834 p->parent = r;
4835 if ((r->parent = s))
4836 {
4837 if (s->left == p)
4838 s->left = r;
4839 else
4840 s->right = r;
4841 }
4842
4843 else
4844 case_stack->data.case_stmt.case_list = r;
4845 }
4846
4847 else
4848 /* r->balance == -1 */
4849 {
4850 /* RL-Rotation */
4851 int b2;
4852 struct case_node *t = r->left;
4853
4854 if ((p->right = s = t->left))
4855 s->parent = p;
4856
4857 t->left = p;
4858
4859 if ((r->left = s = t->right))
4860 s->parent = r;
4861
4862 t->right = r;
4863 b = t->balance;
4864 b2 = b < 0;
4865 r->balance = b2;
4866 b2 = -b2 - b;
4867 p->balance = b2;
4868 t->balance = 0;
4869 s = p->parent;
4870 p->parent = t;
4871 r->parent = t;
4872
4873 if ((t->parent = s))
4874 {
4875 if (s->left == p)
4876 s->left = t;
4877 else
4878 s->right = t;
4879 }
4880
4881 else
4882 case_stack->data.case_stmt.case_list = t;
4883 }
4884 break;
4885 }
4886 else
4887 {
4888 /* p->balance == -1; growth of right side balances the node. */
4889 p->balance = 0;
4890 break;
4891 }
4892 }
4893
4894 r = p;
4895 p = p->parent;
4896 }
4897
4898 return 0;
4899}
4900
4901\f
4902/* Returns the number of possible values of TYPE.
4903 Returns -1 if the number is unknown or variable.
4904 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4905 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4906 do not increase monotonically (there may be duplicates);
4907 to 1 if the values increase monotonically, but not always by 1;
4908 otherwise sets it to 0. */
4909
4910HOST_WIDE_INT
4911all_cases_count (type, spareness)
4912 tree type;
4913 int *spareness;
4914{
4915 HOST_WIDE_INT count;
4916 *spareness = 0;
4917
4918 switch (TREE_CODE (type))
4919 {
4920 tree t;
4921 case BOOLEAN_TYPE:
4922 count = 2;
4923 break;
4924 case CHAR_TYPE:
4925 count = 1 << BITS_PER_UNIT;
4926 break;
4927 default:
4928 case INTEGER_TYPE:
4929 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4930 || TYPE_MAX_VALUE (type) == NULL
4931 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4932 return -1;
4933 else
4934 {
4935 /* count
4936 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4937 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
0f41302f 4938 but with overflow checking. */
94d6511c
PB
4939 tree mint = TYPE_MIN_VALUE (type);
4940 tree maxt = TYPE_MAX_VALUE (type);
4941 HOST_WIDE_INT lo, hi;
4942 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4943 &lo, &hi);
4944 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4945 lo, hi, &lo, &hi);
4946 add_double (lo, hi, 1, 0, &lo, &hi);
4947 if (hi != 0 || lo < 0)
4948 return -2;
4949 count = lo;
4950 }
4951 break;
4952 case ENUMERAL_TYPE:
4953 count = 0;
4954 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4955 {
4956 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4957 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4958 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4959 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4960 *sparseness = 1;
4961 count++;
4962 }
4963 if (*sparseness == 1)
4964 {
4965 tree prev = TREE_VALUE (TYPE_VALUES (type));
4966 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4967 {
4968 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4969 {
4970 *sparseness = 2;
4971 break;
4972 }
4973 prev = TREE_VALUE (t);
4974 }
4975
4976 }
4977 }
4978 return count;
4979}
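/* A worked example of the overflow-checked subtraction above, with
   hypothetical values: for a type with TYPE_MIN_VALUE == -2 and
   TYPE_MAX_VALUE == 5, the three double-width calls compute

       neg_double:  2    (- min)
       add_double:  7    (max - min)
       add_double:  8    (max - min + 1)

   and 8 is returned.  If max - min + 1 exceeds the positive range of
   a HOST_WIDE_INT, the final (lo, hi) pair trips the
   hi != 0 || lo < 0 test and -2 is returned instead.  */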
4980
4981
4982#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4983 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4984 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4985#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4986 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4987 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
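/* A minimal sketch of the two macros in isolation (hypothetical
   standalone use; in this file the array always comes from the
   calloc in check_for_full_enumeration_handling):  */
#if 0
unsigned char seen[128 / HOST_BITS_PER_CHAR];
int hit;

bzero ((char *) seen, sizeof seen);
BITARRAY_SET (seen, 42);
hit = BITARRAY_TEST (seen, 42) != 0;  /* hit == 1; bit 41 is still clear.  */
#endif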
94d6511c
PB
4988
4989/* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4990 according to the case values we have seen, assuming the case expression
4991 has the given TYPE.
4992 SPARSENESS is as determined by all_cases_count.
4993
9faa82d8 4994 The time needed is proportional to COUNT, unless
94d6511c
PB
4995 SPARSENESS is 2, in which case quadratic time is needed. */
4996
df03cab5 4997void
94d6511c
PB
4998mark_seen_cases (type, cases_seen, count, sparseness)
4999 tree type;
5000 unsigned char *cases_seen;
5001 long count;
5002 int sparseness;
5003{
94d6511c
PB
5004 tree next_node_to_try = NULL_TREE;
5005 long next_node_offset = 0;
5006
5720c7e7 5007 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c
PB
5008 tree val = make_node (INTEGER_CST);
5009 TREE_TYPE (val) = type;
5720c7e7
RK
5010 if (! root)
5011 ; /* Do nothing */
5012 else if (sparseness == 2)
94d6511c 5013 {
5720c7e7
RK
5014 tree t;
5015 HOST_WIDE_INT xlo;
5016
5017 /* This less efficient loop is only needed to handle
5018 duplicate case values (multiple enum constants
5019 with the same value). */
5020 TREE_TYPE (val) = TREE_TYPE (root->low);
5021 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5022 t = TREE_CHAIN (t), xlo++)
94d6511c 5023 {
5720c7e7
RK
5024 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5025 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5026 n = root;
5027 do
94d6511c 5028 {
5720c7e7
RK
5029 /* Keep going past elements distinctly greater than VAL. */
5030 if (tree_int_cst_lt (val, n->low))
5031 n = n->left;
5032
5033 /* or distinctly less than VAL. */
5034 else if (tree_int_cst_lt (n->high, val))
5035 n = n->right;
5036
5037 else
94d6511c 5038 {
5720c7e7
RK
5039 /* We have found a matching range. */
5040 BITARRAY_SET (cases_seen, xlo);
5041 break;
94d6511c
PB
5042 }
5043 }
5720c7e7
RK
5044 while (n);
5045 }
5046 }
5047 else
5048 {
5049 if (root->left)
5050 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5051 for (n = root; n; n = n->right)
5052 {
5053 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5054 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5055 while (! tree_int_cst_lt (n->high, val))
94d6511c 5056 {
5720c7e7
RK
5057 /* Calculate (into xlo) the "offset" of the integer (val).
5058 The element with lowest value has offset 0, the next smallest
5059 element has offset 1, etc. */
5060
5061 HOST_WIDE_INT xlo, xhi;
5062 tree t;
94d6511c
PB
5063 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5064 {
5065 /* The TYPE_VALUES will be in increasing order, so
5066 start searching where we last ended. */
5067 t = next_node_to_try;
5068 xlo = next_node_offset;
5069 xhi = 0;
5070 for (;;)
5071 {
5072 if (t == NULL_TREE)
5073 {
5074 t = TYPE_VALUES (type);
5075 xlo = 0;
5076 }
5077 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5078 {
5079 next_node_to_try = TREE_CHAIN (t);
5080 next_node_offset = xlo + 1;
5081 break;
5082 }
5083 xlo++;
5084 t = TREE_CHAIN (t);
5085 if (t == next_node_to_try)
5720c7e7
RK
5086 {
5087 xlo = -1;
5088 break;
5089 }
94d6511c
PB
5090 }
5091 }
5092 else
5093 {
5094 t = TYPE_MIN_VALUE (type);
5095 if (t)
5096 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5097 &xlo, &xhi);
5098 else
5099 xlo = xhi = 0;
5100 add_double (xlo, xhi,
5101 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5102 &xlo, &xhi);
5103 }
5104
9dd53f1e 5105 if (xhi == 0 && xlo >= 0 && xlo < count)
94d6511c 5106 BITARRAY_SET (cases_seen, xlo);
5720c7e7
RK
5107 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5108 1, 0,
5109 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 5110 }
94d6511c
PB
5111 }
5112 }
5113}
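/* An illustration of the offset computation above, with a
   hypothetical type: given enum { A = 1, B = 2, C = 4 } and
   SPARSENESS == 1, the label `case C:' is found third in TYPE_VALUES,
   so its offset is 2 and bit 2 of CASES_SEEN is set.  With
   SPARSENESS == 0 the offset is instead computed arithmetically as
   val - TYPE_MIN_VALUE by the neg_double/add_double pair.  */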
5114
28d81abb
RK
5115/* Called when the index of a switch statement is an enumerated type
5116 and there is no default label.
5117
5118 Checks that all enumeration literals are covered by the case
5119 expressions of a switch. Also, warn if there are any extra
5120 switch cases that are *not* elements of the enumerated type.
5121
5122 If all enumeration literals were covered by the case expressions,
5123 turn one of the expressions into the default expression since it should
5124 not be possible to fall through such a switch. */
5125
5126void
5127check_for_full_enumeration_handling (type)
5128 tree type;
5129{
5130 register struct case_node *n;
28d81abb 5131 register tree chain;
69d4ca36
RL
5132#if 0 /* variable used by 'if 0'ed code below. */
5133 register struct case_node **l;
28d81abb 5134 int all_values = 1;
69d4ca36 5135#endif
28d81abb 5136
0f41302f 5137 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
5138 int sparseness = 0;
5139
0f41302f 5140 /* The number of possible selector values. */
94d6511c
PB
5141 HOST_WIDE_INT size;
5142
5143 /* For each possible selector value, a one iff it has been matched
0f41302f 5144 by a case value alternative. */
94d6511c
PB
5145 unsigned char *cases_seen;
5146
0f41302f 5147 /* The allocated size of cases_seen, in chars. */
94d6511c 5148 long bytes_needed;
94d6511c 5149
94d6511c
PB
5150 if (! warn_switch)
5151 return;
5152
5153 size = all_cases_count (type, &sparseness);
5154 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 5155
94d6511c 5156 if (size > 0 && size < 600000
c5c76735
JL
5157 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5158 this optimization if we don't have enough memory rather than
5159 aborting, as xmalloc would do. */
3de90026 5160 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
28d81abb 5161 {
94d6511c
PB
5162 long i;
5163 tree v = TYPE_VALUES (type);
28d81abb 5164
94d6511c
PB
5165 /* The time complexity of this code is normally O(N), where
5166 N is the number of members in the enumerated type.
5167 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 5168 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
5169
5170 mark_seen_cases (type, cases_seen, size, sparseness);
5171
5172 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
28d81abb 5173 {
94d6511c 5174 if (BITARRAY_TEST(cases_seen, i) == 0)
1ddde1cd 5175 warning ("enumeration value `%s' not handled in switch",
94d6511c 5176 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
28d81abb 5177 }
94d6511c
PB
5178
5179 free (cases_seen);
28d81abb
RK
5180 }
5181
5182 /* Now we go the other way around; we warn if there are case
ac2a9454 5183 expressions that don't correspond to enumerators. This can
28d81abb 5184 occur since C and C++ don't enforce type-checking of
0f41302f 5185 assignments to enumeration variables. */
28d81abb 5186
5720c7e7
RK
5187 if (case_stack->data.case_stmt.case_list
5188 && case_stack->data.case_stmt.case_list->left)
5189 case_stack->data.case_stmt.case_list
5190 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
28d81abb
RK
5191 if (warn_switch)
5192 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5193 {
5194 for (chain = TYPE_VALUES (type);
5195 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5196 chain = TREE_CHAIN (chain))
5197 ;
5198
5199 if (!chain)
3b24f55b
RS
5200 {
5201 if (TYPE_NAME (type) == 0)
e016950d
KG
5202 warning ("case value `%ld' not in enumerated type",
5203 (long) TREE_INT_CST_LOW (n->low));
3b24f55b 5204 else
e016950d
KG
5205 warning ("case value `%ld' not in enumerated type `%s'",
5206 (long) TREE_INT_CST_LOW (n->low),
3b24f55b
RS
5207 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5208 == IDENTIFIER_NODE)
5209 ? TYPE_NAME (type)
5210 : DECL_NAME (TYPE_NAME (type))));
5211 }
1ddde1cd
RS
5212 if (!tree_int_cst_equal (n->low, n->high))
5213 {
5214 for (chain = TYPE_VALUES (type);
5215 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5216 chain = TREE_CHAIN (chain))
5217 ;
5218
5219 if (!chain)
3b24f55b
RS
5220 {
5221 if (TYPE_NAME (type) == 0)
e016950d
KG
5222 warning ("case value `%ld' not in enumerated type",
5223 (long) TREE_INT_CST_LOW (n->high));
3b24f55b 5224 else
e016950d
KG
5225 warning ("case value `%ld' not in enumerated type `%s'",
5226 (long) TREE_INT_CST_LOW (n->high),
3b24f55b
RS
5227 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5228 == IDENTIFIER_NODE)
5229 ? TYPE_NAME (type)
5230 : DECL_NAME (TYPE_NAME (type))));
5231 }
1ddde1cd 5232 }
28d81abb
RK
5233 }
5234
ae8cb346
RS
5235#if 0
5236 /* ??? This optimization is disabled because it causes valid programs to
5237 fail. ANSI C does not guarantee that an expression with enum type
9faa82d8 5238 will have a value that is the same as one of the enumeration literals. */
ae8cb346 5239
28d81abb
RK
5240 /* If all values were found as case labels, make one of them the default
5241 label. Thus, this switch will never fall through. We arbitrarily pick
5242 the last one to make the default since this is likely the most
5243 efficient choice. */
5244
5245 if (all_values)
5246 {
5247 for (l = &case_stack->data.case_stmt.case_list;
5248 (*l)->right != 0;
5249 l = &(*l)->right)
5250 ;
5251
5252 case_stack->data.case_stmt.default_label = (*l)->code_label;
5253 *l = 0;
5254 }
ae8cb346 5255#endif /* 0 */
28d81abb 5256}
ca695ac9 5257
28d81abb
RK
5258\f
5259/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 5260 in which ORIG_INDEX is the expression to be tested.
28d81abb
RK
5261 Generate the code to test it and jump to the right place. */
5262
5263void
5264expand_end_case (orig_index)
5265 tree orig_index;
5266{
95d75019 5267 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
28d81abb
RK
5268 rtx default_label = 0;
5269 register struct case_node *n;
85066503 5270 unsigned int count;
28d81abb 5271 rtx index;
ca695ac9 5272 rtx table_label;
28d81abb
RK
5273 int ncases;
5274 rtx *labelvec;
5275 register int i;
5276 rtx before_case;
5277 register struct nesting *thiscase = case_stack;
1b0cb6fc 5278 tree index_expr, index_type;
ca695ac9
JB
5279 int unsignedp;
5280
03c03770
AS
5281 /* Don't crash due to previous errors. */
5282 if (thiscase == NULL)
5283 return;
5284
ca695ac9
JB
5285 table_label = gen_label_rtx ();
5286 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
5287 index_type = TREE_TYPE (index_expr);
5288 unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
5289
5290 do_pending_stack_adjust ();
5291
feb60352
R
5292 /* This might get a spurious warning in the presence of a syntax error;
5293 it could be fixed by moving the call to check_seenlabel after the
5294 check for error_mark_node, and copying the code of check_seenlabel that
5295 deals with case_stack->data.case_stmt.line_number_status /
5296 restore_line_number_status in front of the call to end_cleanup_deferral;
5297 however, this might miss some useful warnings in the presence of
5298 non-syntax errors. */
a11759a3
JR
5299 check_seenlabel ();
5300
28d81abb 5301 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 5302 if (index_type != error_mark_node)
28d81abb
RK
5303 {
5304 /* If switch expression was an enumerated type, check that all
5305 enumeration literals are covered by the cases.
5306 No sense trying this if there's a default case, however. */
5307
5308 if (!thiscase->data.case_stmt.default_label
5309 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5310 && TREE_CODE (index_expr) != INTEGER_CST)
5311 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5312
28d81abb
RK
5313 /* If we don't have a default-label, create one here,
5314 after the body of the switch. */
5315 if (thiscase->data.case_stmt.default_label == 0)
5316 {
5317 thiscase->data.case_stmt.default_label
5318 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5319 expand_label (thiscase->data.case_stmt.default_label);
5320 }
5321 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5322
5323 before_case = get_last_insn ();
5324
5720c7e7
RK
5325 if (thiscase->data.case_stmt.case_list
5326 && thiscase->data.case_stmt.case_list->left)
b059139c
RK
5327 thiscase->data.case_stmt.case_list
5328 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5329
28d81abb
RK
5330 /* Simplify the case-list before we count it. */
5331 group_case_nodes (thiscase->data.case_stmt.case_list);
5332
5333 /* Get upper and lower bounds of case values.
5334 Also convert all the case values to the index expr's data type. */
5335
5336 count = 0;
5337 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5338 {
5339 /* Check low and high label values are integers. */
5340 if (TREE_CODE (n->low) != INTEGER_CST)
5341 abort ();
5342 if (TREE_CODE (n->high) != INTEGER_CST)
5343 abort ();
5344
1b0cb6fc
RK
5345 n->low = convert (index_type, n->low);
5346 n->high = convert (index_type, n->high);
28d81abb
RK
5347
5348 /* Count the elements and track the largest and smallest
5349 of them (treating them as signed even if they are not). */
5350 if (count++ == 0)
5351 {
5352 minval = n->low;
5353 maxval = n->high;
5354 }
5355 else
5356 {
5357 if (INT_CST_LT (n->low, minval))
5358 minval = n->low;
5359 if (INT_CST_LT (maxval, n->high))
5360 maxval = n->high;
5361 }
5362 /* A range counts double, since it requires two compares. */
5363 if (! tree_int_cst_equal (n->low, n->high))
5364 count++;
5365 }
5366
3474db0e
RS
5367 orig_minval = minval;
5368
28d81abb
RK
5369 /* Compute span of values. */
5370 if (count != 0)
1b0cb6fc 5371 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5372
956d6950 5373 end_cleanup_deferral ();
4c581243 5374
1b0cb6fc 5375 if (count == 0)
28d81abb
RK
5376 {
5377 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5378 emit_queue ();
5379 emit_jump (default_label);
5380 }
3474db0e 5381
28d81abb
RK
5382 /* If range of values is much bigger than number of values,
5383 make a sequence of conditional branches instead of a dispatch.
5384 If the switch-index is a constant, do it this way
5385 because we can optimize it. */
4f73c5dd
TW
5386
5387#ifndef CASE_VALUES_THRESHOLD
28d81abb 5388#ifdef HAVE_casesi
4f73c5dd 5389#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
28d81abb 5390#else
4f73c5dd
TW
5391 /* If machine does not have a case insn that compares the
5392 bounds, this means extra overhead for dispatch tables
5393 which raises the threshold for using them. */
5394#define CASE_VALUES_THRESHOLD 5
5395#endif /* HAVE_casesi */
5396#endif /* CASE_VALUES_THRESHOLD */
5397
5398 else if (TREE_INT_CST_HIGH (range) != 0
c84e2712 5399 || count < (unsigned int) CASE_VALUES_THRESHOLD
37366632
RK
5400 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5401 > 10 * count)
3f6fe18e
RK
5402#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5403 || flag_pic
5404#endif
28d81abb 5405 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 5406 /* These will reduce to a constant. */
28d81abb 5407 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 5408 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 5409 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
95815af9 5410 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
b4ac57ab
RS
5411 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5412 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5413 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 5414 {
37366632 5415 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5416
5417 /* If the index is a short or char that we do not have
5418 an insn to handle comparisons directly, convert it to
5419 a full integer now, rather than letting each comparison
5420 generate the conversion. */
5421
5422 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5423 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5424 == CODE_FOR_nothing))
5425 {
5426 enum machine_mode wider_mode;
5427 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5428 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5429 if (cmp_optab->handlers[(int) wider_mode].insn_code
5430 != CODE_FOR_nothing)
5431 {
5432 index = convert_to_mode (wider_mode, index, unsignedp);
5433 break;
5434 }
5435 }
5436
5437 emit_queue ();
5438 do_pending_stack_adjust ();
5439
5440 index = protect_from_queue (index, 0);
5441 if (GET_CODE (index) == MEM)
5442 index = copy_to_reg (index);
5443 if (GET_CODE (index) == CONST_INT
5444 || TREE_CODE (index_expr) == INTEGER_CST)
5445 {
5446 /* Make a tree node with the proper constant value
5447 if we don't already have one. */
5448 if (TREE_CODE (index_expr) != INTEGER_CST)
5449 {
5450 index_expr
5451 = build_int_2 (INTVAL (index),
e9a042b6 5452 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5453 index_expr = convert (index_type, index_expr);
28d81abb
RK
5454 }
5455
5456 /* For constant index expressions we need only
5457 issue an unconditional branch to the appropriate
5458 target code. The job of removing any unreachable
5459 code is left to the optimization phase if the
5460 "-O" option is specified. */
1b0cb6fc
RK
5461 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5462 if (! tree_int_cst_lt (index_expr, n->low)
5463 && ! tree_int_cst_lt (n->high, index_expr))
5464 break;
5465
28d81abb
RK
5466 if (n)
5467 emit_jump (label_rtx (n->code_label));
5468 else
5469 emit_jump (default_label);
5470 }
5471 else
5472 {
5473 /* If the index expression is not constant we generate
5474 a binary decision tree to select the appropriate
5475 target code. This is done as follows:
5476
5477 The list of cases is rearranged into a binary tree,
5478 nearly optimal assuming equal probability for each case.
5479
5480 The tree is transformed into RTL, eliminating
5481 redundant test conditions at the same time.
5482
5483 If program flow could reach the end of the
5484 decision tree an unconditional jump to the
5485 default code is emitted. */
5486
5487 use_cost_table
5488 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
28d81abb 5489 && estimate_case_costs (thiscase->data.case_stmt.case_list));
37366632
RK
5490 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5491 NULL_PTR);
28d81abb 5492 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5493 default_label, index_type);
28d81abb
RK
5494 emit_jump_if_reachable (default_label);
5495 }
5496 }
5497 else
5498 {
5499 int win = 0;
5500#ifdef HAVE_casesi
5501 if (HAVE_casesi)
5502 {
c4fcf531 5503 enum machine_mode index_mode = SImode;
5130a5cc 5504 int index_bits = GET_MODE_BITSIZE (index_mode);
086f237d
JW
5505 rtx op1, op2;
5506 enum machine_mode op_mode;
c4fcf531 5507
28d81abb 5508 /* Convert the index to SImode. */
1b0cb6fc 5509 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
c4fcf531 5510 > GET_MODE_BITSIZE (index_mode))
28d81abb 5511 {
1b0cb6fc 5512 enum machine_mode omode = TYPE_MODE (index_type);
37366632 5513 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
af2682ef
RS
5514
5515 /* We must handle the endpoints in the original mode. */
1b0cb6fc 5516 index_expr = build (MINUS_EXPR, index_type,
28d81abb
RK
5517 index_expr, minval);
5518 minval = integer_zero_node;
37366632 5519 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
c5d5d461
JL
5520 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5521 omode, 1, 0, default_label);
af2682ef
RS
5522 /* Now we can safely truncate. */
5523 index = convert_to_mode (index_mode, index, 0);
5524 }
5525 else
5526 {
1b0cb6fc 5527 if (TYPE_MODE (index_type) != index_mode)
d3b35d75
RK
5528 {
5529 index_expr = convert (type_for_size (index_bits, 0),
5530 index_expr);
5531 index_type = TREE_TYPE (index_expr);
5532 }
5533
37366632 5534 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5535 }
28d81abb
RK
5536 emit_queue ();
5537 index = protect_from_queue (index, 0);
5538 do_pending_stack_adjust ();
5539
a995e389
RH
5540 op_mode = insn_data[(int)CODE_FOR_casesi].operand[0].mode;
5541 if (! (*insn_data[(int)CODE_FOR_casesi].operand[0].predicate)
086f237d
JW
5542 (index, op_mode))
5543 index = copy_to_mode_reg (op_mode, index);
5544
5545 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5546
a995e389
RH
5547 op_mode = insn_data[(int)CODE_FOR_casesi].operand[1].mode;
5548 if (! (*insn_data[(int)CODE_FOR_casesi].operand[1].predicate)
086f237d
JW
5549 (op1, op_mode))
5550 op1 = copy_to_mode_reg (op_mode, op1);
5551
5552 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5553
a995e389
RH
5554 op_mode = insn_data[(int)CODE_FOR_casesi].operand[2].mode;
5555 if (! (*insn_data[(int)CODE_FOR_casesi].operand[2].predicate)
086f237d
JW
5556 (op2, op_mode))
5557 op2 = copy_to_mode_reg (op_mode, op2);
5558
5559 emit_jump_insn (gen_casesi (index, op1, op2,
28d81abb
RK
5560 table_label, default_label));
5561 win = 1;
5562 }
5563#endif
5564#ifdef HAVE_tablejump
5565 if (! win && HAVE_tablejump)
5566 {
5567 index_expr = convert (thiscase->data.case_stmt.nominal_type,
1b0cb6fc 5568 fold (build (MINUS_EXPR, index_type,
b4ac57ab 5569 index_expr, minval)));
d3b35d75 5570 index_type = TREE_TYPE (index_expr);
37366632 5571 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5572 emit_queue ();
af2682ef 5573 index = protect_from_queue (index, 0);
28d81abb
RK
5574 do_pending_stack_adjust ();
5575
1b0cb6fc 5576 do_tablejump (index, TYPE_MODE (index_type),
37366632 5577 expand_expr (range, NULL_RTX, VOIDmode, 0),
28d81abb
RK
5578 table_label, default_label);
5579 win = 1;
5580 }
5581#endif
5582 if (! win)
5583 abort ();
5584
5585 /* Get table of labels to jump to, in order of case index. */
5586
5587 ncases = TREE_INT_CST_LOW (range) + 1;
5588 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4c9a05bc 5589 bzero ((char *) labelvec, ncases * sizeof (rtx));
28d81abb
RK
5590
5591 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5592 {
37366632 5593 register HOST_WIDE_INT i
3474db0e 5594 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
28d81abb
RK
5595
5596 while (1)
5597 {
5598 labelvec[i]
38a448ca 5599 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3474db0e 5600 if (i + TREE_INT_CST_LOW (orig_minval)
28d81abb
RK
5601 == TREE_INT_CST_LOW (n->high))
5602 break;
5603 i++;
5604 }
5605 }
5606
5607 /* Fill in the gaps with the default. */
5608 for (i = 0; i < ncases; i++)
5609 if (labelvec[i] == 0)
38a448ca 5610 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
28d81abb
RK
5611
5612 /* Output the table */
5613 emit_label (table_label);
5614
18543a22 5615 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5616 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5617 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353 5618 gen_rtvec_v (ncases, labelvec),
8f985ec4 5619 const0_rtx, const0_rtx));
28d81abb 5620 else
38a448ca
RH
5621 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5622 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5623
5624 /* If the case insn drops through the table,
5625 after the table we must jump to the default-label.
5626 Otherwise record no drop-through after the table. */
5627#ifdef CASE_DROPS_THROUGH
5628 emit_jump (default_label);
5629#else
5630 emit_barrier ();
5631#endif
5632 }
5633
915f619f
JW
5634 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5635 reorder_insns (before_case, get_last_insn (),
28d81abb
RK
5636 thiscase->data.case_stmt.start);
5637 }
4c581243 5638 else
956d6950 5639 end_cleanup_deferral ();
1b0cb6fc 5640
28d81abb
RK
5641 if (thiscase->exit_label)
5642 emit_label (thiscase->exit_label);
5643
5644 POPSTACK (case_stack);
5645
5646 free_temp_slots ();
5647}
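/* A sketch of the strategy choice made above, under hypothetical
   numbers: with HAVE_casesi the default threshold is 4 (unless the
   target overrides CASE_VALUES_THRESHOLD), so a switch with three
   case labels, or one whose 20 labels are spread over a range wider
   than 200 (range > 10 * count), is expanded by emit_case_nodes as a
   binary tree of compares, while, say, 8 labels packed into the
   range 0..9 go through gen_casesi or do_tablejump as an indexed
   jump table.  A constant index expression always takes the compare
   path, which folds down to a single unconditional jump.  */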
5648
57641239
RK
5649/* Convert the tree NODE into a list linked by the right field, with the left
5650 field zeroed. RIGHT is used for recursion; it is a list to be placed
5651 rightmost in the resulting list. */
5652
5653static struct case_node *
5654case_tree2list (node, right)
5655 struct case_node *node, *right;
5656{
5657 struct case_node *left;
5658
5659 if (node->right)
5660 right = case_tree2list (node->right, right);
5661
5662 node->right = right;
51723711 5663 if ((left = node->left))
57641239
RK
5664 {
5665 node->left = 0;
5666 return case_tree2list (left, node);
5667 }
5668
5669 return node;
5670}
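/* For illustration, flattening the three-node tree

          2
         / \
        1   3

   with case_tree2list (root, 0) yields the list 1 -> 2 -> 3, linked
   through the right fields with every left field zeroed, which is
   the shape the grouping and emitting code above expects.  */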
ca695ac9 5671
28d81abb
RK
5672/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5673
5674static void
5675do_jump_if_equal (op1, op2, label, unsignedp)
5676 rtx op1, op2, label;
5677 int unsignedp;
5678{
5679 if (GET_CODE (op1) == CONST_INT
5680 && GET_CODE (op2) == CONST_INT)
5681 {
5682 if (INTVAL (op1) == INTVAL (op2))
5683 emit_jump (label);
5684 }
5685 else
5686 {
5687 enum machine_mode mode = GET_MODE (op1);
5688 if (mode == VOIDmode)
5689 mode = GET_MODE (op2);
c5d5d461
JL
5690 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5691 0, label);
28d81abb
RK
5692 }
5693}
5694\f
5695/* Not all case values are encountered equally. This function
5696 uses a heuristic to weight case labels, in cases where that
5697 looks like a reasonable thing to do.
5698
5699 Right now, all we try to guess is text, and we establish the
5700 following weights:
5701
5702 chars above space: 16
5703 digits: 16
5704 default: 12
5705 space, punct: 8
5706 tab: 4
5707 newline: 2
5708 other "\" chars: 1
5709 remaining chars: 0
5710
5711 If we find any cases in the switch that are not either -1 or in the range
5712 of valid ASCII characters, or are control characters other than those
5713 commonly used with "\", don't treat this switch as scanning text.
5714
5715 Return 1 if these nodes are suitable for cost estimation, otherwise
5716 return 0. */
5717
5718static int
5719estimate_case_costs (node)
5720 case_node_ptr node;
5721{
5722 tree min_ascii = build_int_2 (-1, -1);
5723 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5724 case_node_ptr n;
5725 int i;
5726
5727 /* If we haven't already made the cost table, make it now. Note that the
5728 lower bound of the table is -1, not zero. */
5729
5730 if (cost_table == NULL)
5731 {
e7749837 5732 cost_table = cost_table_ + 1;
28d81abb
RK
5733
5734 for (i = 0; i < 128; i++)
5735 {
e9a780ec 5736 if (ISALNUM (i))
28d81abb 5737 cost_table[i] = 16;
e9a780ec 5738 else if (ISPUNCT (i))
28d81abb 5739 cost_table[i] = 8;
e9a780ec 5740 else if (ISCNTRL (i))
28d81abb
RK
5741 cost_table[i] = -1;
5742 }
5743
5744 cost_table[' '] = 8;
5745 cost_table['\t'] = 4;
5746 cost_table['\0'] = 4;
5747 cost_table['\n'] = 2;
5748 cost_table['\f'] = 1;
5749 cost_table['\v'] = 1;
5750 cost_table['\b'] = 1;
5751 }
5752
5753 /* See if all the case expressions look like text. It is text if the
5754 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5755 as signed arithmetic since we don't want to ever access cost_table with a
5756 value less than -1. Also check that none of the constants in a range
5757 are strange control characters. */
5758
5759 for (n = node; n; n = n->right)
5760 {
5761 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5762 return 0;
5763
5764 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5765 if (cost_table[i] < 0)
5766 return 0;
5767 }
5768
5769 /* All interesting values are within the range of interesting
5770 ASCII characters. */
5771 return 1;
5772}
5773
5774/* Scan an ordered list of case nodes
5775 combining those with consecutive values or ranges.
5776
5777 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5778
5779static void
5780group_case_nodes (head)
5781 case_node_ptr head;
5782{
5783 case_node_ptr node = head;
5784
5785 while (node)
5786 {
5787 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5788 rtx lb2;
28d81abb
RK
5789 case_node_ptr np = node;
5790
5791 /* Try to group the successors of NODE with NODE. */
5792 while (((np = np->right) != 0)
5793 /* Do they jump to the same place? */
ad7e369f
JL
5794 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5795 || (lb != 0 && lb2 != 0
5796 && simplejump_p (lb)
5797 && simplejump_p (lb2)
5798 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5799 SET_SRC (PATTERN (lb2)))))
28d81abb
RK
5800 /* Are their ranges consecutive? */
5801 && tree_int_cst_equal (np->low,
5802 fold (build (PLUS_EXPR,
5803 TREE_TYPE (node->high),
5804 node->high,
5805 integer_one_node)))
5806 /* An overflow is not consecutive. */
5807 && tree_int_cst_lt (node->high,
5808 fold (build (PLUS_EXPR,
5809 TREE_TYPE (node->high),
5810 node->high,
5811 integer_one_node))))
5812 {
5813 node->high = np->high;
5814 }
5815 /* NP is the first node after NODE which can't be grouped with it.
5816 Delete the nodes in between, and move on to that node. */
5817 node->right = np;
5818 node = np;
5819 }
5820}
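/* Example of the effect, with hypothetical labels: three separate
   nodes for `case 1:', `case 2:' and `case 3:' that all branch to
   the same place have consecutive values, so the loop above merges
   them into the single range node 1..3, and two compares suffice
   later instead of three.  */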
5821
5822/* Take an ordered list of case nodes
5823 and transform them into a near optimal binary tree,
6dc42e49 5824 on the assumption that any target code selection value is as
28d81abb
RK
5825 likely as any other.
5826
5827 The transformation is performed by splitting the ordered
5828 list into two equal sections plus a pivot. The parts are
5829 then attached to the pivot as left and right branches. Each
38e01259 5830 branch is then transformed recursively. */
28d81abb
RK
5831
5832static void
5833balance_case_nodes (head, parent)
5834 case_node_ptr *head;
5835 case_node_ptr parent;
5836{
5837 register case_node_ptr np;
5838
5839 np = *head;
5840 if (np)
5841 {
5842 int cost = 0;
5843 int i = 0;
5844 int ranges = 0;
5845 register case_node_ptr *npp;
5846 case_node_ptr left;
5847
5848 /* Count the number of entries on branch. Also count the ranges. */
5849
5850 while (np)
5851 {
5852 if (!tree_int_cst_equal (np->low, np->high))
5853 {
5854 ranges++;
5855 if (use_cost_table)
5856 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5857 }
5858
5859 if (use_cost_table)
5860 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5861
5862 i++;
5863 np = np->right;
5864 }
5865
5866 if (i > 2)
5867 {
5868 /* Split this list if it is long enough for that to help. */
5869 npp = head;
5870 left = *npp;
5871 if (use_cost_table)
5872 {
5873 /* Find the place in the list that bisects the list's total cost.
5874 Here I gets half the total cost. */
5875 int n_moved = 0;
5876 i = (cost + 1) / 2;
5877 while (1)
5878 {
5879 /* Skip nodes while their cost does not reach that amount. */
5880 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5881 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5882 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5883 if (i <= 0)
5884 break;
5885 npp = &(*npp)->right;
5886 n_moved += 1;
5887 }
5888 if (n_moved == 0)
5889 {
5890 /* Leave this branch lopsided, but optimize left-hand
5891 side and fill in `parent' fields for right-hand side. */
5892 np = *head;
5893 np->parent = parent;
5894 balance_case_nodes (&np->left, np);
5895 for (; np->right; np = np->right)
5896 np->right->parent = np;
5897 return;
5898 }
5899 }
5900 /* If there are just three nodes, split at the middle one. */
5901 else if (i == 3)
5902 npp = &(*npp)->right;
5903 else
5904 {
5905 /* Find the place in the list that bisects the list's total cost,
5906 where ranges count as 2.
5907 Here I gets half the total cost. */
5908 i = (i + ranges + 1) / 2;
5909 while (1)
5910 {
5911 /* Skip nodes while their cost does not reach that amount. */
5912 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5913 i--;
5914 i--;
5915 if (i <= 0)
5916 break;
5917 npp = &(*npp)->right;
5918 }
5919 }
5920 *head = np = *npp;
5921 *npp = 0;
5922 np->parent = parent;
5923 np->left = left;
5924
5925 /* Optimize each of the two split parts. */
5926 balance_case_nodes (&np->left, np);
5927 balance_case_nodes (&np->right, np);
5928 }
5929 else
5930 {
5931 /* Else leave this branch as one level,
5932 but fill in `parent' fields. */
5933 np = *head;
5934 np->parent = parent;
5935 for (; np->right; np = np->right)
5936 np->right->parent = np;
5937 }
5938 }
5939}
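/* An illustration with hypothetical, uniform-cost input: given the
   ordered list 1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7 with no ranges and no
   cost table, the split above lands on 4, giving

            4
           / \
        1..3  5..7

   and each half is balanced recursively, so the emitted decision
   tree tests at most three nodes for any index value.  */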
5940\f
5941/* Search the parent sections of the case node tree
5942 to see if a test for the lower bound of NODE would be redundant.
5943 INDEX_TYPE is the type of the index expression.
5944
5945 The instructions to generate the case decision tree are
5946 output in the same order as nodes are processed so it is
5947 known that if a parent node checks the range of the current
5948 node minus one, then the current node is bounded at its lower
5949 span. Thus the test would be redundant. */
5950
5951static int
5952node_has_low_bound (node, index_type)
5953 case_node_ptr node;
5954 tree index_type;
5955{
5956 tree low_minus_one;
5957 case_node_ptr pnode;
5958
5959 /* If the lower bound of this node is the lowest value in the index type,
5960 we need not test it. */
5961
5962 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5963 return 1;
5964
5965 /* If this node has a left branch, the value at the left must be less
5966 than that at this node, so it cannot be bounded at the bottom and
5967 we need not bother testing any further. */
5968
5969 if (node->left)
5970 return 0;
5971
5972 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5973 node->low, integer_one_node));
5974
5975 /* If the subtraction above overflowed, we can't verify anything.
5976 Otherwise, look for a parent that tests our value - 1. */
5977
5978 if (! tree_int_cst_lt (low_minus_one, node->low))
5979 return 0;
5980
5981 for (pnode = node->parent; pnode; pnode = pnode->parent)
5982 if (tree_int_cst_equal (low_minus_one, pnode->high))
5983 return 1;
5984
5985 return 0;
5986}
5987
5988/* Search the parent sections of the case node tree
5989 to see if a test for the upper bound of NODE would be redundant.
5990 INDEX_TYPE is the type of the index expression.
5991
5992 The instructions to generate the case decision tree are
5993 output in the same order as nodes are processed so it is
5994 known that if a parent node checks the range of the current
5995 node plus one, then the current node is bounded at its upper
5996 span. Thus the test would be redundant. */
5997
5998static int
5999node_has_high_bound (node, index_type)
6000 case_node_ptr node;
6001 tree index_type;
6002{
6003 tree high_plus_one;
6004 case_node_ptr pnode;
6005
e1ee5cdc
RH
6006 /* If there is no upper bound, obviously no test is needed. */
6007
6008 if (TYPE_MAX_VALUE (index_type) == NULL)
6009 return 1;
6010
28d81abb
RK
6011 /* If the upper bound of this node is the highest value in the type
6012 of the index expression, we need not test against it. */
6013
6014 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6015 return 1;
6016
6017 /* If this node has a right branch, the value at the right must be greater
6018 than that at this node, so it cannot be bounded at the top and
6019 we need not bother testing any further. */
6020
6021 if (node->right)
6022 return 0;
6023
6024 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6025 node->high, integer_one_node));
6026
6027 /* If the addition above overflowed, we can't verify anything.
6028 Otherwise, look for a parent that tests our value + 1. */
6029
6030 if (! tree_int_cst_lt (node->high, high_plus_one))
6031 return 0;
6032
6033 for (pnode = node->parent; pnode; pnode = pnode->parent)
6034 if (tree_int_cst_equal (high_plus_one, pnode->low))
6035 return 1;
6036
6037 return 0;
6038}
6039
6040/* Search the parent sections of the
6041 case node tree to see if both tests for the upper and lower
6042 bounds of NODE would be redundant. */
6043
6044static int
6045node_is_bounded (node, index_type)
6046 case_node_ptr node;
6047 tree index_type;
6048{
6049 return (node_has_low_bound (node, index_type)
6050 && node_has_high_bound (node, index_type));
6051}
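/* An example of the pruning these predicates allow, with
   hypothetical ranges: if a node covers 51..60 and some ancestor's
   range ends at 50, that ancestor's test already ruled out values
   below 51, so node_has_low_bound returns 1 and the lower-bound
   compare is omitted; likewise an ancestor whose range starts at 61
   lets node_has_high_bound suppress the upper-bound compare.  */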
6052
6053/* Emit an unconditional jump to LABEL unless it would be dead code. */
6054
6055static void
6056emit_jump_if_reachable (label)
6057 rtx label;
6058{
6059 if (GET_CODE (get_last_insn ()) != BARRIER)
6060 emit_jump (label);
6061}
6062\f
6063/* Emit step-by-step code to select a case for the value of INDEX.
6064 The thus generated decision tree follows the form of the
6065 case-node binary tree NODE, whose nodes represent test conditions.
6066 INDEX_TYPE is the type of the index of the switch.
6067
6068 Care is taken to prune redundant tests from the decision tree
6069 by detecting any boundary conditions already checked by
6070 emitted rtx. (See node_has_high_bound, node_has_low_bound
6071 and node_is_bounded, above.)
6072
6073 Where the test conditions can be shown to be redundant we emit
6074 an unconditional jump to the target code. As a further
6075 optimization, the subordinates of a tree node are examined to
6076 check for bounded nodes. In this case conditional and/or
6077 unconditional jumps as a result of the boundary check for the
6078 current node are arranged to target the subordinates' associated
38e01259 6079 code for out-of-bound conditions on the current node.
28d81abb 6080
f72aed24 6081 We can assume that when control reaches the code generated here,
28d81abb
RK
6082 the index value has already been compared with the parents
6083 of this node, and determined to be on the same side of each parent
6084 as this node is. Thus, if this node tests for the value 51,
6085 and a parent tested for 52, we don't need to consider
6086 the possibility of a value greater than 51. If another parent
6087 tests for the value 50, then this node need not test anything. */
6088
6089static void
6090emit_case_nodes (index, node, default_label, index_type)
6091 rtx index;
6092 case_node_ptr node;
6093 rtx default_label;
6094 tree index_type;
6095{
6096 /* If INDEX has an unsigned type, we must make unsigned branches. */
6097 int unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
6098 enum machine_mode mode = GET_MODE (index);
6099
6100 /* See if our parents have already tested everything for us.
6101 If they have, emit an unconditional jump for this node. */
6102 if (node_is_bounded (node, index_type))
6103 emit_jump (label_rtx (node->code_label));
6104
6105 else if (tree_int_cst_equal (node->low, node->high))
6106 {
6107 /* Node is single valued. First see if the index expression matches
0f41302f 6108 this node and then check our children, if any. */
28d81abb 6109
37366632 6110 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
28d81abb
RK
6111 label_rtx (node->code_label), unsignedp);
6112
6113 if (node->right != 0 && node->left != 0)
6114 {
6115 /* This node has children on both sides.
6116 Dispatch to one side or the other
6117 by comparing the index value with this node's value.
6118 If one subtree is bounded, check that one first,
6119 so we can avoid real branches in the tree. */
6120
6121 if (node_is_bounded (node->right, index_type))
6122 {
c5d5d461
JL
6123 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6124 VOIDmode, 0),
6125 GT, NULL_RTX, mode, unsignedp, 0,
6126 label_rtx (node->right->code_label));
28d81abb
RK
6127 emit_case_nodes (index, node->left, default_label, index_type);
6128 }
6129
6130 else if (node_is_bounded (node->left, index_type))
6131 {
c5d5d461
JL
6132 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6133 VOIDmode, 0),
6134 LT, NULL_RTX, mode, unsignedp, 0,
6135 label_rtx (node->left->code_label));
28d81abb
RK
6136 emit_case_nodes (index, node->right, default_label, index_type);
6137 }
6138
6139 else
6140 {
6141 /* Neither node is bounded. First distinguish the two sides;
6142 then emit the code for one side at a time. */
6143
6144 tree test_label
6145 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6146
6147 /* See if the value is on the right. */
c5d5d461
JL
6148 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6149 VOIDmode, 0),
6150 GT, NULL_RTX, mode, unsignedp, 0,
6151 label_rtx (test_label));
28d81abb
RK
6152
6153 /* Value must be on the left.
6154 Handle the left-hand subtree. */
6155 emit_case_nodes (index, node->left, default_label, index_type);
6156 /* If left-hand subtree does nothing,
6157 go to default. */
6158 emit_jump_if_reachable (default_label);
6159
6160 /* Code branches here for the right-hand subtree. */
6161 expand_label (test_label);
6162 emit_case_nodes (index, node->right, default_label, index_type);
6163 }
6164 }
6165
6166 else if (node->right != 0 && node->left == 0)
6167 {
6168 /* Here we have a right child but no left so we issue conditional
6169 branch to default and process the right child.
6170
6171 Omit the conditional branch to default if it avoids only one
6172 right child; it costs too much space to save so little time. */
6173
de14fd73 6174 if (node->right->right || node->right->left
28d81abb
RK
6175 || !tree_int_cst_equal (node->right->low, node->right->high))
6176 {
6177 if (!node_has_low_bound (node, index_type))
6178 {
c5d5d461
JL
6179 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6180 NULL_RTX,
6181 VOIDmode, 0),
6182 LT, NULL_RTX, mode, unsignedp, 0,
6183 default_label);
28d81abb
RK
6184 }
6185
6186 emit_case_nodes (index, node->right, default_label, index_type);
6187 }
6188 else
6189 /* We cannot process node->right normally
6190 since we haven't ruled out the numbers less than
6191 this node's value. So handle node->right explicitly. */
6192 do_jump_if_equal (index,
37366632
RK
6193 expand_expr (node->right->low, NULL_RTX,
6194 VOIDmode, 0),
28d81abb
RK
6195 label_rtx (node->right->code_label), unsignedp);
6196 }
6197
6198 else if (node->right == 0 && node->left != 0)
6199 {
6200 /* Just one subtree, on the left. */
6201
de14fd73
RK
6202#if 0 /* The following code and comment were formerly part
6203 of the condition here, but they didn't work
6204 and I don't understand what the idea was. -- rms. */
6205 /* If our "most probable entry" is less probable
28d81abb
RK
6206 than the default label, emit a jump to
6207 the default label using condition codes
6208 already lying around. With no right branch,
6209 a branch-greater-than will get us to the default
6210 label correctly. */
de14fd73
RK
6211 if (use_cost_table
6212 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6213 ;
6214#endif /* 0 */
6215 if (node->left->left || node->left->right
28d81abb
RK
6216 || !tree_int_cst_equal (node->left->low, node->left->high))
6217 {
6218 if (!node_has_high_bound (node, index_type))
6219 {
c5d5d461
JL
6220 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6221 NULL_RTX,
6222 VOIDmode, 0),
6223 GT, NULL_RTX, mode, unsignedp, 0,
6224 default_label);
28d81abb
RK
6225 }
6226
6227 emit_case_nodes (index, node->left, default_label, index_type);
6228 }
6229 else
6230 /* We cannot process node->left normally
6231 since we haven't ruled out the numbers greater than
6232 this node's value. So handle node->left explicitly. */
6233 do_jump_if_equal (index,
37366632
RK
6234 expand_expr (node->left->low, NULL_RTX,
6235 VOIDmode, 0),
28d81abb
RK
6236 label_rtx (node->left->code_label), unsignedp);
6237 }
6238 }
6239 else
6240 {
6241 /* Node is a range. These cases are very similar to those for a single
6242 value, except that we do not start by testing whether this node
6243 is the one to branch to. */
6244
6245 if (node->right != 0 && node->left != 0)
6246 {
6247 /* Node has subtrees on both sides.
6248 If the right-hand subtree is bounded,
6249 test for it first, since we can go straight there.
6250 Otherwise, we need to make a branch in the control structure,
6251 then handle the two subtrees. */
6252 tree test_label = 0;
6253
28d81abb
RK
6254
6255 if (node_is_bounded (node->right, index_type))
6256 /* Right hand node is fully bounded so we can eliminate any
6257 testing and branch directly to the target code. */
c5d5d461
JL
6258 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6259 VOIDmode, 0),
6260 GT, NULL_RTX, mode, unsignedp, 0,
6261 label_rtx (node->right->code_label));
28d81abb
RK
6262 else
6263 {
6264 /* Right hand node requires testing.
6265 Branch to a label where we will handle it later. */
6266
6267 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
c5d5d461
JL
6268 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6269 VOIDmode, 0),
6270 GT, NULL_RTX, mode, unsignedp, 0,
6271 label_rtx (test_label));
28d81abb
RK
6272 }
6273
6274 /* Value belongs to this node or to the left-hand subtree. */
6275
c5d5d461
JL
6276 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6277 VOIDmode, 0),
6278 GE, NULL_RTX, mode, unsignedp, 0,
6279 label_rtx (node->code_label));
28d81abb
RK
6280
6281 /* Handle the left-hand subtree. */
6282 emit_case_nodes (index, node->left, default_label, index_type);
6283
6284 /* If right node had to be handled later, do that now. */
6285
6286 if (test_label)
6287 {
6288 /* If the left-hand subtree fell through,
6289 don't let it fall into the right-hand subtree. */
6290 emit_jump_if_reachable (default_label);
6291
6292 expand_label (test_label);
6293 emit_case_nodes (index, node->right, default_label, index_type);
6294 }
6295 }
6296
6297 else if (node->right != 0 && node->left == 0)
6298 {
6299 /* Deal with values to the left of this node,
6300 if they are possible. */
6301 if (!node_has_low_bound (node, index_type))
6302 {
c5d5d461
JL
6303 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6304 VOIDmode, 0),
6305 LT, NULL_RTX, mode, unsignedp, 0,
6306 default_label);
28d81abb
RK
6307 }
6308
6309 /* Value belongs to this node or to the right-hand subtree. */
6310
c5d5d461
JL
6311 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6312 VOIDmode, 0),
6313 LE, NULL_RTX, mode, unsignedp, 0,
6314 label_rtx (node->code_label));
28d81abb
RK
6315
6316 emit_case_nodes (index, node->right, default_label, index_type);
6317 }
6318
6319 else if (node->right == 0 && node->left != 0)
6320 {
6321 /* Deal with values to the right of this node,
6322 if they are possible. */
6323 if (!node_has_high_bound (node, index_type))
6324 {
c5d5d461
JL
6325 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6326 VOIDmode, 0),
6327 GT, NULL_RTX, mode, unsignedp, 0,
6328 default_label);
28d81abb
RK
6329 }
6330
6331 /* Value belongs to this node or to the left-hand subtree. */
6332
c5d5d461
JL
6333 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6334 VOIDmode, 0),
6335 GE, NULL_RTX, mode, unsignedp, 0,
6336 label_rtx (node->code_label));
28d81abb
RK
6337
6338 emit_case_nodes (index, node->left, default_label, index_type);
6339 }
6340
6341 else
6342 {
6343 /* Node has no children so we check low and high bounds to remove
6344 redundant tests. Only one of the bounds can exist,
6345 since otherwise this node is bounded--a case tested already. */
6346
6347 if (!node_has_high_bound (node, index_type))
6348 {
c5d5d461
JL
6349 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6350 VOIDmode, 0),
6351 GT, NULL_RTX, mode, unsignedp, 0,
6352 default_label);
28d81abb
RK
6353 }
6354
6355 if (!node_has_low_bound (node, index_type))
6356 {
c5d5d461
JL
6357 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6358 VOIDmode, 0),
6359 LT, NULL_RTX, mode, unsignedp, 0,
6360 default_label);
28d81abb
RK
6361 }
6362
6363 emit_jump (label_rtx (node->code_label));
6364 }
6365 }
6366}
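/* A schematic of the code emitted for a small tree, with
   hypothetical labels: for a root node covering 3..5 whose only
   child is the single value 1 on the left, and no known bounds on
   the index type, the walk above produces

       if (index > 5) goto default;
       if (index >= 3) goto L_3_5;
       if (index == 1) goto L_1;
       goto default;

   where the final jump comes from the emit_jump_if_reachable call
   in expand_end_case.  */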
6367\f
6368/* These routines are used by the loop unrolling code. They copy BLOCK trees
6369 so that the debugging info will be correct for the unrolled loop. */
6370
28d81abb 6371void
94dc8b56 6372find_loop_tree_blocks ()
28d81abb 6373{
1a4450c7 6374 identify_blocks (DECL_INITIAL (current_function_decl), get_insns ());
28d81abb
RK
6375}
6376
28d81abb 6377void
94dc8b56 6378unroll_block_trees ()
28d81abb 6379{
94dc8b56 6380 tree block = DECL_INITIAL (current_function_decl);
28d81abb 6381
1a4450c7 6382 reorder_blocks (block, get_insns ());
28d81abb 6383}