/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

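/* Illustration only, not part of the original file: a front-end fragment
   showing the call protocol described above.  `expand_stmt' is a
   hypothetical stand-in for however the front end expands the clause.  */
#if 0
static void
example_expand_if_then (tree cond, tree then_clause)
{
  expand_start_cond (cond, 0);	/* emit the test; 0 = not an exit target */
  expand_stmt (then_clause);	/* hypothetical: expand the then-clause */
  expand_end_cond ();		/* define the label that ends the if */
}
#endif
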
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

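/* Illustration only: given dense case values such as

	switch (x) { case 1: case 2: case 3: case 4: ... }

   a branch table (dispatch via tablejump) is emitted; given sparse values
   such as `case 1', `case 1000', `case 100000', the balanced-tree form is
   used instead, roughly equivalent to

	if (x == 1000) goto L1000;
	else if (x < 1000) { if (x == 1) goto L1; }
	else { if (x == 100000) goto L100000; }  */
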
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
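
/* Illustration only: an `expand_end_WHATEVER' function pops its own entry
   and everything nested inside it with, e.g.,

	POPSTACK (loop_stack);

   which also unwinds cond_stack, block_stack, case_stack and
   stack_block_stack past any entries nested within the popped levels.  */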
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

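/* Illustration only (GNU C): in

	{ int a[n]; ... goto out; }
	out: ;

   the goto is expanded before `out' is defined, and leaving the block
   must restore the stack level that a[] changed; expand_goto therefore
   records a `struct goto_fixup' that is resolved once the label appears.  */
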
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (current_function->stmt->x_block_stack)
#define stack_block_stack (current_function->stmt->x_stack_block_stack)
#define cond_stack (current_function->stmt->x_cond_stack)
#define loop_stack (current_function->stmt->x_loop_stack)
#define case_stack (current_function->stmt->x_case_stack)
#define nesting_stack (current_function->stmt->x_nesting_stack)
#define nesting_depth (current_function->stmt->x_nesting_depth)
#define current_block_start_count (current_function->stmt->x_block_start_count)
#define last_expr_type (current_function->stmt->x_last_expr_type)
#define last_expr_value (current_function->stmt->x_last_expr_value)
#define expr_stmts_for_value (current_function->stmt->x_expr_stmts_for_value)
#define emit_filename (current_function->stmt->x_emit_filename)
#define emit_lineno (current_function->stmt->x_emit_lineno)
#define goto_fixup_chain (current_function->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences PROTO((int, const char *));
static void expand_goto_internal PROTO((tree, rtx, rtx));
static int expand_fixup PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label PROTO((rtx, rtx));
static void expand_nl_goto_receiver PROTO((void));
static void expand_nl_goto_receivers PROTO((struct nesting *));
static void fixup_gotos PROTO((struct nesting *, rtx, tree, rtx, int));
static void expand_null_return_1 PROTO((rtx, int));
static void expand_value_return PROTO((rtx));
static int tail_recursion_args PROTO((tree, tree));
static void expand_cleanups PROTO((tree, tree, int, int));
static void check_seenlabel PROTO((void));
static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs PROTO((case_node_ptr));
static void group_case_nodes PROTO((case_node_ptr));
static void balance_case_nodes PROTO((case_node_ptr *, case_node_ptr));
static int node_has_low_bound PROTO((case_node_ptr, tree));
static int node_has_high_bound PROTO((case_node_ptr, tree));
static int node_is_bounded PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable PROTO((rtx));
static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
static void mark_cond_nesting PROTO((struct nesting *));
static void mark_loop_nesting PROTO((struct nesting *));
static void mark_block_nesting PROTO((struct nesting *));
static void mark_case_nesting PROTO((struct nesting *));
static void mark_goto_fixup PROTO((struct goto_fixup *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	ggc_mark_tree (l->label);

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct case_node *node;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      node = n->data.case_stmt.case_list;
      while (node)
	{
	  ggc_mark_tree (node->low);
	  ggc_mark_tree (node->high);
	  ggc_mark_tree (node->code_label);
	  node = node->right;
	}

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      n = n->next;
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Mark P for GC.  */

void
mark_stmt_state (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  current_function->stmt
    = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}
\f
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     char *file;
     int line;
{
  emit_filename = file;
  emit_lineno = line;
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
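
/* Illustration only (GNU C): the statements expanded by
   expand_computed_goto look like

	void *p = &&lab;  ...  goto *p;

   using the GNU address-of-label (`&&') and computed-goto extensions.  */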
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
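
/* Illustration only (GNU C): for

	void f (void)
	{
	  __label__ failure;
	  void check (int i) { if (i < 0) goto failure; }
	  ...
	 failure: ;
	}

   the front end calls declare_nonlocal_label for `failure'; the
   `goto failure' inside the nested function is then expanded by
   expand_goto below as a nonlocal goto.  */
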
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->x_nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->x_nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;

	start_sequence ();
	pushlevel (0);
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
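
/* For example, n_occurrences (',', "=g,r") is 1; expand_asm_operands
   below uses this to count constraint alternatives.  */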
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
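
/* Illustration only (GNU C): a statement such as

	asm volatile ("add %1,%0" : "+r" (x) : "g" (y) : "cc");

   arrives here with OUTPUTS holding `x' with constraint "+r", INPUTS
   holding `y' with constraint "g", CLOBBERS naming "cc", and VOL
   nonzero.  The instruction mnemonic is illustrative.  */
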
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }
	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

1530 /* Eval the inputs and put them into ARGVEC.
1531 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1532
1533 i = 0;
1534 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1535 {
1536 int j;
1f06ee8d
RH
1537 int allows_reg = 0, allows_mem = 0;
1538 char *constraint, *orig_constraint;
2a230e9d 1539 int c_len;
1f06ee8d 1540 rtx op;
28d81abb
RK
1541
1542 /* If there's an erroneous arg, emit no insn,
1543 because the ASM_INPUT would get VOIDmode
1544 and that could cause a crash in reload. */
1545 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1546 return;
2a230e9d
BS
1547
1548 /* ??? Can this happen, and does the error message make any sense? */
28d81abb
RK
1549 if (TREE_PURPOSE (tail) == NULL_TREE)
1550 {
1551 error ("hard register `%s' listed as input operand to `asm'",
1552 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1553 return;
1554 }
1555
2a230e9d
BS
1556 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1557 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1f06ee8d 1558 orig_constraint = constraint;
28d81abb 1559
2a230e9d
BS
1560 /* Make sure constraint has neither `=', `+', nor '&'. */
1561
1562 for (j = 0; j < c_len; j++)
1563 switch (constraint[j])
28d81abb 1564 {
2a230e9d 1565 case '+': case '=': case '&':
1f06ee8d
RH
1566 if (constraint == orig_constraint)
1567 {
1568 error ("input operand constraint contains `%c'", constraint[j]);
1569 return;
1570 }
1571 break;
65fed0cb 1572
2a230e9d 1573 case '%':
1f06ee8d
RH
1574 if (constraint == orig_constraint
1575 && i + 1 == ninputs - ninout)
2a230e9d
BS
1576 {
1577 error ("`%%' constraint used with last operand");
1578 return;
1579 }
1580 break;
1581
1f06ee8d
RH
1582 case 'V': case 'm': case 'o':
1583 allows_mem = 1;
1584 break;
1585
1586 case '<': case '>':
2a230e9d 1587 case '?': case '!': case '*':
65fed0cb
RK
1588 case 'E': case 'F': case 'G': case 'H': case 'X':
1589 case 's': case 'i': case 'n':
1590 case 'I': case 'J': case 'K': case 'L': case 'M':
1591 case 'N': case 'O': case 'P': case ',':
1592#ifdef EXTRA_CONSTRAINT
1593 case 'Q': case 'R': case 'S': case 'T': case 'U':
1594#endif
1595 break;
1596
7b7a33b3
JW
1597 /* Whether or not a numeric constraint allows a register is
1598 decided by the matching constraint, and so there is no need
1599 to do anything special with them. We must handle them in
1600 the default case, so that we don't unnecessarily force
1601 operands to memory. */
1602 case '0': case '1': case '2': case '3': case '4':
cd76ea33 1603 case '5': case '6': case '7': case '8': case '9':
2a230e9d 1604 if (constraint[j] >= '0' + noutputs)
956d6950
JL
1605 {
1606 error
1607 ("matching constraint references invalid operand number");
1608 return;
1609 }
cd76ea33 1610
1f06ee8d 1611	    /* Try to find the real constraint for this dup.  */
1afbe1c4
RH
1612 if ((j == 0 && c_len == 1)
1613 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1f06ee8d
RH
1614 {
1615 tree o = outputs;
1616 for (j = constraint[j] - '0'; j > 0; --j)
1617 o = TREE_CHAIN (o);
1618
1619 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
1620 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1621 j = 0;
1622 break;
1623 }
1624
cd76ea33
RK
1625 /* ... fall through ... */
1626
1f06ee8d 1627 case 'p': case 'r':
65fed0cb
RK
1628 default:
1629 allows_reg = 1;
1630 break;
1f06ee8d
RH
1631
1632 case 'g':
1633 allows_reg = 1;
1634 allows_mem = 1;
1635 break;
28d81abb
RK
1636 }
1637
1f06ee8d 1638 if (! allows_reg && allows_mem)
65fed0cb
RK
1639 mark_addressable (TREE_VALUE (tail));
1640
1f06ee8d 1641 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
65fed0cb 1642
1afbe1c4 1643 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1644 {
1f06ee8d
RH
1645 if (allows_reg)
1646 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1647 else if (!allows_mem)
1648 warning ("asm operand %d probably doesn't match constraints", i);
1649 else if (CONSTANT_P (op))
1650 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1651 op);
1652 else if (GET_CODE (op) == REG
1653 || GET_CODE (op) == SUBREG
1654 || GET_CODE (op) == CONCAT)
1655 {
1656 tree type = TREE_TYPE (TREE_VALUE (tail));
1657 rtx memloc = assign_temp (type, 1, 1, 1);
65fed0cb 1658
1f06ee8d
RH
1659 emit_move_insn (memloc, op);
1660 op = memloc;
1661 }
1662 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
 1663	      /* We won't recognize volatile memory as an available
 1664		 memory_operand at this point.  Ignore it.  */
1665 ;
1666 else if (queued_subexp_p (op))
1667 ;
1668 else
1669 /* ??? Leave this only until we have experience with what
1670 happens in combine and elsewhere when constraints are
1671 not satisfied. */
1672 warning ("asm operand %d probably doesn't match constraints", i);
65fed0cb 1673 }
1f06ee8d 1674 XVECEXP (body, 3, i) = op;
2a230e9d 1675
28d81abb 1676 XVECEXP (body, 4, i) /* constraints */
38a448ca 1677 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1f06ee8d 1678 orig_constraint);
28d81abb
RK
1679 i++;
1680 }
1681
1682 /* Protect all the operands from the queue,
1683 now that they have all been evaluated. */
1684
235c5021 1685 for (i = 0; i < ninputs - ninout; i++)
28d81abb
RK
1686 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1687
1688 for (i = 0; i < noutputs; i++)
1689 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1690
235c5021
RK
1691 /* For in-out operands, copy output rtx to input rtx. */
1692 for (i = 0; i < ninout; i++)
1693 {
1694 static char match[9+1][2]
1695 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1696 int j = inout_opnum[i];
1697
1698 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1699 = output_rtx[j];
1700 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
ad7342be 1701 = gen_rtx_ASM_INPUT (inout_mode[i], match[j]);
235c5021
RK
1702 }
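  /* Illustration (not from the original source): an in-out operand
     written as "+r" (x) in the user's asm is represented here as
     output operand J plus an extra input whose constraint is the
     matching digit for J; the MATCH table above supplies that digit
     string ("0" through "9").  The variable x is hypothetical.  */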
1703
28d81abb
RK
1704 /* Now, for each output, construct an rtx
1705 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1706 ARGVEC CONSTRAINTS))
1707 If there is more than one, put them inside a PARALLEL. */
1708
1709 if (noutputs == 1 && nclobbers == 0)
1710 {
1711 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
38a448ca 1712 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb
RK
1713 }
1714 else if (noutputs == 0 && nclobbers == 0)
1715 {
1716 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1717 insn = emit_insn (body);
1718 }
1719 else
1720 {
1721 rtx obody = body;
1722 int num = noutputs;
1723 if (num == 0) num = 1;
38a448ca 1724 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
28d81abb
RK
1725
1726 /* For each output operand, store a SET. */
1727
1728 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1729 {
1730 XVECEXP (body, 0, i)
38a448ca
RH
1731 = gen_rtx_SET (VOIDmode,
1732 output_rtx[i],
c5c76735
JL
1733 gen_rtx_ASM_OPERANDS
1734 (VOIDmode,
1735 TREE_STRING_POINTER (string),
1736 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1737 i, argvec, constraints,
1738 filename, line));
1739
28d81abb
RK
1740 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1741 }
1742
1743 /* If there are no outputs (but there are some clobbers)
1744 store the bare ASM_OPERANDS into the PARALLEL. */
1745
1746 if (i == 0)
1747 XVECEXP (body, 0, i++) = obody;
1748
1749 /* Store (clobber REG) for each clobbered register specified. */
1750
b4ccaa16 1751 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1752 {
28d81abb 1753 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1754 int j = decode_reg_name (regname);
28d81abb 1755
b4ac57ab 1756 if (j < 0)
28d81abb 1757 {
c09e6498 1758 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1759 continue;
1760
c09e6498
RS
1761 if (j == -4) /* `memory', don't cache memory across asm */
1762 {
bffc6177 1763 XVECEXP (body, 0, i++)
38a448ca 1764 = gen_rtx_CLOBBER (VOIDmode,
c5c76735
JL
1765 gen_rtx_MEM
1766 (BLKmode,
1767 gen_rtx_SCRATCH (VOIDmode)));
c09e6498
RS
1768 continue;
1769 }
1770
956d6950 1771 /* Ignore unknown register, error already signaled. */
cc1f5387 1772 continue;
28d81abb
RK
1773 }
1774
1775 /* Use QImode since that's guaranteed to clobber just one reg. */
b4ccaa16 1776 XVECEXP (body, 0, i++)
38a448ca 1777 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
28d81abb
RK
1778 }
1779
1780 insn = emit_insn (body);
1781 }
1782
947255ed
RH
1783 /* For any outputs that needed reloading into registers, spill them
1784 back to where they belong. */
1785 for (i = 0; i < noutputs; ++i)
1786 if (real_output_rtx[i])
1787 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1788
28d81abb
RK
1789 free_temp_slots ();
1790}
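/* Illustrative sketch (not part of the original source): an extended
   asm such as

	asm volatile ("..." : "=r" (x) : "r" (y) : "cc", "memory");

   reaches the code above as a SET of the output wrapped in a PARALLEL.
   The `cc' clobber is skipped, the `memory' clobber contributes
   roughly

	(clobber (mem:BLK (scratch)))

   and a named register clobber contributes (clobber (reg:QI N)).
   The variables x and y are hypothetical.  */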
1791\f
1792/* Generate RTL to evaluate the expression EXP
 1793   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */
1794
1795void
1796expand_expr_stmt (exp)
1797 tree exp;
1798{
1799 /* If -W, warn about statements with no side effects,
1800 except for an explicit cast to void (e.g. for assert()), and
1801 except inside a ({...}) where they may be useful. */
1802 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1803 {
1804 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1805 && !(TREE_CODE (exp) == CONVERT_EXPR
1806 && TREE_TYPE (exp) == void_type_node))
1807 warning_with_file_and_line (emit_filename, emit_lineno,
1808 "statement with no effect");
1809 else if (warn_unused)
1810 warn_if_unused_value (exp);
1811 }
b6ec8c5f
RK
1812
1813 /* If EXP is of function type and we are expanding statements for
1814 value, convert it to pointer-to-function. */
1815 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1816 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1817
28d81abb 1818 last_expr_type = TREE_TYPE (exp);
a2cf7deb
CB
1819 last_expr_value = expand_expr (exp,
1820 (expr_stmts_for_value
1821 ? NULL_RTX : const0_rtx),
1822 VOIDmode, 0);
28d81abb
RK
1823
1824 /* If all we do is reference a volatile value in memory,
1825 copy it to a register to be sure it is actually touched. */
1826 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1827 && TREE_THIS_VOLATILE (exp))
1828 {
6a5bbbe6
RS
1829 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1830 ;
1831 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
28d81abb
RK
1832 copy_to_reg (last_expr_value);
1833 else
ddbe9812
RS
1834 {
1835 rtx lab = gen_label_rtx ();
1836
1837 /* Compare the value with itself to reference it. */
c5d5d461
JL
1838 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1839 expand_expr (TYPE_SIZE (last_expr_type),
1840 NULL_RTX, VOIDmode, 0),
1841 BLKmode, 0,
1842 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1843 lab);
ddbe9812
RS
1844 emit_label (lab);
1845 }
28d81abb
RK
1846 }
1847
1848 /* If this expression is part of a ({...}) and is in memory, we may have
1849 to preserve temporaries. */
1850 preserve_temp_slots (last_expr_value);
1851
1852 /* Free any temporaries used to evaluate this expression. Any temporary
1853 used as a result of this expression will already have been preserved
1854 above. */
1855 free_temp_slots ();
1856
1857 emit_queue ();
1858}
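/* Example (illustration only): with `-W', a statement such as

	x + 1;

   has no TREE_SIDE_EFFECTS and draws "statement with no effect",
   whereas

	(void) (x + 1);

   is an explicit CONVERT_EXPR to void and is deliberately not warned
   about.  The variable x is hypothetical.  */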
1859
1860/* Warn if EXP contains any computations whose results are not used.
1861 Return 1 if a warning is printed; 0 otherwise. */
1862
150a992a 1863int
28d81abb
RK
1864warn_if_unused_value (exp)
1865 tree exp;
1866{
1867 if (TREE_USED (exp))
1868 return 0;
1869
1870 switch (TREE_CODE (exp))
1871 {
1872 case PREINCREMENT_EXPR:
1873 case POSTINCREMENT_EXPR:
1874 case PREDECREMENT_EXPR:
1875 case POSTDECREMENT_EXPR:
1876 case MODIFY_EXPR:
1877 case INIT_EXPR:
1878 case TARGET_EXPR:
1879 case CALL_EXPR:
1880 case METHOD_CALL_EXPR:
1881 case RTL_EXPR:
81797aba 1882 case TRY_CATCH_EXPR:
28d81abb
RK
1883 case WITH_CLEANUP_EXPR:
1884 case EXIT_EXPR:
1885 /* We don't warn about COND_EXPR because it may be a useful
1886 construct if either arm contains a side effect. */
1887 case COND_EXPR:
1888 return 0;
1889
1890 case BIND_EXPR:
1891 /* For a binding, warn if no side effect within it. */
1892 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1893
de73f171
RK
1894 case SAVE_EXPR:
1895 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1896
28d81abb
RK
1897 case TRUTH_ORIF_EXPR:
1898 case TRUTH_ANDIF_EXPR:
1899 /* In && or ||, warn if 2nd operand has no side effect. */
1900 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1901
1902 case COMPOUND_EXPR:
a646a211
JM
1903 if (TREE_NO_UNUSED_WARNING (exp))
1904 return 0;
28d81abb
RK
1905 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1906 return 1;
4d23e509
RS
1907 /* Let people do `(foo (), 0)' without a warning. */
1908 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1909 return 0;
28d81abb
RK
1910 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1911
1912 case NOP_EXPR:
1913 case CONVERT_EXPR:
b4ac57ab 1914 case NON_LVALUE_EXPR:
28d81abb
RK
1915 /* Don't warn about values cast to void. */
1916 if (TREE_TYPE (exp) == void_type_node)
1917 return 0;
1918 /* Don't warn about conversions not explicit in the user's program. */
1919 if (TREE_NO_UNUSED_WARNING (exp))
1920 return 0;
1921 /* Assignment to a cast usually results in a cast of a modify.
55cd1c09
JW
1922 Don't complain about that. There can be an arbitrary number of
1923 casts before the modify, so we must loop until we find the first
1924 non-cast expression and then test to see if that is a modify. */
1925 {
1926 tree tem = TREE_OPERAND (exp, 0);
1927
1928 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1929 tem = TREE_OPERAND (tem, 0);
1930
de73f171
RK
1931 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1932 || TREE_CODE (tem) == CALL_EXPR)
55cd1c09
JW
1933 return 0;
1934 }
d1e1adfb 1935 goto warn;
28d81abb 1936
d1e1adfb
JM
1937 case INDIRECT_REF:
1938 /* Don't warn about automatic dereferencing of references, since
1939 the user cannot control it. */
1940 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1941 return warn_if_unused_value (TREE_OPERAND (exp, 0));
0f41302f 1942 /* ... fall through ... */
d1e1adfb 1943
28d81abb 1944 default:
ddbe9812
RS
1945 /* Referencing a volatile value is a side effect, so don't warn. */
1946 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1947 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1948 && TREE_THIS_VOLATILE (exp))
1949 return 0;
d1e1adfb 1950 warn:
28d81abb
RK
1951 warning_with_file_and_line (emit_filename, emit_lineno,
1952 "value computed is not used");
1953 return 1;
1954 }
1955}
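/* Example (illustration only): in

	(foo (), 0);

   the COMPOUND_EXPR case above warns about neither operand, since
   foo () is a CALL_EXPR and a constant second operand is explicitly
   allowed, while a bare

	a == b;

   reaches the default case and draws "value computed is not used".
   foo, a and b are hypothetical names.  */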
1956
1957/* Clear out the memory of the last expression evaluated. */
1958
1959void
1960clear_last_expr ()
1961{
1962 last_expr_type = 0;
1963}
1964
1965/* Begin a statement which will return a value.
1966 Return the RTL_EXPR for this statement expr.
1967 The caller must save that value and pass it to expand_end_stmt_expr. */
1968
1969tree
1970expand_start_stmt_expr ()
1971{
ca695ac9
JB
1972 int momentary;
1973 tree t;
1974
28d81abb
RK
1975 /* Make the RTL_EXPR node temporary, not momentary,
1976 so that rtl_expr_chain doesn't become garbage. */
ca695ac9
JB
1977 momentary = suspend_momentary ();
1978 t = make_node (RTL_EXPR);
28d81abb 1979 resume_momentary (momentary);
33c6ab80 1980 do_pending_stack_adjust ();
e922dbad 1981 start_sequence_for_rtl_expr (t);
28d81abb
RK
1982 NO_DEFER_POP;
1983 expr_stmts_for_value++;
1984 return t;
1985}
1986
1987/* Restore the previous state at the end of a statement that returns a value.
1988 Returns a tree node representing the statement's value and the
1989 insns to compute the value.
1990
1991 The nodes of that expression have been freed by now, so we cannot use them.
1992 But we don't want to do that anyway; the expression has already been
1993 evaluated and now we just want to use the value. So generate a RTL_EXPR
1994 with the proper type and RTL value.
1995
1996 If the last substatement was not an expression,
1997 return something with type `void'. */
1998
1999tree
2000expand_end_stmt_expr (t)
2001 tree t;
2002{
2003 OK_DEFER_POP;
2004
2005 if (last_expr_type == 0)
2006 {
2007 last_expr_type = void_type_node;
2008 last_expr_value = const0_rtx;
2009 }
2010 else if (last_expr_value == 0)
2011 /* There are some cases where this can happen, such as when the
 2012       statement is of void type.  */
2013 last_expr_value = const0_rtx;
2014 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2015 /* Remove any possible QUEUED. */
2016 last_expr_value = protect_from_queue (last_expr_value, 0);
2017
2018 emit_queue ();
2019
2020 TREE_TYPE (t) = last_expr_type;
2021 RTL_EXPR_RTL (t) = last_expr_value;
2022 RTL_EXPR_SEQUENCE (t) = get_insns ();
2023
2024 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2025
2026 end_sequence ();
2027
2028 /* Don't consider deleting this expr or containing exprs at tree level. */
2029 TREE_SIDE_EFFECTS (t) = 1;
2030 /* Propagate volatility of the actual RTL expr. */
2031 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2032
2033 last_expr_type = 0;
2034 expr_stmts_for_value--;
2035
2036 return t;
2037}
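/* Sketch of the expected calling sequence (assumed from the comments
   above, not a verbatim front-end excerpt): for a statement expression

	({ int i = f (); i + 1; })

   the caller does roughly

	tree t = expand_start_stmt_expr ();
	... expand the enclosed statements ...
	t = expand_end_stmt_expr (t);

   and the returned RTL_EXPR carries the value of the last statement.  */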
2038\f
28d81abb
RK
2039/* Generate RTL for the start of an if-then. COND is the expression
2040 whose truth should be tested.
2041
2042 If EXITFLAG is nonzero, this conditional is visible to
2043 `exit_something'. */
2044
2045void
2046expand_start_cond (cond, exitflag)
2047 tree cond;
2048 int exitflag;
2049{
2050 struct nesting *thiscond = ALLOC_NESTING ();
2051
2052 /* Make an entry on cond_stack for the cond we are entering. */
2053
2054 thiscond->next = cond_stack;
2055 thiscond->all = nesting_stack;
2056 thiscond->depth = ++nesting_depth;
2057 thiscond->data.cond.next_label = gen_label_rtx ();
2058 /* Before we encounter an `else', we don't need a separate exit label
2059 unless there are supposed to be exit statements
2060 to exit this conditional. */
2061 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2062 thiscond->data.cond.endif_label = thiscond->exit_label;
2063 cond_stack = thiscond;
2064 nesting_stack = thiscond;
2065
b93a436e 2066 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
2067}
2068
 2069/* Generate RTL between the then-clause and the elseif-clause
2070 of an if-then-elseif-.... */
2071
2072void
2073expand_start_elseif (cond)
2074 tree cond;
2075{
2076 if (cond_stack->data.cond.endif_label == 0)
2077 cond_stack->data.cond.endif_label = gen_label_rtx ();
2078 emit_jump (cond_stack->data.cond.endif_label);
2079 emit_label (cond_stack->data.cond.next_label);
2080 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2081 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
2082}
2083
2084/* Generate RTL between the then-clause and the else-clause
2085 of an if-then-else. */
2086
2087void
2088expand_start_else ()
2089{
2090 if (cond_stack->data.cond.endif_label == 0)
2091 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2092
28d81abb
RK
2093 emit_jump (cond_stack->data.cond.endif_label);
2094 emit_label (cond_stack->data.cond.next_label);
0f41302f 2095 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
2096}
2097
d947ba59
RK
2098/* After calling expand_start_else, turn this "else" into an "else if"
2099 by providing another condition. */
2100
2101void
2102expand_elseif (cond)
2103 tree cond;
2104{
2105 cond_stack->data.cond.next_label = gen_label_rtx ();
2106 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2107}
2108
28d81abb
RK
2109/* Generate RTL for the end of an if-then.
2110 Pop the record for it off of cond_stack. */
2111
2112void
2113expand_end_cond ()
2114{
2115 struct nesting *thiscond = cond_stack;
2116
b93a436e
JL
2117 do_pending_stack_adjust ();
2118 if (thiscond->data.cond.next_label)
2119 emit_label (thiscond->data.cond.next_label);
2120 if (thiscond->data.cond.endif_label)
2121 emit_label (thiscond->data.cond.endif_label);
28d81abb
RK
2122
2123 POPSTACK (cond_stack);
2124 last_expr_type = 0;
2125}
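/* Sketch of the calling sequence (assumed from the comments above):

	if (a) s1; else if (b) s2; else s3;

   is expanded roughly as

	expand_start_cond (a, 0);   ... s1 ...
	expand_start_elseif (b);    ... s2 ...
	expand_start_else ();       ... s3 ...
	expand_end_cond ();

   with next_label and endif_label supplying the branch targets.  */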
ca695ac9
JB
2126
2127
28d81abb
RK
2128\f
2129/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2130 loop should be exited by `exit_something'. This is a loop for which
2131 `expand_continue' will jump to the top of the loop.
2132
2133 Make an entry on loop_stack to record the labels associated with
2134 this loop. */
2135
2136struct nesting *
2137expand_start_loop (exit_flag)
2138 int exit_flag;
2139{
2140 register struct nesting *thisloop = ALLOC_NESTING ();
2141
2142 /* Make an entry on loop_stack for the loop we are entering. */
2143
2144 thisloop->next = loop_stack;
2145 thisloop->all = nesting_stack;
2146 thisloop->depth = ++nesting_depth;
2147 thisloop->data.loop.start_label = gen_label_rtx ();
2148 thisloop->data.loop.end_label = gen_label_rtx ();
8afad312 2149 thisloop->data.loop.alt_end_label = 0;
28d81abb
RK
2150 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2151 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2152 loop_stack = thisloop;
2153 nesting_stack = thisloop;
2154
2155 do_pending_stack_adjust ();
2156 emit_queue ();
37366632 2157 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
28d81abb
RK
2158 emit_label (thisloop->data.loop.start_label);
2159
2160 return thisloop;
2161}
2162
2163/* Like expand_start_loop but for a loop where the continuation point
2164 (for expand_continue_loop) will be specified explicitly. */
2165
2166struct nesting *
2167expand_start_loop_continue_elsewhere (exit_flag)
2168 int exit_flag;
2169{
2170 struct nesting *thisloop = expand_start_loop (exit_flag);
2171 loop_stack->data.loop.continue_label = gen_label_rtx ();
2172 return thisloop;
2173}
2174
2175/* Specify the continuation point for a loop started with
2176 expand_start_loop_continue_elsewhere.
2177 Use this at the point in the code to which a continue statement
2178 should jump. */
2179
2180void
2181expand_loop_continue_here ()
2182{
2183 do_pending_stack_adjust ();
37366632 2184 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
28d81abb
RK
2185 emit_label (loop_stack->data.loop.continue_label);
2186}
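/* Sketch (assumed usage, not a verbatim front-end excerpt): a
   `do BODY while (COND);' statement can be expanded as

	expand_start_loop_continue_elsewhere (1);
	... BODY ...
	expand_loop_continue_here ();
	expand_exit_loop_if_false (0, COND);
	expand_end_loop ();

   so that `continue' jumps to the test instead of the loop top.  */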
2187
2188/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2189 Pop the block off of loop_stack. */
2190
2191void
2192expand_end_loop ()
2193{
0720f6fb
MM
2194 rtx start_label = loop_stack->data.loop.start_label;
2195 rtx insn = get_last_insn ();
a7d308f7 2196 int needs_end_jump = 1;
28d81abb
RK
2197
2198 /* Mark the continue-point at the top of the loop if none elsewhere. */
2199 if (start_label == loop_stack->data.loop.continue_label)
2200 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2201
2202 do_pending_stack_adjust ();
2203
a7d308f7
R
2204 /* If optimizing, perhaps reorder the loop.
2205 First, try to use a condjump near the end.
2206 expand_exit_loop_if_false ends loops with unconditional jumps,
2207 like this:
2208
2209 if (test) goto label;
2210 optional: cleanup
2211 goto loop_stack->data.loop.end_label
2212 barrier
2213 label:
2214
2215 If we find such a pattern, we can end the loop earlier. */
2216
2217 if (optimize
2218 && GET_CODE (insn) == CODE_LABEL
2219 && LABEL_NAME (insn) == NULL
2220 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2221 {
2222 rtx label = insn;
2223 rtx jump = PREV_INSN (PREV_INSN (label));
2224
2225 if (GET_CODE (jump) == JUMP_INSN
2226 && GET_CODE (PATTERN (jump)) == SET
2227 && SET_DEST (PATTERN (jump)) == pc_rtx
2228 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2229 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2230 == loop_stack->data.loop.end_label))
2231 {
2232 rtx prev;
2233
2234 /* The test might be complex and reference LABEL multiple times,
2235 like the loop in loop_iterations to set vtop. To handle this,
2236 we move LABEL. */
2237 insn = PREV_INSN (label);
2238 reorder_insns (label, label, start_label);
2239
2240 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2241 {
2242 /* We ignore line number notes, but if we see any other note,
2243 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2244 NOTE_INSN_LOOP_*, we disable this optimization. */
2245 if (GET_CODE (prev) == NOTE)
2246 {
2247 if (NOTE_LINE_NUMBER (prev) < 0)
2248 break;
2249 continue;
2250 }
2251 if (GET_CODE (prev) == CODE_LABEL)
2252 break;
2253 if (GET_CODE (prev) == JUMP_INSN)
2254 {
2255 if (GET_CODE (PATTERN (prev)) == SET
2256 && SET_DEST (PATTERN (prev)) == pc_rtx
2257 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2258 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2259 == LABEL_REF)
2260 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2261 {
2262 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2263 = start_label;
2264 emit_note_after (NOTE_INSN_LOOP_END, prev);
2265 needs_end_jump = 0;
2266 }
2267 break;
2268 }
2269 }
2270 }
2271 }
2272
2273 /* If the loop starts with a loop exit, roll that to the end where
2274 it will optimize together with the jump back.
93de5c31
MM
2275
2276 We look for the conditional branch to the exit, except that once
2277 we find such a branch, we don't look past 30 instructions.
2278
2279 In more detail, if the loop presently looks like this (in pseudo-C):
2280
2281 start_label:
2282 if (test) goto end_label;
2283 body;
2284 goto start_label;
0720f6fb 2285 end_label:
93de5c31
MM
2286
2287 transform it to look like:
2288
2289 goto start_label;
2290 newstart_label:
2291 body;
2292 start_label:
2293 if (test) goto end_label;
2294 goto newstart_label;
0720f6fb 2295 end_label:
93de5c31
MM
2296
2297 Here, the `test' may actually consist of some reasonably complex
2298 code, terminating in a test. */
0720f6fb 2299
28d81abb 2300 if (optimize
a7d308f7 2301 && needs_end_jump
28d81abb
RK
2302 &&
2303 ! (GET_CODE (insn) == JUMP_INSN
2304 && GET_CODE (PATTERN (insn)) == SET
2305 && SET_DEST (PATTERN (insn)) == pc_rtx
2306 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2307 {
93de5c31 2308 int eh_regions = 0;
0720f6fb
MM
2309 int num_insns = 0;
2310 rtx last_test_insn = NULL_RTX;
93de5c31 2311
28d81abb
RK
2312 /* Scan insns from the top of the loop looking for a qualified
2313 conditional exit. */
2314 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2315 insn = NEXT_INSN (insn))
2316 {
93de5c31
MM
2317 if (GET_CODE (insn) == NOTE)
2318 {
2319 if (optimize < 2
2320 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2321 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2322 /* The code that actually moves the exit test will
2323 carefully leave BLOCK notes in their original
2324 location. That means, however, that we can't debug
2325 the exit test itself. So, we refuse to move code
2326 containing BLOCK notes at low optimization levels. */
2327 break;
2328
2329 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2330 ++eh_regions;
2331 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2332 {
2333 --eh_regions;
2334 if (eh_regions < 0)
2335 /* We've come to the end of an EH region, but
2336 never saw the beginning of that region. That
2337 means that an EH region begins before the top
2338 of the loop, and ends in the middle of it. The
2339 existence of such a situation violates a basic
2340 assumption in this code, since that would imply
2341 that even when EH_REGIONS is zero, we might
2342 move code out of an exception region. */
2343 abort ();
2344 }
28d81abb 2345
f114df20
JL
2346 /* We must not walk into a nested loop. */
2347 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2348 break;
2349
93de5c31
MM
2350 /* We already know this INSN is a NOTE, so there's no
2351 point in looking at it to see if it's a JUMP. */
2352 continue;
2353 }
28d81abb
RK
2354
2355 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2356 num_insns++;
2357
2358 if (last_test_insn && num_insns > 30)
2359 break;
2360
93de5c31
MM
2361 if (eh_regions > 0)
2362 /* We don't want to move a partial EH region. Consider:
2363
2364 while ( ( { try {
2365 if (cond ()) 0;
2366 else {
2367 bar();
2368 1;
2369 }
2370 } catch (...) {
2371 1;
2372 } )) {
2373 body;
2374 }
2375
2376 This isn't legal C++, but here's what it's supposed to
2377 mean: if cond() is true, stop looping. Otherwise,
2378 call bar, and keep looping. In addition, if cond
2379 throws an exception, catch it and keep looping. Such
 2380	       constructs are certainly legal in LISP.
2381
2382 We should not move the `if (cond()) 0' test since then
2383 the EH-region for the try-block would be broken up.
 2384	       (In this case we would move the EH_BEG note for the `try'
2385 and `if cond()' but not the call to bar() or the
2386 EH_END note.)
2387
2388 So we don't look for tests within an EH region. */
2389 continue;
2390
0720f6fb 2391 if (GET_CODE (insn) == JUMP_INSN
28d81abb 2392 && GET_CODE (PATTERN (insn)) == SET
0720f6fb
MM
2393 && SET_DEST (PATTERN (insn)) == pc_rtx)
2394 {
2395 /* This is indeed a jump. */
2396 rtx dest1 = NULL_RTX;
2397 rtx dest2 = NULL_RTX;
2398 rtx potential_last_test;
2399 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2400 {
2401 /* A conditional jump. */
2402 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2403 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2404 potential_last_test = insn;
2405 }
2406 else
2407 {
2408 /* An unconditional jump. */
2409 dest1 = SET_SRC (PATTERN (insn));
2410 /* Include the BARRIER after the JUMP. */
2411 potential_last_test = NEXT_INSN (insn);
2412 }
2413
2414 do {
2415 if (dest1 && GET_CODE (dest1) == LABEL_REF
2416 && ((XEXP (dest1, 0)
2417 == loop_stack->data.loop.alt_end_label)
2418 || (XEXP (dest1, 0)
2419 == loop_stack->data.loop.end_label)))
2420 {
2421 last_test_insn = potential_last_test;
2422 break;
2423 }
2424
2425 /* If this was a conditional jump, there may be
2426 another label at which we should look. */
2427 dest1 = dest2;
2428 dest2 = NULL_RTX;
2429 } while (dest1);
2430 }
28d81abb
RK
2431 }
2432
2433 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2434 {
2435 /* We found one. Move everything from there up
2436 to the end of the loop, and add a jump into the loop
2437 to jump to there. */
2438 register rtx newstart_label = gen_label_rtx ();
2439 register rtx start_move = start_label;
93de5c31 2440 rtx next_insn;
28d81abb 2441
b4ac57ab 2442 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
28d81abb
RK
2443 then we want to move this note also. */
2444 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2445 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2446 == NOTE_INSN_LOOP_CONT))
2447 start_move = PREV_INSN (start_move);
2448
2449 emit_label_after (newstart_label, PREV_INSN (start_move));
93de5c31
MM
2450
2451 /* Actually move the insns. Start at the beginning, and
2452 keep copying insns until we've copied the
2453 last_test_insn. */
2454 for (insn = start_move; insn; insn = next_insn)
2455 {
2456 /* Figure out which insn comes after this one. We have
2457 to do this before we move INSN. */
2458 if (insn == last_test_insn)
2459 /* We've moved all the insns. */
2460 next_insn = NULL_RTX;
2461 else
2462 next_insn = NEXT_INSN (insn);
2463
2464 if (GET_CODE (insn) == NOTE
2465 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2466 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2467 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2468 NOTE_INSN_BLOCK_ENDs because the correct generation
2469 of debugging information depends on these appearing
2470 in the same order in the RTL and in the tree
2471 structure, where they are represented as BLOCKs.
2472 So, we don't move block notes. Of course, moving
2473 the code inside the block is likely to make it
2474 impossible to debug the instructions in the exit
2475 test, but such is the price of optimization. */
2476 continue;
2477
2478 /* Move the INSN. */
2479 reorder_insns (insn, insn, get_last_insn ());
2480 }
2481
28d81abb
RK
2482 emit_jump_insn_after (gen_jump (start_label),
2483 PREV_INSN (newstart_label));
2484 emit_barrier_after (PREV_INSN (newstart_label));
2485 start_label = newstart_label;
2486 }
2487 }
2488
a7d308f7
R
2489 if (needs_end_jump)
2490 {
2491 emit_jump (start_label);
2492 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2493 }
28d81abb
RK
2494 emit_label (loop_stack->data.loop.end_label);
2495
2496 POPSTACK (loop_stack);
2497
2498 last_expr_type = 0;
2499}
2500
2501/* Generate a jump to the current loop's continue-point.
2502 This is usually the top of the loop, but may be specified
2503 explicitly elsewhere. If not currently inside a loop,
2504 return 0 and do nothing; caller will print an error message. */
2505
2506int
2507expand_continue_loop (whichloop)
2508 struct nesting *whichloop;
2509{
2510 last_expr_type = 0;
2511 if (whichloop == 0)
2512 whichloop = loop_stack;
2513 if (whichloop == 0)
2514 return 0;
37366632
RK
2515 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2516 NULL_RTX);
28d81abb
RK
2517 return 1;
2518}
2519
2520/* Generate a jump to exit the current loop. If not currently inside a loop,
2521 return 0 and do nothing; caller will print an error message. */
2522
2523int
2524expand_exit_loop (whichloop)
2525 struct nesting *whichloop;
2526{
2527 last_expr_type = 0;
2528 if (whichloop == 0)
2529 whichloop = loop_stack;
2530 if (whichloop == 0)
2531 return 0;
37366632 2532 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
28d81abb
RK
2533 return 1;
2534}
2535
2536/* Generate a conditional jump to exit the current loop if COND
2537 evaluates to zero. If not currently inside a loop,
2538 return 0 and do nothing; caller will print an error message. */
2539
2540int
2541expand_exit_loop_if_false (whichloop, cond)
2542 struct nesting *whichloop;
2543 tree cond;
2544{
b93a436e
JL
2545 rtx label = gen_label_rtx ();
2546 rtx last_insn;
28d81abb 2547 last_expr_type = 0;
b93a436e 2548
28d81abb
RK
2549 if (whichloop == 0)
2550 whichloop = loop_stack;
2551 if (whichloop == 0)
2552 return 0;
b93a436e
JL
2553 /* In order to handle fixups, we actually create a conditional jump
 2554     around an unconditional branch to exit the loop.  If fixups are
2555 necessary, they go before the unconditional branch. */
d902c7ea 2556
b93a436e
JL
2557
2558 do_jump (cond, NULL_RTX, label);
2559 last_insn = get_last_insn ();
2560 if (GET_CODE (last_insn) == CODE_LABEL)
2561 whichloop->data.loop.alt_end_label = last_insn;
2562 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2563 NULL_RTX);
2564 emit_label (label);
ca695ac9 2565
28d81abb
RK
2566 return 1;
2567}
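/* Illustration (not from the original source) of the shape emitted
   above, which is also the pattern expand_end_loop looks for when it
   reorders the loop:

	if (COND) goto label;		conditional jump around the exit
	goto end_label;			fixups, if any, go just before
	barrier
	label:
*/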
2568
4a050cc2
JL
2569/* Return nonzero if the loop nest is empty. Else return zero. */
2570
2571int
2572stmt_loop_nest_empty ()
2573{
2574 return (loop_stack == NULL);
2575}
2576
28d81abb
RK
2577/* Return non-zero if we should preserve sub-expressions as separate
2578 pseudos. We never do so if we aren't optimizing. We always do so
2579 if -fexpensive-optimizations.
2580
2581 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2582 the loop may still be a small one. */
2583
2584int
2585preserve_subexpressions_p ()
2586{
2587 rtx insn;
2588
2589 if (flag_expensive_optimizations)
2590 return 1;
2591
3f1d071b 2592 if (optimize == 0 || current_function == 0 || loop_stack == 0)
28d81abb
RK
2593 return 0;
2594
2595 insn = get_last_insn_anywhere ();
2596
2597 return (insn
2598 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2599 < n_non_fixed_regs * 3));
2600
2601}
2602
2603/* Generate a jump to exit the current loop, conditional, binding contour
2604 or case statement. Not all such constructs are visible to this function,
2605 only those started with EXIT_FLAG nonzero. Individual languages use
2606 the EXIT_FLAG parameter to control which kinds of constructs you can
2607 exit this way.
2608
2609 If not currently inside anything that can be exited,
2610 return 0 and do nothing; caller will print an error message. */
2611
2612int
2613expand_exit_something ()
2614{
2615 struct nesting *n;
2616 last_expr_type = 0;
2617 for (n = nesting_stack; n; n = n->all)
2618 if (n->exit_label != 0)
2619 {
37366632 2620 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
28d81abb
RK
2621 return 1;
2622 }
2623
2624 return 0;
2625}
2626\f
2627/* Generate RTL to return from the current function, with no value.
2628 (That is, we do not do anything about returning any value.) */
2629
2630void
2631expand_null_return ()
2632{
2633 struct nesting *block = block_stack;
2634 rtx last_insn = 0;
2635
2636 /* Does any pending block have cleanups? */
2637
2638 while (block && block->data.block.cleanups == 0)
2639 block = block->next;
2640
2641 /* If yes, use a goto to return, since that runs cleanups. */
2642
2643 expand_null_return_1 (last_insn, block != 0);
2644}
2645
2646/* Generate RTL to return from the current function, with value VAL. */
2647
8d800403 2648static void
28d81abb
RK
2649expand_value_return (val)
2650 rtx val;
2651{
2652 struct nesting *block = block_stack;
2653 rtx last_insn = get_last_insn ();
2654 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2655
2656 /* Copy the value to the return location
2657 unless it's already there. */
2658
2659 if (return_reg != val)
77636079
RS
2660 {
2661#ifdef PROMOTE_FUNCTION_RETURN
77636079
RS
2662 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2663 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
2664 enum machine_mode mode
2665 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2666 &unsignedp, 1);
77636079
RS
2667
2668 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
3af6dfd8 2669 convert_move (return_reg, val, unsignedp);
77636079
RS
2670 else
2671#endif
2672 emit_move_insn (return_reg, val);
2673 }
28d81abb
RK
2674 if (GET_CODE (return_reg) == REG
2675 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
38a448ca 2676 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
e5eeae65
JW
2677 /* Handle calls that return values in multiple non-contiguous locations.
2678 The Irix 6 ABI has examples of this. */
2679 else if (GET_CODE (return_reg) == PARALLEL)
2680 {
2681 int i;
2682
2683 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2684 {
2685 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2686
2687 if (GET_CODE (x) == REG
2688 && REGNO (x) < FIRST_PSEUDO_REGISTER)
38a448ca 2689 emit_insn (gen_rtx_USE (VOIDmode, x));
e5eeae65
JW
2690 }
2691 }
28d81abb
RK
2692
2693 /* Does any pending block have cleanups? */
2694
2695 while (block && block->data.block.cleanups == 0)
2696 block = block->next;
2697
2698 /* If yes, use a goto to return, since that runs cleanups.
2699 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2700
2701 expand_null_return_1 (last_insn, block != 0);
2702}
2703
2704/* Output a return with no value. If LAST_INSN is nonzero,
2705 pretend that the return takes place after LAST_INSN.
2706 If USE_GOTO is nonzero then don't use a return instruction;
2707 go to the return label instead. This causes any cleanups
2708 of pending blocks to be executed normally. */
2709
2710static void
2711expand_null_return_1 (last_insn, use_goto)
2712 rtx last_insn;
2713 int use_goto;
2714{
2715 rtx end_label = cleanup_label ? cleanup_label : return_label;
2716
2717 clear_pending_stack_adjust ();
2718 do_pending_stack_adjust ();
2719 last_expr_type = 0;
2720
2721 /* PCC-struct return always uses an epilogue. */
2722 if (current_function_returns_pcc_struct || use_goto)
2723 {
2724 if (end_label == 0)
2725 end_label = return_label = gen_label_rtx ();
37366632 2726 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2727 return;
2728 }
2729
2730 /* Otherwise output a simple return-insn if one is available,
2731 unless it won't do the job. */
2732#ifdef HAVE_return
2733 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2734 {
2735 emit_jump_insn (gen_return ());
2736 emit_barrier ();
2737 return;
2738 }
2739#endif
2740
2741 /* Otherwise jump to the epilogue. */
37366632 2742 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2743}
2744\f
2745/* Generate RTL to evaluate the expression RETVAL and return it
2746 from the current function. */
2747
2748void
2749expand_return (retval)
2750 tree retval;
2751{
2752 /* If there are any cleanups to be performed, then they will
2753 be inserted following LAST_INSN. It is desirable
2754 that the last_insn, for such purposes, should be the
2755 last insn before computing the return value. Otherwise, cleanups
2756 which call functions can clobber the return value. */
2757 /* ??? rms: I think that is erroneous, because in C++ it would
2758 run destructors on variables that might be used in the subsequent
2759 computation of the return value. */
2760 rtx last_insn = 0;
2761 register rtx val = 0;
2762 register rtx op0;
2763 tree retval_rhs;
2764 int cleanups;
28d81abb
RK
2765
2766 /* If function wants no value, give it none. */
2767 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2768 {
37366632 2769 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2770 emit_queue ();
28d81abb
RK
2771 expand_null_return ();
2772 return;
2773 }
2774
2775 /* Are any cleanups needed? E.g. C++ destructors to be run? */
7a9a00be
MS
2776 /* This is not sufficient. We also need to watch for cleanups of the
2777 expression we are about to expand. Unfortunately, we cannot know
2778 if it has cleanups until we expand it, and we want to change how we
2779 expand it depending upon if we need cleanups. We can't win. */
2780#if 0
28d81abb 2781 cleanups = any_pending_cleanups (1);
7a9a00be
MS
2782#else
2783 cleanups = 1;
2784#endif
28d81abb
RK
2785
2786 if (TREE_CODE (retval) == RESULT_DECL)
2787 retval_rhs = retval;
2788 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2789 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2790 retval_rhs = TREE_OPERAND (retval, 1);
2791 else if (TREE_TYPE (retval) == void_type_node)
2792 /* Recognize tail-recursive call to void function. */
2793 retval_rhs = retval;
2794 else
2795 retval_rhs = NULL_TREE;
2796
2797 /* Only use `last_insn' if there are cleanups which must be run. */
2798 if (cleanups || cleanup_label != 0)
2799 last_insn = get_last_insn ();
2800
2801 /* Distribute return down conditional expr if either of the sides
2802 may involve tail recursion (see test below). This enhances the number
2803 of tail recursions we see. Don't do this always since it can produce
2804 sub-optimal code in some cases and we distribute assignments into
2805 conditional expressions when it would help. */
2806
2807 if (optimize && retval_rhs != 0
2808 && frame_offset == 0
2809 && TREE_CODE (retval_rhs) == COND_EXPR
2810 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2811 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2812 {
2813 rtx label = gen_label_rtx ();
a0a34f94
RK
2814 tree expr;
2815
37366632 2816 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
1483bddb 2817 start_cleanup_deferral ();
dd98f85c 2818 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
a0a34f94
RK
2819 DECL_RESULT (current_function_decl),
2820 TREE_OPERAND (retval_rhs, 1));
2821 TREE_SIDE_EFFECTS (expr) = 1;
2822 expand_return (expr);
28d81abb 2823 emit_label (label);
a0a34f94 2824
dd98f85c 2825 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
a0a34f94
RK
2826 DECL_RESULT (current_function_decl),
2827 TREE_OPERAND (retval_rhs, 2));
2828 TREE_SIDE_EFFECTS (expr) = 1;
2829 expand_return (expr);
1483bddb 2830 end_cleanup_deferral ();
28d81abb
RK
2831 return;
2832 }
2833
642cac7b 2834 /* Attempt to optimize the call if it is tail recursive. */
564ea051
JW
2835 if (optimize_tail_recursion (retval_rhs, last_insn))
2836 return;
642cac7b 2837
28d81abb
RK
2838#ifdef HAVE_return
2839 /* This optimization is safe if there are local cleanups
2840 because expand_null_return takes care of them.
2841 ??? I think it should also be safe when there is a cleanup label,
2842 because expand_null_return takes care of them, too.
2843 Any reason why not? */
2844 if (HAVE_return && cleanup_label == 0
5eb94e4e
RK
2845 && ! current_function_returns_pcc_struct
2846 && BRANCH_COST <= 1)
28d81abb
RK
2847 {
2848 /* If this is return x == y; then generate
2849 if (x == y) return 1; else return 0;
3f8b69de
TG
2850 if we can do it with explicit return insns and branches are cheap,
2851 but not if we have the corresponding scc insn. */
2852 int has_scc = 0;
28d81abb
RK
2853 if (retval_rhs)
2854 switch (TREE_CODE (retval_rhs))
2855 {
2856 case EQ_EXPR:
3f8b69de
TG
2857#ifdef HAVE_seq
2858 has_scc = HAVE_seq;
2859#endif
28d81abb 2860 case NE_EXPR:
3f8b69de
TG
2861#ifdef HAVE_sne
2862 has_scc = HAVE_sne;
2863#endif
28d81abb 2864 case GT_EXPR:
3f8b69de
TG
2865#ifdef HAVE_sgt
2866 has_scc = HAVE_sgt;
2867#endif
28d81abb 2868 case GE_EXPR:
3f8b69de
TG
2869#ifdef HAVE_sge
2870 has_scc = HAVE_sge;
2871#endif
28d81abb 2872 case LT_EXPR:
3f8b69de
TG
2873#ifdef HAVE_slt
2874 has_scc = HAVE_slt;
2875#endif
28d81abb 2876 case LE_EXPR:
3f8b69de
TG
2877#ifdef HAVE_sle
2878 has_scc = HAVE_sle;
2879#endif
28d81abb
RK
2880 case TRUTH_ANDIF_EXPR:
2881 case TRUTH_ORIF_EXPR:
2882 case TRUTH_AND_EXPR:
2883 case TRUTH_OR_EXPR:
2884 case TRUTH_NOT_EXPR:
94ed3915 2885 case TRUTH_XOR_EXPR:
3f8b69de
TG
2886 if (! has_scc)
2887 {
2888 op0 = gen_label_rtx ();
2889 jumpifnot (retval_rhs, op0);
2890 expand_value_return (const1_rtx);
2891 emit_label (op0);
2892 expand_value_return (const0_rtx);
2893 return;
2894 }
e9a25f70
JL
2895 break;
2896
2897 default:
2898 break;
28d81abb
RK
2899 }
2900 }
2901#endif /* HAVE_return */
2902
4c485b63
JL
2903 /* If the result is an aggregate that is being returned in one (or more)
2904 registers, load the registers here. The compiler currently can't handle
2905 copying a BLKmode value into registers. We could put this code in a
2906 more general area (for use by everyone instead of just function
2907 call/return), but until this feature is generally usable it is kept here
3ffeb8f1
JW
2908 (and in expand_call). The value must go into a pseudo in case there
2909 are cleanups that will clobber the real return register. */
4c485b63
JL
2910
2911 if (retval_rhs != 0
2912 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2913 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2914 {
a7f875d7 2915 int i, bitpos, xbitpos;
4c485b63
JL
2916 int big_endian_correction = 0;
2917 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2918 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
c84e2712
KG
2919 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2920 (unsigned int)BITS_PER_WORD);
4c485b63 2921 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
c16ddde3 2922 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
4c485b63 2923 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 2924 enum machine_mode tmpmode, result_reg_mode;
4c485b63 2925
a7f875d7
RK
2926 /* Structures whose size is not a multiple of a word are aligned
2927 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2928 machine, this means we must skip the empty high order bytes when
2929 calculating the bit offset. */
2930 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2931 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2932 * BITS_PER_UNIT));
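      /* Worked example (illustrative): with 32-bit words and a
	 5-byte structure, bytes % UNITS_PER_WORD == 1, so
	 big_endian_correction == 32 - 1 * 8 == 24, and the first
	 store below is right justified 24 bits into its word.  */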
2933
2934 /* Copy the structure BITSIZE bits at a time. */
2935 for (bitpos = 0, xbitpos = big_endian_correction;
2936 bitpos < bytes * BITS_PER_UNIT;
2937 bitpos += bitsize, xbitpos += bitsize)
4c485b63 2938 {
a7f875d7 2939 /* We need a new destination pseudo each time xbitpos is
abc95ed3 2940 on a word boundary and when xbitpos == big_endian_correction
a7f875d7
RK
2941 (the first time through). */
2942 if (xbitpos % BITS_PER_WORD == 0
2943 || xbitpos == big_endian_correction)
4c485b63 2944 {
a7f875d7
RK
2945 /* Generate an appropriate register. */
2946 dst = gen_reg_rtx (word_mode);
2947 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2948
2949 /* Clobber the destination before we move anything into it. */
38a448ca 2950 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
4c485b63 2951 }
a7f875d7
RK
2952
2953 /* We need a new source operand each time bitpos is on a word
2954 boundary. */
2955 if (bitpos % BITS_PER_WORD == 0)
2956 src = operand_subword_force (result_val,
2957 bitpos / BITS_PER_WORD,
2958 BLKmode);
2959
2960 /* Use bitpos for the source extraction (left justified) and
2961 xbitpos for the destination store (right justified). */
2962 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2963 extract_bit_field (src, bitsize,
2964 bitpos % BITS_PER_WORD, 1,
2965 NULL_RTX, word_mode,
2966 word_mode,
2967 bitsize / BITS_PER_UNIT,
2968 BITS_PER_WORD),
2969 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
4c485b63
JL
2970 }
2971
4c485b63
JL
2972 /* Find the smallest integer mode large enough to hold the
2973 entire structure and use that mode instead of BLKmode
2974 on the USE insn for the return register. */
2975 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2976 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
0c61f541 2977 tmpmode != VOIDmode;
4c485b63 2978 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3ffeb8f1
JW
2979 {
2980 /* Have we found a large enough mode? */
2981 if (GET_MODE_SIZE (tmpmode) >= bytes)
2982 break;
2983 }
4c485b63
JL
2984
2985 /* No suitable mode found. */
0c61f541 2986 if (tmpmode == VOIDmode)
3ffeb8f1 2987 abort ();
4c485b63 2988
3ffeb8f1
JW
2989 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2990
af55da56
JW
2991 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2992 result_reg_mode = word_mode;
2993 else
2994 result_reg_mode = tmpmode;
2995 result_reg = gen_reg_rtx (result_reg_mode);
2996
3ffeb8f1 2997 emit_queue ();
3ffeb8f1 2998 for (i = 0; i < n_regs; i++)
af55da56 2999 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 3000 result_pseudos[i]);
4c485b63 3001
af55da56
JW
3002 if (tmpmode != result_reg_mode)
3003 result_reg = gen_lowpart (tmpmode, result_reg);
3004
4c485b63
JL
3005 expand_value_return (result_reg);
3006 }
3007 else if (cleanups
28d81abb
RK
3008 && retval_rhs != 0
3009 && TREE_TYPE (retval_rhs) != void_type_node
3010 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
3011 {
3012 /* Calculate the return value into a pseudo reg. */
dd98f85c
JM
3013 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
3014 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3015 val = force_not_mem (val);
28d81abb 3016 emit_queue ();
28d81abb
RK
3017 /* Return the calculated value, doing cleanups first. */
3018 expand_value_return (val);
3019 }
3020 else
3021 {
3022 /* No cleanups or no hard reg used;
3023 calculate value into hard return reg. */
cba389cd 3024 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb 3025 emit_queue ();
28d81abb
RK
3026 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3027 }
3028}
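/* Example of the COND_EXPR distribution above (illustration only):

	return p ? f () : g ();

   is effectively rewritten as

	if (p) return f (); else return g ();

   so that each arm can be considered separately for tail recursion.
   p, f and g are hypothetical names.  */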
3029
3030/* Return 1 if the end of the generated RTX is not a barrier.
3031 This means code already compiled can drop through. */
3032
3033int
3034drop_through_at_end_p ()
3035{
3036 rtx insn = get_last_insn ();
3037 while (insn && GET_CODE (insn) == NOTE)
3038 insn = PREV_INSN (insn);
3039 return insn && GET_CODE (insn) != BARRIER;
3040}
3041\f
642cac7b
JL
3042/* Test CALL_EXPR to determine if it is a potential tail recursion call
3043 and emit code to optimize the tail recursion. LAST_INSN indicates where
564ea051
JW
3044 to place the jump to the tail recursion label. Return TRUE if the
3045 call was optimized into a goto.
642cac7b
JL
3046
3047 This is only used by expand_return, but expand_call is expected to
3048 use it soon. */
3049
564ea051 3050int
642cac7b
JL
3051optimize_tail_recursion (call_expr, last_insn)
3052 tree call_expr;
3053 rtx last_insn;
3054{
3055 /* For tail-recursive call to current function,
3056 just jump back to the beginning.
3057 It's unsafe if any auto variable in this function
3058 has its address taken; for simplicity,
3059 require stack frame to be empty. */
3060 if (optimize && call_expr != 0
3061 && frame_offset == 0
3062 && TREE_CODE (call_expr) == CALL_EXPR
3063 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
3064 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
3065 /* Finish checking validity, and if valid emit code
3066 to set the argument variables for the new call. */
3067 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
3068 DECL_ARGUMENTS (current_function_decl)))
3069 {
3070 if (tail_recursion_label == 0)
3071 {
3072 tail_recursion_label = gen_label_rtx ();
3073 emit_label_after (tail_recursion_label,
3074 tail_recursion_reentry);
3075 }
3076 emit_queue ();
3077 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3078 emit_barrier ();
564ea051 3079 return 1;
642cac7b 3080 }
564ea051
JW
3081
3082 return 0;
642cac7b
JL
3083}
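/* Sketch (illustration only): in

	int f (int n, int acc) { ... return f (n - 1, n * acc); }

   the recursive call passes the tests above (assuming the stack frame
   is empty), so the new argument values are stored into the formals
   and control jumps back to tail_recursion_label instead of emitting
   a real call.  f, n and acc are hypothetical names.  */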
3084
28d81abb
RK
3085/* Emit code to alter this function's formal parms for a tail-recursive call.
3086 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3087 FORMALS is the chain of decls of formals.
3088 Return 1 if this can be done;
3089 otherwise return 0 and do not emit any code. */
3090
3091static int
3092tail_recursion_args (actuals, formals)
3093 tree actuals, formals;
3094{
3095 register tree a = actuals, f = formals;
3096 register int i;
3097 register rtx *argvec;
3098
3099 /* Check that number and types of actuals are compatible
3100 with the formals. This is not always true in valid C code.
3101 Also check that no formal needs to be addressable
3102 and that all formals are scalars. */
3103
3104 /* Also count the args. */
3105
3106 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3107 {
5c7fe359
RK
3108 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3109 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
28d81abb
RK
3110 return 0;
3111 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3112 return 0;
3113 }
3114 if (a != 0 || f != 0)
3115 return 0;
3116
3117 /* Compute all the actuals. */
3118
3119 argvec = (rtx *) alloca (i * sizeof (rtx));
3120
3121 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 3122 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3123
3124 /* Find which actual values refer to current values of previous formals.
3125 Copy each of them now, before any formal is changed. */
3126
3127 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3128 {
3129 int copy = 0;
3130 register int j;
3131 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3132 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3133 { copy = 1; break; }
3134 if (copy)
3135 argvec[i] = copy_to_reg (argvec[i]);
3136 }
3137
3138 /* Store the values of the actuals into the formals. */
3139
3140 for (f = formals, a = actuals, i = 0; f;
3141 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3142 {
98f3b471 3143 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
28d81abb
RK
3144 emit_move_insn (DECL_RTL (f), argvec[i]);
3145 else
3146 convert_move (DECL_RTL (f), argvec[i],
3147 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3148 }
3149
3150 free_temp_slots ();
3151 return 1;
3152}
3153\f
3154/* Generate the RTL code for entering a binding contour.
3155 The variables are declared one by one, by calls to `expand_decl'.
3156
3157 EXIT_FLAG is nonzero if this construct should be visible to
3158 `exit_something'. */
3159
3160void
3161expand_start_bindings (exit_flag)
3162 int exit_flag;
3163{
3164 struct nesting *thisblock = ALLOC_NESTING ();
b93a436e 3165 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
28d81abb
RK
3166
3167 /* Make an entry on block_stack for the block we are entering. */
3168
3169 thisblock->next = block_stack;
3170 thisblock->all = nesting_stack;
3171 thisblock->depth = ++nesting_depth;
3172 thisblock->data.block.stack_level = 0;
3173 thisblock->data.block.cleanups = 0;
3f1d071b 3174 thisblock->data.block.n_function_calls = 0;
e976b8b2 3175 thisblock->data.block.exception_region = 0;
3f1d071b 3176 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
e976b8b2
MS
3177
3178 thisblock->data.block.conditional_code = 0;
3179 thisblock->data.block.last_unconditional_cleanup = note;
3180 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3181
28d81abb
RK
3182 if (block_stack
3183 && !(block_stack->data.block.cleanups == NULL_TREE
3184 && block_stack->data.block.outer_cleanups == NULL_TREE))
3185 thisblock->data.block.outer_cleanups
3186 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3187 block_stack->data.block.outer_cleanups);
3188 else
3189 thisblock->data.block.outer_cleanups = 0;
28d81abb
RK
3190 thisblock->data.block.label_chain = 0;
3191 thisblock->data.block.innermost_stack_block = stack_block_stack;
3192 thisblock->data.block.first_insn = note;
3f1d071b 3193 thisblock->data.block.block_start_count = ++current_block_start_count;
28d81abb
RK
3194 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3195 block_stack = thisblock;
3196 nesting_stack = thisblock;
3197
b93a436e
JL
3198 /* Make a new level for allocating stack slots. */
3199 push_temp_slots ();
28d81abb
RK
3200}
3201
e976b8b2
MS
3202/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3203 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
 3204   expand_expr are made.  After we end the region, we know that all
 3205   temporaries that were created by TARGET_EXPRs will have been
 3206   destroyed and their space freed for reuse.  */
3207
3208void
3209expand_start_target_temps ()
3210{
3211 /* This is so that even if the result is preserved, the space
3212 allocated will be freed, as we know that it is no longer in use. */
3213 push_temp_slots ();
3214
3215 /* Start a new binding layer that will keep track of all cleanup
3216 actions to be performed. */
3217 expand_start_bindings (0);
3218
3219 target_temp_slot_level = temp_slot_level;
3220}
3221
3222void
3223expand_end_target_temps ()
3224{
3225 expand_end_bindings (NULL_TREE, 0, 0);
3226
3227 /* This is so that even if the result is preserved, the space
3228 allocated will be freed, as we know that it is no longer in use. */
3229 pop_temp_slots ();
3230}
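/* Sketch of the intended pairing (assumed usage):

	expand_start_target_temps ();
	... expand_expr calls that may create TARGET_EXPR temporaries ...
	expand_end_target_temps ();

   after which the space for those temporaries is free for reuse, as
   described above.  */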
3231
3232/* Mark top block of block_stack as an implicit binding for an
3233 exception region. This is used to prevent infinite recursion when
3234 ending a binding with expand_end_bindings. It is only ever called
 3235   by expand_eh_region_start, as that is the only way to create a
 3236   block stack for an exception region.  */
3237
3238void
3239mark_block_as_eh_region ()
3240{
3241 block_stack->data.block.exception_region = 1;
3242 if (block_stack->next
3243 && block_stack->next->data.block.conditional_code)
3244 {
3245 block_stack->data.block.conditional_code
3246 = block_stack->next->data.block.conditional_code;
3247 block_stack->data.block.last_unconditional_cleanup
3248 = block_stack->next->data.block.last_unconditional_cleanup;
3249 block_stack->data.block.cleanup_ptr
3250 = block_stack->next->data.block.cleanup_ptr;
3251 }
3252}
3253
3254/* True if we are currently emitting insns in an area of output code
3255 that is controlled by a conditional expression. This is used by
3256 the cleanup handling code to generate conditional cleanup actions. */
3257
3258int
3259conditional_context ()
3260{
3261 return block_stack && block_stack->data.block.conditional_code;
3262}
3263
3264/* Mark top block of block_stack as not for an implicit binding for an
3265 exception region. This is only ever done by expand_eh_region_end
3266 to let expand_end_bindings know that it is being called explicitly
3267 to end just the binding layer associated with
3268 the exception region; otherwise expand_end_bindings would try to
3269 end all implicit binding layers for exception regions, and then
3270 one normal binding layer. */
3271
3272void
3273mark_block_as_not_eh_region ()
3274{
3275 block_stack->data.block.exception_region = 0;
3276}
3277
3278/* True if the top block of block_stack was marked as for an exception
3279 region by mark_block_as_eh_region. */
3280
3281int
3282is_eh_region ()
3283{
3284 return (current_function && block_stack
3285 && block_stack->data.block.exception_region);
3286}
3287
3288/* Given a pointer to a BLOCK node, save a pointer to the most recently
3289 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3290 BLOCK node. */
3291
3292void
3293remember_end_note (block)
3294 register tree block;
3295{
3296 BLOCK_END_NOTE (block) = last_block_end_note;
3297 last_block_end_note = NULL_RTX;
3298}
3299
3300/* Emit a handler label for a nonlocal goto handler.
3301 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3302
e881bb1b 3303static rtx
ba716ac9
BS
3304expand_nl_handler_label (slot, before_insn)
3305 rtx slot, before_insn;
3306{
3307 rtx insns;
3308 rtx handler_label = gen_label_rtx ();
3309
3310 /* Don't let jump_optimize delete the handler. */
3311 LABEL_PRESERVE_P (handler_label) = 1;
3312
3313 start_sequence ();
3314 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3315 insns = get_insns ();
3316 end_sequence ();
3317 emit_insns_before (insns, before_insn);
3318
3319 emit_label (handler_label);
3320
3321 return handler_label;
3322}
3323
3324/* Emit code to restore vital registers at the beginning of a nonlocal goto
3325 handler. */
3326static void
3327expand_nl_goto_receiver ()
3328{
3329#ifdef HAVE_nonlocal_goto
3330 if (! HAVE_nonlocal_goto)
3331#endif
3332 /* First adjust our frame pointer to its actual value. It was
3333 previously set to the start of the virtual area corresponding to
3334 the stacked variables when we branched here and now needs to be
3335 adjusted to the actual hardware fp value.
3336
3337 Assignments to virtual registers are converted by
3338 instantiate_virtual_regs into the corresponding assignment
3339 to the underlying register (fp in this case) that makes
3340 the original assignment true.
3341 So the following insn will actually be
3342 decrementing fp by STARTING_FRAME_OFFSET. */
3343 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3344
3345#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3346 if (fixed_regs[ARG_POINTER_REGNUM])
3347 {
3348#ifdef ELIMINABLE_REGS
3349 /* If the argument pointer can be eliminated in favor of the
3350 frame pointer, we don't need to restore it. We assume here
3351 that if such an elimination is present, it can always be used.
3352 This is the case on all known machines; if we don't make this
3353 assumption, we do unnecessary saving on many machines. */
3354 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3355 size_t i;
3356
3357 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3358 if (elim_regs[i].from == ARG_POINTER_REGNUM
3359 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3360 break;
3361
3362 if (i == sizeof elim_regs / sizeof elim_regs [0])
3363#endif
3364 {
3365 /* Now restore our arg pointer from the address at which it
3366 was saved in our stack frame.
3367 If there hasn't been space allocated for it yet, make
3368 some now. */
3369 if (arg_pointer_save_area == 0)
3370 arg_pointer_save_area
3371 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3372 emit_move_insn (virtual_incoming_args_rtx,
3373 /* We need a pseudo here, or else
3374 instantiate_virtual_regs_1 complains. */
3375 copy_to_reg (arg_pointer_save_area));
3376 }
3377 }
3378#endif
3379
3380#ifdef HAVE_nonlocal_goto_receiver
3381 if (HAVE_nonlocal_goto_receiver)
3382 emit_insn (gen_nonlocal_goto_receiver ());
3383#endif
3384}
3385
3386/* Make handlers for nonlocal gotos taking place in the function calls in
3387 block THISBLOCK. */
3388
3389static void
3390expand_nl_goto_receivers (thisblock)
3391 struct nesting *thisblock;
3392{
3393 tree link;
3394 rtx afterward = gen_label_rtx ();
3395 rtx insns, slot;
3396 rtx label_list;
3397 int any_invalid;
3398
3399 /* Record the handler address in the stack slot for that purpose,
3400 during this block, saving and restoring the outer value. */
3401 if (thisblock->next != 0)
3402 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3403 {
3404 rtx save_receiver = gen_reg_rtx (Pmode);
3405 emit_move_insn (XEXP (slot, 0), save_receiver);
3406
3407 start_sequence ();
3408 emit_move_insn (save_receiver, XEXP (slot, 0));
3409 insns = get_insns ();
3410 end_sequence ();
3411 emit_insns_before (insns, thisblock->data.block.first_insn);
3412 }
3413
3414 /* Jump around the handlers; they run only when specially invoked. */
3415 emit_jump (afterward);
3416
3417 /* Make a separate handler for each label. */
3418 link = nonlocal_labels;
3419 slot = nonlocal_goto_handler_slots;
3420 label_list = NULL_RTX;
3421 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3422 /* Skip any labels we shouldn't be able to jump to from here;
3423 we generate one special handler for all of them below, which just calls
3424 abort. */
3425 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3426 {
3427 rtx lab;
3428 lab = expand_nl_handler_label (XEXP (slot, 0),
3429 thisblock->data.block.first_insn);
3430 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3431
3432 expand_nl_goto_receiver ();
3433
3434 /* Jump to the "real" nonlocal label. */
3435 expand_goto (TREE_VALUE (link));
3436 }
3437
3438 /* A second pass over all nonlocal labels; this time we handle those
3439 we should not be able to jump to at this point. */
3440 link = nonlocal_labels;
3441 slot = nonlocal_goto_handler_slots;
3442 any_invalid = 0;
3443 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3444 if (DECL_TOO_LATE (TREE_VALUE (link)))
3445 {
3446 rtx lab;
3447 lab = expand_nl_handler_label (XEXP (slot, 0),
3448 thisblock->data.block.first_insn);
3449 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3450 any_invalid = 1;
3451 }
3452
3453 if (any_invalid)
3454 {
3455 expand_nl_goto_receiver ();
3456 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3457 VOIDmode, 0);
3458 emit_barrier ();
3459 }
3460
3461 nonlocal_goto_handler_labels = label_list;
3462 emit_label (afterward);
3463}
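/* Illustrative source (an editor's sketch, not from the original file):
   the receivers built above serve GNU C nested functions that jump to a
   label in their containing function, e.g.

       void outer (void)
       {
         __label__ done;
         void inner (void) { goto done; }
         call_me_back (inner);
       done: ;
       }

   CALL_ME_BACK is a hypothetical function that may invoke its argument;
   if INNER runs, the nonlocal goto reenters OUTER through the handler
   label stored in the slot for DONE, which restores the frame and
   argument pointers before jumping to the real label.  */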
3464
3465/* Generate RTL code to terminate a binding contour.
3466
3467 VARS is the chain of VAR_DECL nodes for the variables bound in this
3468 contour. There may actually be other nodes in this chain, but any
3469 nodes other than VAR_DECLS are ignored.
3470
3471 MARK_ENDS is nonzero if we should put a note at the beginning
3472 and end of this binding contour.
3473
3474 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3475 (That is true automatically if the contour has a saved stack level.) */
3476
3477void
3478expand_end_bindings (vars, mark_ends, dont_jump_in)
3479 tree vars;
3480 int mark_ends;
3481 int dont_jump_in;
3482{
3483 register struct nesting *thisblock;
3484 register tree decl;
3485
3486 while (block_stack->data.block.exception_region)
3487 {
3488 /* Because we don't need or want a new temporary level and
3489 because we didn't create one in expand_eh_region_start,
3490 create a fake one now to avoid removing one in
3491 expand_end_bindings. */
3492 push_temp_slots ();
3493
3494 block_stack->data.block.exception_region = 0;
3495
3496 expand_end_bindings (NULL_TREE, 0, 0);
3497 }
3498
e976b8b2
MS
3499 /* Since expand_eh_region_start does an expand_start_bindings, we
3500 have to first end all the bindings that were created by
3501 expand_eh_region_start. */
3502
3503 thisblock = block_stack;
3504
28d81abb
RK
3505 if (warn_unused)
3506 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3507 if (TREE_CODE (decl) == VAR_DECL
3508 && ! TREE_USED (decl)
3509 && ! DECL_IN_SYSTEM_HEADER (decl)
3510 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3511 warning_with_decl (decl, "unused variable `%s'");
3512
3513 if (thisblock->exit_label)
3514 {
3515 do_pending_stack_adjust ();
3516 emit_label (thisblock->exit_label);
3517 }
3518
3519 /* If necessary, make handlers for nonlocal gotos taking
3520 place in the function calls in this block. */
3521 if (function_call_count != thisblock->data.block.n_function_calls
3522 && nonlocal_labels
3523 /* Make handler for outermost block
3524 if there were any nonlocal gotos to this function. */
3525 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3526 /* Make handler for inner block if it has something
3527 special to do when you jump out of it. */
3528 : (thisblock->data.block.cleanups != 0
3529 || thisblock->data.block.stack_level != 0)))
3530 expand_nl_goto_receivers (thisblock);
3531
3532 /* Don't allow jumping into a block that has a stack level.
3533 Cleanups are allowed, though. */
3534 if (dont_jump_in
3535 || thisblock->data.block.stack_level != 0)
3536 {
3537 struct label_chain *chain;
3538
3539 /* Any labels in this block are no longer valid to go to.
3540 Mark them to cause an error message. */
3541 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3542 {
3543 DECL_TOO_LATE (chain->label) = 1;
3544 /* If any goto without a fixup came to this label,
3545 that must be an error, because gotos without fixups
3546 come from outside all saved stack-levels. */
3547 if (TREE_ADDRESSABLE (chain->label))
3548 error_with_decl (chain->label,
3549 "label `%s' used before containing binding contour");
3550 }
3551 }
3552
3553 /* Restore stack level in effect before the block
3554 (only if variable-size objects allocated). */
3555 /* Perform any cleanups associated with the block. */
3556
3557 if (thisblock->data.block.stack_level != 0
3558 || thisblock->data.block.cleanups != 0)
3559 {
3560 /* Only clean up here if this point can actually be reached. */
3561 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3562
3563 /* Don't let cleanups affect ({...}) constructs. */
3564 int old_expr_stmts_for_value = expr_stmts_for_value;
3565 rtx old_last_expr_value = last_expr_value;
3566 tree old_last_expr_type = last_expr_type;
3567 expr_stmts_for_value = 0;
3568
3569 /* Do the cleanups. */
3570 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3571 if (reachable)
3572 do_pending_stack_adjust ();
3573
3574 expr_stmts_for_value = old_expr_stmts_for_value;
3575 last_expr_value = old_last_expr_value;
3576 last_expr_type = old_last_expr_type;
3577
3578 /* Restore the stack level. */
3579
3580 if (reachable && thisblock->data.block.stack_level != 0)
3581 {
3582 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3583 thisblock->data.block.stack_level, NULL_RTX);
3584 if (nonlocal_goto_handler_slots != 0)
3585 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3586 NULL_RTX);
3587 }
3588
3589 /* Any gotos out of this block must also do these things.
3590 Also report any gotos with fixups that came to labels in this
3591 level. */
3592 fixup_gotos (thisblock,
3593 thisblock->data.block.stack_level,
3594 thisblock->data.block.cleanups,
3595 thisblock->data.block.first_insn,
3596 dont_jump_in);
3597 }
3598
3599 /* Mark the beginning and end of the scope if requested.
3600 We do this now, after running cleanups on the variables
3601 just going out of scope, so they are in scope for their cleanups. */
3602
3603 if (mark_ends)
3604 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3605 else
3606 /* Get rid of the beginning-mark if we don't make an end-mark. */
3607 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3608
3609 /* If doing stupid register allocation, make sure lives of all
3610 register variables declared here extend thru end of scope. */
3611
3612 if (obey_regdecls)
3613 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3614 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3615 use_variable (DECL_RTL (decl));
3616
3617 /* Restore the temporary level of TARGET_EXPRs. */
3618 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3619
3620 /* Restore block_stack level for containing block. */
3621
3622 stack_block_stack = thisblock->data.block.innermost_stack_block;
3623 POPSTACK (block_stack);
3624
3625 /* Pop the stack slot nesting and free any slots at this level. */
3626 pop_temp_slots ();
3627}
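/* Illustrative usage (an editor's sketch, not from the original file):
   a front end expands a brace-block roughly as

       expand_start_bindings (1);
       ...expand the block's declarations and statements...
       expand_end_bindings (decls, 1, 0);

   with DECLS the chain of the block's VAR_DECLs; how that chain is
   obtained (e.g. a getdecls-style hook) is the front end's concern,
   not this file's.  */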
3628\f
3629/* Generate RTL for the automatic variable declaration DECL.
3630 (Other kinds of declarations are simply ignored if seen here.) */
3631
3632void
3633expand_decl (decl)
3634 register tree decl;
3635{
3636 struct nesting *thisblock;
3637 tree type;
3638
3639 type = TREE_TYPE (decl);
3640
3641 /* Only automatic variables need any expansion done.
3642 Static and external variables, and external functions,
3643 will be handled by `assemble_variable' (called from finish_decl).
3644 TYPE_DECL and CONST_DECL require nothing.
3645 PARM_DECLs are handled in `assign_parms'. */
3646
3647 if (TREE_CODE (decl) != VAR_DECL)
3648 return;
3649 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3650 return;
3651
3652 thisblock = block_stack;
3653
3654 /* Create the RTL representation for the variable. */
3655
3656 if (type == error_mark_node)
3657 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3658 else if (DECL_SIZE (decl) == 0)
3659 /* Variable with incomplete type. */
3660 {
3661 if (DECL_INITIAL (decl) == 0)
3662 /* Error message was already done; now avoid a crash. */
3663 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3664 else
3665 /* An initializer is going to decide the size of this array.
3666 Until we know the size, represent its address with a reg. */
3667 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3668 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3669 }
3670 else if (DECL_MODE (decl) != BLKmode
3671 /* If -ffloat-store, don't put explicit float vars
3672 into regs. */
3673 && !(flag_float_store
3674 && TREE_CODE (type) == REAL_TYPE)
3675 && ! TREE_THIS_VOLATILE (decl)
3676 && ! TREE_ADDRESSABLE (decl)
3677 && (DECL_REGISTER (decl) || ! obey_regdecls)
3678 /* if -fcheck-memory-usage, check all variables. */
3679 && ! current_function_check_memory_usage)
3680 {
3681 /* Automatic variable that can go in a register. */
3682 int unsignedp = TREE_UNSIGNED (type);
3683 enum machine_mode reg_mode
3684 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3685
3686 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3687 mark_user_reg (DECL_RTL (decl));
3688
3689 if (POINTER_TYPE_P (type))
3690 mark_reg_pointer (DECL_RTL (decl),
3691 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3692 / BITS_PER_UNIT));
3693 }
3694
3695 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3696 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3697 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3698 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3699 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3700 {
3701 /* Variable of fixed size that goes on the stack. */
3702 rtx oldaddr = 0;
3703 rtx addr;
3704
3705 /* If we previously made RTL for this decl, it must be an array
3706 whose size was determined by the initializer.
3707 The old address was a register; set that register now
3708 to the proper address. */
3709 if (DECL_RTL (decl) != 0)
3710 {
3711 if (GET_CODE (DECL_RTL (decl)) != MEM
3712 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3713 abort ();
3714 oldaddr = XEXP (DECL_RTL (decl), 0);
3715 }
3716
3717 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3718 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3719 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3720
3721 /* Set alignment we actually gave this decl. */
3722 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3723 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3724
3725 if (oldaddr)
3726 {
3727 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3728 if (addr != oldaddr)
3729 emit_move_insn (oldaddr, addr);
3730 }
3731
3732 /* If this is a memory ref that contains aggregate components,
3733 mark it as such for cse and loop optimize. */
3734 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3735 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3736#if 0
3737 /* If this is in memory because of -ffloat-store,
3738 set the volatile bit, to prevent optimizations from
3739 undoing the effects. */
3740 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3741 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3742#endif
3743
3744 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3745 }
3746 else
3747 /* Dynamic-size object: must push space on the stack. */
3748 {
3749 rtx address, size;
3750
3751 /* Record the stack pointer on entry to block, if have
3752 not already done so. */
3753 if (thisblock->data.block.stack_level == 0)
3754 {
3755 do_pending_stack_adjust ();
3756 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3757 &thisblock->data.block.stack_level,
3758 thisblock->data.block.first_insn);
3759 stack_block_stack = thisblock;
3760 }
3761
3762 /* Compute the variable's size, in bytes. */
3763 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3764 DECL_SIZE (decl),
3765 size_int (BITS_PER_UNIT)),
3766 NULL_RTX, VOIDmode, 0);
3767 free_temp_slots ();
3768
3769 /* Allocate space on the stack for the variable. Note that
3770 DECL_ALIGN says how the variable is to be aligned and we
3771 cannot use it to conclude anything about the alignment of
3772 the size. */
3773 address = allocate_dynamic_stack_space (size, NULL_RTX,
3774 TYPE_ALIGN (TREE_TYPE (decl)));
3775
3776 /* Reference the variable indirect through that rtx. */
3777 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3778
3779 /* If this is a memory ref that contains aggregate components,
3780 mark it as such for cse and loop optimize. */
3781 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3782 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3783
3784 /* Indicate the alignment we actually gave this variable. */
3785#ifdef STACK_BOUNDARY
3786 DECL_ALIGN (decl) = STACK_BOUNDARY;
3787#else
3788 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3789#endif
3790 }
3791
3792 if (TREE_THIS_VOLATILE (decl))
3793 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3794#if 0 /* A variable is not necessarily unchanging
3795 just because it is const. RTX_UNCHANGING_P
3796 means no change in the function,
3797 not merely no change in the variable's scope.
3798 It is correct to set RTX_UNCHANGING_P if the variable's scope
3799 is the whole function. There's no convenient way to test that. */
3800 if (TREE_READONLY (decl))
3801 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3802#endif
3803
3804 /* If doing stupid register allocation, make sure life of any
3805 register variable starts here, at the start of its scope. */
3806
3807 if (obey_regdecls)
3808 use_variable (DECL_RTL (decl));
3809}
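/* Worked example (an editor's note, not from the original file): in

       void f (int n)
       {
         register int i;
         char buf[16];
         char vla[n];
       }

   I can take the register path above, BUF the fixed-size path through
   assign_temp, and VLA the dynamic path, which saves the entry stack
   level and calls allocate_dynamic_stack_space.  Whether I really gets
   a register also depends on volatility, addressability, -ffloat-store
   and -fcheck-memory-usage, as tested above.  */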
3810
3811
3812\f
3813/* Emit code to perform the initialization of a declaration DECL. */
3814
3815void
3816expand_decl_init (decl)
3817 tree decl;
3818{
3819 int was_used = TREE_USED (decl);
3820
3821 /* If this is a CONST_DECL, we don't have to generate any code, but
3822 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3823 to be set while in the obstack containing the constant. If we don't
3824 do this, we can lose if we have functions nested three deep and the middle
3825 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3826 the innermost function is the first to expand that STRING_CST. */
3827 if (TREE_CODE (decl) == CONST_DECL)
3828 {
3829 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3830 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3831 EXPAND_INITIALIZER);
3832 return;
3833 }
3834
3835 if (TREE_STATIC (decl))
3836 return;
3837
3838 /* Compute and store the initial value now. */
3839
3840 if (DECL_INITIAL (decl) == error_mark_node)
3841 {
3842 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3843
3844 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3845 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3846 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3847 0, 0);
3848 emit_queue ();
3849 }
3850 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3851 {
3852 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3853 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3854 emit_queue ();
3855 }
3856
3857 /* Don't let the initialization count as "using" the variable. */
3858 TREE_USED (decl) = was_used;
3859
3860 /* Free any temporaries we made while initializing the decl. */
3861 preserve_temp_slots (NULL_RTX);
3862 free_temp_slots ();
3863}
3864
3865/* CLEANUP is an expression to be executed at exit from this binding contour;
3866 for example, in C++, it might call the destructor for this variable.
3867
3868 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3869 CLEANUP multiple times, and have the correct semantics. This
3870 happens in exception handling, and for gotos, returns, and breaks that
3871 leave the current scope.
3872
3873 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3874 that is not associated with any particular variable. */
3875
3876int
3877expand_decl_cleanup (decl, cleanup)
3878 tree decl, cleanup;
3879{
3880 struct nesting *thisblock;
3881
3882 /* Error if we are not in any block. */
3883 if (current_function == 0 || block_stack == 0)
3884 return 0;
3885
3886 thisblock = block_stack;
3887
3888 /* Record the cleanup if there is one. */
3889
3890 if (cleanup != 0)
3891 {
3892 tree t;
3893 rtx seq;
3894 tree *cleanups = &thisblock->data.block.cleanups;
3895 int cond_context = conditional_context ();
3896
3897 if (cond_context)
3898 {
3899 rtx flag = gen_reg_rtx (word_mode);
3900 rtx set_flag_0;
3901 tree cond;
3902
3903 start_sequence ();
3904 emit_move_insn (flag, const0_rtx);
3905 set_flag_0 = get_insns ();
3906 end_sequence ();
3907
3908 thisblock->data.block.last_unconditional_cleanup
3909 = emit_insns_after (set_flag_0,
3910 thisblock->data.block.last_unconditional_cleanup);
3911
3912 emit_move_insn (flag, const1_rtx);
3913
3914 /* All cleanups must be on the function_obstack. */
3915 push_obstacks_nochange ();
3916 resume_temporary_allocation ();
3917
3918 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3919 DECL_RTL (cond) = flag;
3920
3921 /* Conditionalize the cleanup. */
3922 cleanup = build (COND_EXPR, void_type_node,
3923 truthvalue_conversion (cond),
3924 cleanup, integer_zero_node);
3925 cleanup = fold (cleanup);
3926
3927 pop_obstacks ();
3928
3929 cleanups = thisblock->data.block.cleanup_ptr;
3930 }
3931
3932 /* All cleanups must be on the function_obstack. */
3933 push_obstacks_nochange ();
3934 resume_temporary_allocation ();
3935 cleanup = unsave_expr (cleanup);
3936 pop_obstacks ();
3937
3938 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3939
3940 if (! cond_context)
3941 /* If this block has a cleanup, it belongs in stack_block_stack. */
3942 stack_block_stack = thisblock;
3943
3944 if (cond_context)
3945 {
3946 start_sequence ();
3947 }
3948
3949 /* If this was optimized so that there is no exception region for the
3950 cleanup, then mark the TREE_LIST node, so that we can later tell
3951 if we need to call expand_eh_region_end. */
3952 if (! using_eh_for_cleanups_p
3953 || expand_eh_region_start_tree (decl, cleanup))
3954 TREE_ADDRESSABLE (t) = 1;
3955 /* If that started a new EH region, we're in a new block. */
3956 thisblock = block_stack;
3957
3958 if (cond_context)
3959 {
3960 seq = get_insns ();
3961 end_sequence ();
3962 if (seq)
3963 thisblock->data.block.last_unconditional_cleanup
3964 = emit_insns_after (seq,
3965 thisblock->data.block.last_unconditional_cleanup);
3966 }
3967 else
3968 {
3969 thisblock->data.block.last_unconditional_cleanup
3970 = get_last_insn ();
3971 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3972 }
3973 }
3974 return 1;
3975}
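/* Illustrative usage (an editor's sketch, not from the original file):
   for a C++-style local with a destructor, a front end might emit

       expand_decl (var);
       expand_decl_init (var);
       if (! expand_decl_cleanup (var, dtor_call))
         ...diagnose a declaration outside any binding contour...

   where DTOR_CALL is a CALL_EXPR invoking the destructor; VAR and
   DTOR_CALL are hypothetical names, not part of this file.  */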
3976
3977/* Like expand_decl_cleanup, but suppress generating an exception handler
3978 to perform the cleanup. */
3979
3980int
3981expand_decl_cleanup_no_eh (decl, cleanup)
3982 tree decl, cleanup;
3983{
3984 int save_eh = using_eh_for_cleanups_p;
3985 int result;
3986
3987 using_eh_for_cleanups_p = 0;
3988 result = expand_decl_cleanup (decl, cleanup);
3989 using_eh_for_cleanups_p = save_eh;
3990
3991 return result;
3992}
3993
3994/* Arrange for the top element of the dynamic cleanup chain to be
3995 popped if we exit the current binding contour. DECL is the
3996 associated declaration, if any, otherwise NULL_TREE. If the
3997 current contour is left via an exception, then __sjthrow will pop
3998 the top element off the dynamic cleanup chain. The code that
3999 avoids doing the action we push into the cleanup chain in the
4000 exceptional case is contained in expand_cleanups.
4001
4002 This routine is only used by expand_eh_region_start, and that is
4003 the only way in which an exception region should be started. This
4004 routine is only used when using the setjmp/longjmp codegen method
4005 for exception handling. */
4006
4007int
4008expand_dcc_cleanup (decl)
4009 tree decl;
4010{
4011 struct nesting *thisblock;
4012 tree cleanup;
4013
4014 /* Error if we are not in any block. */
4015 if (current_function == 0 || block_stack == 0)
4016 return 0;
4017 thisblock = block_stack;
4018
4019 /* Record the cleanup for the dynamic handler chain. */
4020
4021 /* All cleanups must be on the function_obstack. */
4022 push_obstacks_nochange ();
4023 resume_temporary_allocation ();
4024 cleanup = make_node (POPDCC_EXPR);
4025 pop_obstacks ();
4026
4027 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4028 thisblock->data.block.cleanups
4029 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4030
4031 /* If this block has a cleanup, it belongs in stack_block_stack. */
4032 stack_block_stack = thisblock;
4033 return 1;
4034}
4035
4036/* Arrange for the top element of the dynamic handler chain to be
4037 popped if we exit the current binding contour. DECL is the
4038 associated declaration, if any, otherwise NULL_TREE. If the current
4039 contour is left via an exception, then __sjthrow will pop the top
4040 element off the dynamic handler chain. The code that avoids doing
4041 the action we push into the handler chain in the exceptional case
4042 is contained in expand_cleanups.
4043
4044 This routine is only used by expand_eh_region_start, and that is
4045 the only way in which an exception region should be started. This
4046 routine is only used when using the setjmp/longjmp codegen method
4047 for exception handling. */
4048
4049int
4050expand_dhc_cleanup (decl)
4051 tree decl;
4052{
4053 struct nesting *thisblock;
4054 tree cleanup;
4055
4056 /* Error if we are not in any block. */
4057 if (current_function == 0 || block_stack == 0)
4058 return 0;
4059 thisblock = block_stack;
4060
4061 /* Record the cleanup for the dynamic handler chain. */
4062
4063 /* All cleanups must be on the function_obstack. */
4064 push_obstacks_nochange ();
4065 resume_temporary_allocation ();
4066 cleanup = make_node (POPDHC_EXPR);
4067 pop_obstacks ();
4068
4069 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4070 thisblock->data.block.cleanups
4071 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4072
4073 /* If this block has a cleanup, it belongs in stack_block_stack. */
4074 stack_block_stack = thisblock;
4075 return 1;
4076}
4077\f
4078/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4079 DECL_ELTS is the list of elements that belong to DECL's type.
4080 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4081
4082void
4083expand_anon_union_decl (decl, cleanup, decl_elts)
4084 tree decl, cleanup, decl_elts;
4085{
4086 struct nesting *thisblock = current_function == 0 ? 0 : block_stack;
4087 rtx x;
4088
4089 expand_decl (decl);
4090 expand_decl_cleanup (decl, cleanup);
4091 x = DECL_RTL (decl);
4092
4093 while (decl_elts)
4094 {
4095 tree decl_elt = TREE_VALUE (decl_elts);
4096 tree cleanup_elt = TREE_PURPOSE (decl_elts);
4097 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4098
4099 /* Propagate the union's alignment to the elements. */
4100 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4101
4102 /* If the element has BLKmode and the union doesn't, the union is
4103 aligned such that the element doesn't need to have BLKmode, so
4104 change the element's mode to the appropriate one for its size. */
4105 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4106 DECL_MODE (decl_elt) = mode
4107 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4108 MODE_INT, 1);
4109
4110 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4111 instead create a new MEM rtx with the proper mode. */
4112 if (GET_CODE (x) == MEM)
4113 {
4114 if (mode == GET_MODE (x))
4115 DECL_RTL (decl_elt) = x;
4116 else
4117 {
4118 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4119 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4120 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4121 }
4122 }
4123 else if (GET_CODE (x) == REG)
4124 {
4125 if (mode == GET_MODE (x))
4126 DECL_RTL (decl_elt) = x;
4127 else
4128 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4129 }
4130 else
4131 abort ();
4132
4133 /* Record the cleanup if there is one. */
4134
4135 if (cleanup != 0)
4136 thisblock->data.block.cleanups
4137 = temp_tree_cons (decl_elt, cleanup_elt,
4138 thisblock->data.block.cleanups);
4139
4140 decl_elts = TREE_CHAIN (decl_elts);
4141 }
4142}
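/* Worked example (an editor's note, not from the original file): for
   the C++ fragment

       union { int i; float f; };
       i = 3;

   the anonymous union gets a single rtx, and the loop above gives the
   members I and F DECL_RTLs that reference that same storage, adjusted
   only in machine mode (a fresh MEM or a SUBREG, as appropriate).  */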
4143\f
4144/* Expand a list of cleanups LIST.
4145 Elements may be expressions or may be nested lists.
4146
4147 If DONT_DO is nonnull, then any list-element
4148 whose TREE_PURPOSE matches DONT_DO is omitted.
4149 This is sometimes used to avoid a cleanup associated with
4150 a value that is being returned out of the scope.
4151
4152 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4153 goto and handle protection regions specially in that case.
4154
4155 If REACHABLE, we emit code, otherwise just inform the exception handling
4156 code about this finalization. */
4157
4158static void
4159expand_cleanups (list, dont_do, in_fixup, reachable)
4160 tree list;
4161 tree dont_do;
4162 int in_fixup;
4163 int reachable;
4164{
4165 tree tail;
4166 for (tail = list; tail; tail = TREE_CHAIN (tail))
4167 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4168 {
4169 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4170 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4171 else
4172 {
4173 if (! in_fixup)
4174 {
4175 tree cleanup = TREE_VALUE (tail);
4176
4177 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4178 if (TREE_CODE (cleanup) != POPDHC_EXPR
4179 && TREE_CODE (cleanup) != POPDCC_EXPR
4180 /* See expand_eh_region_start_tree for this case. */
4181 && ! TREE_ADDRESSABLE (tail))
4182 {
4183 cleanup = protect_with_terminate (cleanup);
4184 expand_eh_region_end (cleanup);
4185 }
4186 }
4187
4188 if (reachable)
4189 {
4190 /* Cleanups may be run multiple times. For example,
4191 when exiting a binding contour, we expand the
4192 cleanups associated with that contour. When a goto
4193 within that binding contour has a target outside that
4194 contour, it will expand all cleanups from its scope to
4195 the target. Though the cleanups are expanded multiple
4196 times, the control paths are non-overlapping so the
4197 cleanups will not be executed twice. */
4198
4199 /* We may need to protect fixups with rethrow regions. */
4200 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4201
4202 if (protect)
4203 expand_fixup_region_start ();
4204
4205 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4206 if (protect)
4207 expand_fixup_region_end (TREE_VALUE (tail));
4208 free_temp_slots ();
4209 }
4210 }
4211 }
4212}
4213
4214/* Mark the context we are emitting RTL for as a conditional
4215 context, so that any cleanup actions we register with
4216 expand_decl_init will be properly conditionalized when those
4217 cleanup actions are later performed. Must be called before any
4218 expression (tree) is expanded that is within a conditional context. */
4219
4220void
4221start_cleanup_deferral ()
4222{
4223 /* block_stack can be NULL if we are inside the parameter list. It is
4224 OK to do nothing, because cleanups aren't possible here. */
4225 if (block_stack)
4226 ++block_stack->data.block.conditional_code;
4227}
4228
4229/* Mark the end of a conditional region of code. Because cleanup
4230 deferrals may be nested, we may still be in a conditional region
4231 after we end the currently deferred cleanups; only after we end all
4232 deferred cleanups are we back in unconditional code. */
4233
4234void
4235end_cleanup_deferral ()
4236{
4237 /* block_stack can be NULL if we are inside the parameter list. It is
4238 OK to do nothing, because cleanups aren't possible here. */
4239 if (block_stack)
4240 --block_stack->data.block.conditional_code;
4241}
4242
4243/* Move all cleanups from the current block_stack
4244 to the containing block_stack, where they are assumed to
4245 have been created. If anything can cause a temporary to
4246 be created, but not expanded for more than one level of
4247 block_stacks, then this code will have to change. */
4248
4249void
4250move_cleanups_up ()
4251{
4252 struct nesting *block = block_stack;
4253 struct nesting *outer = block->next;
4254
4255 outer->data.block.cleanups
4256 = chainon (block->data.block.cleanups,
4257 outer->data.block.cleanups);
4258 block->data.block.cleanups = 0;
4259}
4260
4261tree
4262last_cleanup_this_contour ()
4263{
4264 if (block_stack == 0)
4265 return 0;
4266
4267 return block_stack->data.block.cleanups;
4268}
4269
4270/* Return 1 if there are any pending cleanups at this point.
4271 If THIS_CONTOUR is nonzero, check the current contour as well.
4272 Otherwise, look only at the contours that enclose this one. */
4273
4274int
4275any_pending_cleanups (this_contour)
4276 int this_contour;
4277{
4278 struct nesting *block;
4279
4280 if (block_stack == 0)
4281 return 0;
4282
4283 if (this_contour && block_stack->data.block.cleanups != NULL)
4284 return 1;
4285 if (block_stack->data.block.cleanups == 0
4286 && block_stack->data.block.outer_cleanups == 0)
4287 return 0;
4288
4289 for (block = block_stack->next; block; block = block->next)
4290 if (block->data.block.cleanups != 0)
4291 return 1;
4292
4293 return 0;
4294}
4295\f
4296/* Enter a case (Pascal) or switch (C) statement.
4297 Push a block onto case_stack and nesting_stack
4298 to accumulate the case-labels that are seen
4299 and to record the labels generated for the statement.
4300
4301 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4302 Otherwise, this construct is transparent for `exit_something'.
4303
4304 EXPR is the index-expression to be dispatched on.
4305 TYPE is its nominal type. We could simply convert EXPR to this type,
4306 but instead we take short cuts. */
4307
4308void
4309expand_start_case (exit_flag, expr, type, printname)
4310 int exit_flag;
4311 tree expr;
4312 tree type;
4313 const char *printname;
4314{
4315 register struct nesting *thiscase = ALLOC_NESTING ();
4316
4317 /* Make an entry on case_stack for the case we are entering. */
4318
4319 thiscase->next = case_stack;
4320 thiscase->all = nesting_stack;
4321 thiscase->depth = ++nesting_depth;
4322 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4323 thiscase->data.case_stmt.case_list = 0;
4324 thiscase->data.case_stmt.index_expr = expr;
4325 thiscase->data.case_stmt.nominal_type = type;
4326 thiscase->data.case_stmt.default_label = 0;
4327 thiscase->data.case_stmt.num_ranges = 0;
4328 thiscase->data.case_stmt.printname = printname;
4329 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4330 case_stack = thiscase;
4331 nesting_stack = thiscase;
4332
4333 do_pending_stack_adjust ();
4334
4335 /* Make sure case_stmt.start points to something that won't
4336 need any transformation before expand_end_case. */
4337 if (GET_CODE (get_last_insn ()) != NOTE)
4338 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4339
4340 thiscase->data.case_stmt.start = get_last_insn ();
4341
4342 start_cleanup_deferral ();
4343}
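/* Illustrative usage (an editor's sketch, not from the original file):
   a front end expands

       switch (x) { case 1: ...  default: ... }

   roughly as

       expand_start_case (1, x_tree, type, "switch statement");
       pushcase (build_int_2 (1, 0), convert_fn, case1_label, &dup);
       pushcase (NULL_TREE, convert_fn, default_label, &dup);
       expand_end_case (x_tree);

   with CONVERT_FN the language's conversion function (see pushcase
   below); the particular names here are illustrative only.  */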
4344
4345
4346/* Start a "dummy case statement" within which case labels are invalid
4347 and are not connected to any larger real case statement.
4348 This can be used if you don't want to let a case statement jump
4349 into the middle of certain kinds of constructs. */
4350
4351void
4352expand_start_case_dummy ()
4353{
4354 register struct nesting *thiscase = ALLOC_NESTING ();
4355
4356 /* Make an entry on case_stack for the dummy. */
4357
4358 thiscase->next = case_stack;
4359 thiscase->all = nesting_stack;
4360 thiscase->depth = ++nesting_depth;
4361 thiscase->exit_label = 0;
4362 thiscase->data.case_stmt.case_list = 0;
4363 thiscase->data.case_stmt.start = 0;
4364 thiscase->data.case_stmt.nominal_type = 0;
4365 thiscase->data.case_stmt.default_label = 0;
4366 thiscase->data.case_stmt.num_ranges = 0;
4367 case_stack = thiscase;
4368 nesting_stack = thiscase;
4369 start_cleanup_deferral ();
4370}
4371
4372/* End a dummy case statement. */
4373
4374void
4375expand_end_case_dummy ()
4376{
4377 end_cleanup_deferral ();
4378 POPSTACK (case_stack);
4379}
4380
4381/* Return the data type of the index-expression
4382 of the innermost case statement, or null if none. */
4383
4384tree
4385case_index_expr_type ()
4386{
4387 if (case_stack)
4388 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4389 return 0;
4390}
4391\f
4392static void
4393check_seenlabel ()
4394{
4395 /* If this is the first label, warn if any insns have been emitted. */
4396 if (case_stack->data.case_stmt.line_number_status >= 0)
4397 {
4398 rtx insn;
4399
4400 restore_line_number_status
4401 (case_stack->data.case_stmt.line_number_status);
4402 case_stack->data.case_stmt.line_number_status = -1;
4403
4404 for (insn = case_stack->data.case_stmt.start;
4405 insn;
4406 insn = NEXT_INSN (insn))
4407 {
4408 if (GET_CODE (insn) == CODE_LABEL)
4409 break;
4410 if (GET_CODE (insn) != NOTE
4411 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4412 {
4413 do
4414 insn = PREV_INSN (insn);
4415 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4416
4417 /* If insn is zero, then there must have been a syntax error. */
4418 if (insn)
4419 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4420 NOTE_LINE_NUMBER(insn),
4421 "unreachable code at beginning of %s",
4422 case_stack->data.case_stmt.printname);
4423 break;
4424 }
4425 }
4426 }
4427}
4428
4429/* Accumulate one case or default label inside a case or switch statement.
4430 VALUE is the value of the case (a null pointer, for a default label).
4431 The function CONVERTER, when applied to arguments T and V,
4432 converts the value V to the type T.
4433
4434 If not currently inside a case or switch statement, return 1 and do
4435 nothing. The caller will print a language-specific error message.
4436 If VALUE is a duplicate or overlaps, return 2 and do nothing
4437 except store the (first) duplicate node in *DUPLICATE.
4438 If VALUE is out of range, return 3 and do nothing.
4439 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4440 Return 0 on success.
4441
4442 Extended to handle range statements. */
4443
4444int
4445pushcase (value, converter, label, duplicate)
4446 register tree value;
4447 tree (*converter) PROTO((tree, tree));
4448 register tree label;
4449 tree *duplicate;
4450{
4451 tree index_type;
4452 tree nominal_type;
4453
4454 /* Fail if not inside a real case statement. */
4455 if (! (case_stack && case_stack->data.case_stmt.start))
4456 return 1;
4457
4458 if (stack_block_stack
4459 && stack_block_stack->depth > case_stack->depth)
4460 return 5;
4461
4462 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4463 nominal_type = case_stack->data.case_stmt.nominal_type;
4464
4465 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4466 if (index_type == error_mark_node)
4467 return 0;
4468
4469 /* Convert VALUE to the type in which the comparisons are nominally done. */
4470 if (value != 0)
4471 value = (*converter) (nominal_type, value);
4472
4473 check_seenlabel ();
4474
4475 /* Fail if this value is out of range for the actual type of the index
4476 (which may be narrower than NOMINAL_TYPE). */
4477 if (value != 0 && ! int_fits_type_p (value, index_type))
4478 return 3;
4479
4480 /* Fail if this is a duplicate or overlaps another entry. */
4481 if (value == 0)
4482 {
4483 if (case_stack->data.case_stmt.default_label != 0)
4484 {
4485 *duplicate = case_stack->data.case_stmt.default_label;
4486 return 2;
4487 }
4488 case_stack->data.case_stmt.default_label = label;
4489 }
4490 else
4491 return add_case_node (value, value, label, duplicate);
4492
4493 expand_label (label);
4494 return 0;
4495}
4496
4497/* Like pushcase but this case applies to all values between VALUE1 and
4498 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4499 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4500 starts at VALUE1 and ends at the highest value of the index type.
4501 If both are NULL, this case applies to all values.
4502
4503 The return value is the same as that of pushcase but there is one
4504 additional error code: 4 means the specified range was empty. */
4505
4506int
4507pushcase_range (value1, value2, converter, label, duplicate)
4508 register tree value1, value2;
4509 tree (*converter) PROTO((tree, tree));
4510 register tree label;
4511 tree *duplicate;
4512{
4513 tree index_type;
4514 tree nominal_type;
4515
4516 /* Fail if not inside a real case statement. */
4517 if (! (case_stack && case_stack->data.case_stmt.start))
4518 return 1;
4519
4520 if (stack_block_stack
4521 && stack_block_stack->depth > case_stack->depth)
4522 return 5;
4523
4524 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4525 nominal_type = case_stack->data.case_stmt.nominal_type;
4526
4527 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4528 if (index_type == error_mark_node)
4529 return 0;
4530
4531 check_seenlabel ();
4532
4533 /* Convert VALUEs to type in which the comparisons are nominally done
4534 and replace any unspecified value with the corresponding bound. */
4535 if (value1 == 0)
4536 value1 = TYPE_MIN_VALUE (index_type);
4537 if (value2 == 0)
4538 value2 = TYPE_MAX_VALUE (index_type);
4539
4540 /* Fail if the range is empty. Do this before any conversion since
4541 we want to allow out-of-range empty ranges. */
4542 if (value2 && tree_int_cst_lt (value2, value1))
4543 return 4;
4544
4545 value1 = (*converter) (nominal_type, value1);
4546
4547 /* If the max was unbounded, use the max of the nominal_type we are
4548 converting to. Do this after the < check above to suppress false
4549 positives. */
4550 if (!value2)
4551 value2 = TYPE_MAX_VALUE (nominal_type);
4552 value2 = (*converter) (nominal_type, value2);
4553
4554 /* Fail if these values are out of range. */
4555 if (TREE_CONSTANT_OVERFLOW (value1)
4556 || ! int_fits_type_p (value1, index_type))
4557 return 3;
4558
4559 if (TREE_CONSTANT_OVERFLOW (value2)
4560 || ! int_fits_type_p (value2, index_type))
4561 return 3;
4562
4563 return add_case_node (value1, value2, label, duplicate);
4564}
4565
4566/* Do the actual insertion of a case label for pushcase and pushcase_range
4567 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4568 slowdown for large switch statements. */
4569
4570static int
4571add_case_node (low, high, label, duplicate)
4572 tree low, high;
4573 tree label;
4574 tree *duplicate;
4575{
4576 struct case_node *p, **q, *r;
4577
4578 q = &case_stack->data.case_stmt.case_list;
4579 p = *q;
4580
4581 while ((r = *q))
4582 {
4583 p = r;
4584
4585 /* Keep going past elements distinctly greater than HIGH. */
4586 if (tree_int_cst_lt (high, p->low))
4587 q = &p->left;
4588
4589 /* or distinctly less than LOW. */
4590 else if (tree_int_cst_lt (p->high, low))
4591 q = &p->right;
4592
4593 else
4594 {
4595 /* We have an overlap; this is an error. */
4596 *duplicate = p->code_label;
4597 return 2;
4598 }
4599 }
4600
4601 /* Add this label to the chain, and succeed.
57641239 4602 Copy LOW, HIGH so they are on temporary rather than momentary
28d81abb
RK
4603 obstack and will thus survive till the end of the case statement. */
4604
4605 r = (struct case_node *) oballoc (sizeof (struct case_node));
4606 r->low = copy_node (low);
4607
4608 /* If the bounds are equal, turn this into the one-value case. */
4609
4610 if (tree_int_cst_equal (low, high))
4611 r->high = r->low;
4612 else
4613 {
4614 r->high = copy_node (high);
4615 case_stack->data.case_stmt.num_ranges++;
4616 }
4617
4618 r->code_label = label;
28d81abb
RK
4619 expand_label (label);
4620
4621 *q = r;
4622 r->parent = p;
4623 r->left = 0;
4624 r->right = 0;
4625 r->balance = 0;
4626
4627 while (p)
4628 {
4629 struct case_node *s;
4630
4631 if (r == p->left)
4632 {
4633 int b;
4634
4635 if (! (b = p->balance))
4636 /* Growth propagation from left side. */
4637 p->balance = -1;
4638 else if (b < 0)
4639 {
4640 if (r->balance < 0)
4641 {
4642 /* R-Rotation */
4643 if ((p->left = s = r->right))
4644 s->parent = p;
4645
4646 r->right = p;
4647 p->balance = 0;
4648 r->balance = 0;
4649 s = p->parent;
4650 p->parent = r;
4651
4652 if ((r->parent = s))
4653 {
4654 if (s->left == p)
4655 s->left = r;
4656 else
4657 s->right = r;
4658 }
4659 else
4660 case_stack->data.case_stmt.case_list = r;
4661 }
4662 else
4663 /* r->balance == +1 */
4664 {
4665 /* LR-Rotation */
4666
4667 int b2;
4668 struct case_node *t = r->right;
4669
4670 if ((p->left = s = t->right))
4671 s->parent = p;
4672
4673 t->right = p;
4674 if ((r->right = s = t->left))
4675 s->parent = r;
4676
4677 t->left = r;
4678 b = t->balance;
4679 b2 = b < 0;
4680 p->balance = b2;
4681 b2 = -b2 - b;
4682 r->balance = b2;
4683 t->balance = 0;
4684 s = p->parent;
4685 p->parent = t;
4686 r->parent = t;
4687
4688 if ((t->parent = s))
4689 {
4690 if (s->left == p)
4691 s->left = t;
4692 else
4693 s->right = t;
4694 }
4695 else
4696 case_stack->data.case_stmt.case_list = t;
4697 }
4698 break;
4699 }
4700
4701 else
4702 {
4703 /* p->balance == +1; growth of left side balances the node. */
4704 p->balance = 0;
4705 break;
4706 }
4707 }
4708 else
4709 /* r == p->right */
4710 {
4711 int b;
4712
4713 if (! (b = p->balance))
4714 /* Growth propagation from right side. */
4715 p->balance++;
4716 else if (b > 0)
4717 {
4718 if (r->balance > 0)
4719 {
4720 /* L-Rotation */
4721
4722 if ((p->right = s = r->left))
4723 s->parent = p;
4724
4725 r->left = p;
4726 p->balance = 0;
4727 r->balance = 0;
4728 s = p->parent;
4729 p->parent = r;
4730 if ((r->parent = s))
4731 {
4732 if (s->left == p)
4733 s->left = r;
4734 else
4735 s->right = r;
4736 }
4737
4738 else
4739 case_stack->data.case_stmt.case_list = r;
4740 }
4741
4742 else
4743 /* r->balance == -1 */
4744 {
4745 /* RL-Rotation */
4746 int b2;
4747 struct case_node *t = r->left;
4748
4749 if ((p->right = s = t->left))
4750 s->parent = p;
4751
4752 t->left = p;
4753
4754 if ((r->left = s = t->right))
4755 s->parent = r;
4756
4757 t->right = r;
4758 b = t->balance;
4759 b2 = b < 0;
4760 r->balance = b2;
4761 b2 = -b2 - b;
4762 p->balance = b2;
4763 t->balance = 0;
4764 s = p->parent;
4765 p->parent = t;
4766 r->parent = t;
4767
4768 if ((t->parent = s))
4769 {
4770 if (s->left == p)
4771 s->left = t;
4772 else
4773 s->right = t;
4774 }
4775
4776 else
4777 case_stack->data.case_stmt.case_list = t;
4778 }
4779 break;
4780 }
4781 else
4782 {
4783 /* p->balance == -1; growth of right side balances the node. */
4784 p->balance = 0;
4785 break;
4786 }
4787 }
4788
4789 r = p;
4790 p = p->parent;
4791 }
4792
4793 return 0;
4794}
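/* Worked example (an editor's note, not from the original file):
   inserting the single-value cases 1, 2 and 3 in that order makes 2
   the right child of 1, and adding 3 below 2 propagates growth up to
   node 1, whose balance was already +1; the L-rotation above then
   fires, leaving 2 as the balanced root:

       1                 2
        \               / \
         2     ==>     1   3
          \
           3
*/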
4795
4796\f
4797/* Returns the number of possible values of TYPE.
4798 Returns -1 if the number is unknown or variable.
4799 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4800 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4801 do not increase monotonically (there may be duplicates);
4802 to 1 if the values increase monotonically, but not always by 1;
4803 otherwise sets it to 0. */
4804
4805HOST_WIDE_INT
4806all_cases_count (type, spareness)
4807 tree type;
4808 int *spareness;
4809{
4810 HOST_WIDE_INT count;
4811 *spareness = 0;
4812
4813 switch (TREE_CODE (type))
4814 {
4815 tree t;
4816 case BOOLEAN_TYPE:
4817 count = 2;
4818 break;
4819 case CHAR_TYPE:
4820 count = 1 << BITS_PER_UNIT;
4821 break;
4822 default:
4823 case INTEGER_TYPE:
4824 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4825 || TYPE_MAX_VALUE (type) == NULL
4826 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4827 return -1;
4828 else
4829 {
4830 /* count
4831 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4832 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4833 but with overflow checking. */
4834 tree mint = TYPE_MIN_VALUE (type);
4835 tree maxt = TYPE_MAX_VALUE (type);
4836 HOST_WIDE_INT lo, hi;
4837 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4838 &lo, &hi);
4839 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4840 lo, hi, &lo, &hi);
4841 add_double (lo, hi, 1, 0, &lo, &hi);
4842 if (hi != 0 || lo < 0)
4843 return -2;
4844 count = lo;
4845 }
4846 break;
4847 case ENUMERAL_TYPE:
4848 count = 0;
4849 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4850 {
4851 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4852 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4853 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4854 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4855 *spareness = 1;
4856 count++;
4857 }
4858 if (*spareness == 1)
4859 {
4860 tree prev = TREE_VALUE (TYPE_VALUES (type));
4861 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4862 {
4863 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4864 {
4865 *spareness = 2;
4866 break;
4867 }
4868 prev = TREE_VALUE (t);
4869 }
4870
4871 }
4872 }
4873 return count;
4874}
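/* Worked example (an editor's note, not from the original file): for
   enum e { A, B, C = 5 }, with the usual minimum value of 0, the loop
   above sees 0, 1, 5; the third value fails the MIN + COUNT test, so
   the function returns 3 with *SPARENESS set to 1 (monotonic, but with
   gaps).  If C instead duplicated B's value, the second pass would
   find a non-increasing pair and set *SPARENESS to 2.  */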
4875
4876
4877#define BITARRAY_TEST(ARRAY, INDEX) \
4878 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4879 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4880#define BITARRAY_SET(ARRAY, INDEX) \
4881 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4882 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
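/* Worked example (an editor's note, not from the original file): with
   HOST_BITS_PER_CHAR == 8, offset 10 falls in byte 10 / 8 == 1 at bit
   10 % 8 == 2, so BITARRAY_SET (CASES_SEEN, 10) ors 1 << 2 into
   cases_seen[1], and BITARRAY_TEST reads the same bit back.  */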
4883
4884/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4885 with the case values we have seen, assuming the case expression
4886 has the given TYPE.
4887 SPARSENESS is as determined by all_cases_count.
4888
4889 The time needed is proportional to COUNT, unless
4890 SPARSENESS is 2, in which case quadratic time is needed. */
4891
4892void
4893mark_seen_cases (type, cases_seen, count, sparseness)
4894 tree type;
4895 unsigned char *cases_seen;
4896 long count;
4897 int sparseness;
4898{
4899 tree next_node_to_try = NULL_TREE;
4900 long next_node_offset = 0;
4901
4902 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4903 tree val = make_node (INTEGER_CST);
4904 TREE_TYPE (val) = type;
4905 if (! root)
4906 ; /* Do nothing */
4907 else if (sparseness == 2)
4908 {
4909 tree t;
4910 HOST_WIDE_INT xlo;
4911
4912 /* This less efficient loop is only needed to handle
4913 duplicate case values (multiple enum constants
4914 with the same value). */
4915 TREE_TYPE (val) = TREE_TYPE (root->low);
4916 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4917 t = TREE_CHAIN (t), xlo++)
4918 {
4919 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4920 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4921 n = root;
4922 do
4923 {
4924 /* Keep going past elements distinctly greater than VAL. */
4925 if (tree_int_cst_lt (val, n->low))
4926 n = n->left;
4927
4928 /* or distinctly less than VAL. */
4929 else if (tree_int_cst_lt (n->high, val))
4930 n = n->right;
4931
4932 else
4933 {
4934 /* We have found a matching range. */
4935 BITARRAY_SET (cases_seen, xlo);
4936 break;
94d6511c
PB
4937 }
4938 }
5720c7e7
RK
4939 while (n);
4940 }
4941 }
4942 else
4943 {
4944 if (root->left)
4945 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4946 for (n = root; n; n = n->right)
4947 {
4948 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4949 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4950 while ( ! tree_int_cst_lt (n->high, val))
94d6511c 4951 {
5720c7e7
RK
4952 /* Calculate (into xlo) the "offset" of the integer (val).
4953 The element with lowest value has offset 0, the next smallest
4954 element has offset 1, etc. */
4955
4956 HOST_WIDE_INT xlo, xhi;
4957 tree t;
94d6511c
PB
4958 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4959 {
4960 /* The TYPE_VALUES will be in increasing order, so
4961 start searching where we last ended. */
4962 t = next_node_to_try;
4963 xlo = next_node_offset;
4964 xhi = 0;
4965 for (;;)
4966 {
4967 if (t == NULL_TREE)
4968 {
4969 t = TYPE_VALUES (type);
4970 xlo = 0;
4971 }
4972 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4973 {
4974 next_node_to_try = TREE_CHAIN (t);
4975 next_node_offset = xlo + 1;
4976 break;
4977 }
4978 xlo++;
4979 t = TREE_CHAIN (t);
4980 if (t == next_node_to_try)
5720c7e7
RK
4981 {
4982 xlo = -1;
4983 break;
4984 }
94d6511c
PB
4985 }
4986 }
4987 else
4988 {
4989 t = TYPE_MIN_VALUE (type);
4990 if (t)
4991 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4992 &xlo, &xhi);
4993 else
4994 xlo = xhi = 0;
4995 add_double (xlo, xhi,
4996 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4997 &xlo, &xhi);
4998 }
4999
9dd53f1e 5000 if (xhi == 0 && xlo >= 0 && xlo < count)
94d6511c 5001 BITARRAY_SET (cases_seen, xlo);
5720c7e7
RK
5002 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5003 1, 0,
5004 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 5005 }
94d6511c
PB
5006 }
5007 }
5008}
5009
28d81abb
RK
5010/* Called when the index of a switch statement is an enumerated type
5011 and there is no default label.
5012
5013 Checks that all enumeration literals are covered by the case
5014 expressions of a switch. Also, warn if there are any extra
5015 switch cases that are *not* elements of the enumerated type.
5016
5017 If all enumeration literals were covered by the case expressions,
5018 turn one of the expressions into the default expression since it should
5019 not be possible to fall through such a switch. */
5020
5021void
5022check_for_full_enumeration_handling (type)
5023 tree type;
5024{
5025 register struct case_node *n;
28d81abb 5026 register tree chain;
69d4ca36
RL
5027#if 0 /* variable used by 'if 0'ed code below. */
5028 register struct case_node **l;
28d81abb 5029 int all_values = 1;
69d4ca36 5030#endif
28d81abb 5031
0f41302f 5032 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
5033 int sparseness = 0;
5034
0f41302f 5035 /* The number of possible selector values. */
94d6511c
PB
5036 HOST_WIDE_INT size;
5037
5038 /* For each possible selector value, a one iff it has been matched
0f41302f 5039 by a case value alternative. */
94d6511c
PB
5040 unsigned char *cases_seen;
5041
0f41302f 5042 /* The allocated size of cases_seen, in chars. */
94d6511c 5043 long bytes_needed;
94d6511c 5044
94d6511c
PB
5045 if (! warn_switch)
5046 return;
5047
5048 size = all_cases_count (type, &sparseness);
5049 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 5050
94d6511c 5051 if (size > 0 && size < 600000
c5c76735
JL
5052 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5053 this optimization if we don't have enough memory rather than
5054 aborting, as xmalloc would do. */
3de90026 5055 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
28d81abb 5056 {
94d6511c
PB
5057 long i;
5058 tree v = TYPE_VALUES (type);
28d81abb 5059
94d6511c
PB
5060 /* The time complexity of this code is normally O(N), where
5061 N is the number of members in the enumerated type.
5062 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 5063 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
5064
5065 mark_seen_cases (type, cases_seen, size, sparseness);
5066
5067 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
28d81abb 5068 {
94d6511c 5069 if (BITARRAY_TEST (cases_seen, i) == 0)
1ddde1cd 5070 warning ("enumeration value `%s' not handled in switch",
94d6511c 5071 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
28d81abb 5072 }
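 /* E.g. for an illustrative enum { RED, GREEN, BLUE } with only
 `case RED:' and `case GREEN:' present, this loop emits:
 warning: enumeration value `BLUE' not handled in switch */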
94d6511c
PB
5073
5074 free (cases_seen);
28d81abb
RK
5075 }
5076
5077 /* Now we go the other way around; we warn if there are case
ac2a9454 5078 expressions that don't correspond to enumerators. This can
28d81abb 5079 occur since C and C++ don't enforce type-checking of
0f41302f 5080 assignments to enumeration variables. */
28d81abb 5081
5720c7e7
RK
5082 if (case_stack->data.case_stmt.case_list
5083 && case_stack->data.case_stmt.case_list->left)
5084 case_stack->data.case_stmt.case_list
5085 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
28d81abb
RK
5086 if (warn_switch)
5087 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5088 {
5089 for (chain = TYPE_VALUES (type);
5090 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5091 chain = TREE_CHAIN (chain))
5092 ;
5093
5094 if (!chain)
3b24f55b
RS
5095 {
5096 if (TYPE_NAME (type) == 0)
e016950d
KG
5097 warning ("case value `%ld' not in enumerated type",
5098 (long) TREE_INT_CST_LOW (n->low));
3b24f55b 5099 else
e016950d
KG
5100 warning ("case value `%ld' not in enumerated type `%s'",
5101 (long) TREE_INT_CST_LOW (n->low),
3b24f55b
RS
5102 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5103 == IDENTIFIER_NODE)
5104 ? TYPE_NAME (type)
5105 : DECL_NAME (TYPE_NAME (type))));
5106 }
1ddde1cd
RS
5107 if (!tree_int_cst_equal (n->low, n->high))
5108 {
5109 for (chain = TYPE_VALUES (type);
5110 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5111 chain = TREE_CHAIN (chain))
5112 ;
5113
5114 if (!chain)
3b24f55b
RS
5115 {
5116 if (TYPE_NAME (type) == 0)
e016950d
KG
5117 warning ("case value `%ld' not in enumerated type",
5118 (long) TREE_INT_CST_LOW (n->high));
3b24f55b 5119 else
e016950d
KG
5120 warning ("case value `%ld' not in enumerated type `%s'",
5121 (long) TREE_INT_CST_LOW (n->high),
3b24f55b
RS
5122 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5123 == IDENTIFIER_NODE)
5124 ? TYPE_NAME (type)
5125 : DECL_NAME (TYPE_NAME (type))));
5126 }
1ddde1cd 5127 }
28d81abb
RK
5128 }
5129
ae8cb346
RS
5130#if 0
5131 /* ??? This optimization is disabled because it causes valid programs to
5132 fail. ANSI C does not guarantee that an expression with enum type
9faa82d8 5133 will have a value that is the same as one of the enumeration literals. */
ae8cb346 5134
28d81abb
RK
5135 /* If all values were found as case labels, make one of them the default
5136 label. Thus, this switch will never fall through. We arbitrarily pick
5137 the last one to make the default since this is likely the most
5138 efficient choice. */
5139
5140 if (all_values)
5141 {
5142 for (l = &case_stack->data.case_stmt.case_list;
5143 (*l)->right != 0;
5144 l = &(*l)->right)
5145 ;
5146
5147 case_stack->data.case_stmt.default_label = (*l)->code_label;
5148 *l = 0;
5149 }
ae8cb346 5150#endif /* 0 */
28d81abb 5151}
ca695ac9 5152
28d81abb
RK
5153\f
5154/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 5155 in which ORIG_INDEX is the expression to be tested.
28d81abb
RK
5156 Generate the code to test it and jump to the right place. */
5157
5158void
5159expand_end_case (orig_index)
5160 tree orig_index;
5161{
07444f1d 5162 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
28d81abb
RK
5163 rtx default_label = 0;
5164 register struct case_node *n;
85066503 5165 unsigned int count;
28d81abb 5166 rtx index;
ca695ac9 5167 rtx table_label;
28d81abb
RK
5168 int ncases;
5169 rtx *labelvec;
5170 register int i;
5171 rtx before_case;
5172 register struct nesting *thiscase = case_stack;
1b0cb6fc 5173 tree index_expr, index_type;
ca695ac9
JB
5174 int unsignedp;
5175
ca695ac9
JB
5176 table_label = gen_label_rtx ();
5177 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
5178 index_type = TREE_TYPE (index_expr);
5179 unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
5180
5181 do_pending_stack_adjust ();
5182
feb60352
R
5183 /* This might get a spurious warning in the presence of a syntax error;
5184 it could be fixed by moving the call to check_seenlabel after the
5185 check for error_mark_node, and copying the code of check_seenlabel that
5186 deals with case_stack->data.case_stmt.line_number_status /
5187 restore_line_number_status in front of the call to end_cleanup_deferral.
5188 However, this might miss some useful warnings in the presence of
5189 non-syntax errors. */
a11759a3
JR
5190 check_seenlabel ();
5191
28d81abb 5192 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 5193 if (index_type != error_mark_node)
28d81abb
RK
5194 {
5195 /* If switch expression was an enumerated type, check that all
5196 enumeration literals are covered by the cases.
5197 No sense trying this if there's a default case, however. */
5198
5199 if (!thiscase->data.case_stmt.default_label
5200 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5201 && TREE_CODE (index_expr) != INTEGER_CST)
5202 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5203
28d81abb
RK
5204 /* If we don't have a default-label, create one here,
5205 after the body of the switch. */
5206 if (thiscase->data.case_stmt.default_label == 0)
5207 {
5208 thiscase->data.case_stmt.default_label
5209 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5210 expand_label (thiscase->data.case_stmt.default_label);
5211 }
5212 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5213
5214 before_case = get_last_insn ();
5215
5720c7e7
RK
5216 if (thiscase->data.case_stmt.case_list
5217 && thiscase->data.case_stmt.case_list->left)
b059139c
RK
5218 thiscase->data.case_stmt.case_list
5219 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5220
28d81abb
RK
5221 /* Simplify the case-list before we count it. */
5222 group_case_nodes (thiscase->data.case_stmt.case_list);
5223
5224 /* Get upper and lower bounds of case values.
5225 Also convert all the case values to the index expr's data type. */
5226
5227 count = 0;
5228 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5229 {
5230 /* Check low and high label values are integers. */
5231 if (TREE_CODE (n->low) != INTEGER_CST)
5232 abort ();
5233 if (TREE_CODE (n->high) != INTEGER_CST)
5234 abort ();
5235
1b0cb6fc
RK
5236 n->low = convert (index_type, n->low);
5237 n->high = convert (index_type, n->high);
28d81abb
RK
5238
5239 /* Count the elements and track the largest and smallest
5240 of them (treating them as signed even if they are not). */
5241 if (count++ == 0)
5242 {
5243 minval = n->low;
5244 maxval = n->high;
5245 }
5246 else
5247 {
5248 if (INT_CST_LT (n->low, minval))
5249 minval = n->low;
5250 if (INT_CST_LT (maxval, n->high))
5251 maxval = n->high;
5252 }
5253 /* A range counts double, since it requires two compares. */
5254 if (! tree_int_cst_equal (n->low, n->high))
5255 count++;
5256 }
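 /* Example of the count semantics: `case 1:' contributes 1, while a
 range label such as `case 5 ... 8:' contributes 2, since it will
 need two compares. */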
5257
3474db0e
RS
5258 orig_minval = minval;
5259
28d81abb
RK
5260 /* Compute span of values. */
5261 if (count != 0)
1b0cb6fc 5262 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5263
956d6950 5264 end_cleanup_deferral ();
4c581243 5265
1b0cb6fc 5266 if (count == 0)
28d81abb
RK
5267 {
5268 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5269 emit_queue ();
5270 emit_jump (default_label);
5271 }
3474db0e 5272
28d81abb
RK
5273 /* If range of values is much bigger than number of values,
5274 make a sequence of conditional branches instead of a dispatch.
5275 If the switch-index is a constant, do it this way
5276 because we can optimize it. */
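 /* For instance (numbers illustrative), five case values spread over
 the span 0..1000 fail the `range <= 10 * count' test below and are
 therefore compiled as compare-and-branch code, not a dispatch table. */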
4f73c5dd
TW
5277
5278#ifndef CASE_VALUES_THRESHOLD
28d81abb 5279#ifdef HAVE_casesi
4f73c5dd 5280#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
28d81abb 5281#else
4f73c5dd
TW
5282 /* If machine does not have a case insn that compares the
5283 bounds, this means extra overhead for dispatch tables
5284 which raises the threshold for using them. */
5285#define CASE_VALUES_THRESHOLD 5
5286#endif /* HAVE_casesi */
5287#endif /* CASE_VALUES_THRESHOLD */
5288
5289 else if (TREE_INT_CST_HIGH (range) != 0
c84e2712 5290 || count < (unsigned int) CASE_VALUES_THRESHOLD
37366632
RK
5291 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5292 > 10 * count)
3f6fe18e
RK
5293#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5294 || flag_pic
5295#endif
28d81abb 5296 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 5297 /* These will reduce to a constant. */
28d81abb 5298 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 5299 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 5300 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
b4ac57ab
RS
5301 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5302 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5303 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 5304 {
37366632 5305 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5306
5307 /* If the index is a short or char for which we do not have
5308 an insn to handle comparisons directly, convert it to
5309 a full integer now, rather than letting each comparison
5310 generate the conversion. */
5311
5312 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5313 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5314 == CODE_FOR_nothing))
5315 {
5316 enum machine_mode wider_mode;
5317 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5318 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5319 if (cmp_optab->handlers[(int) wider_mode].insn_code
5320 != CODE_FOR_nothing)
5321 {
5322 index = convert_to_mode (wider_mode, index, unsignedp);
5323 break;
5324 }
5325 }
5326
5327 emit_queue ();
5328 do_pending_stack_adjust ();
5329
5330 index = protect_from_queue (index, 0);
5331 if (GET_CODE (index) == MEM)
5332 index = copy_to_reg (index);
5333 if (GET_CODE (index) == CONST_INT
5334 || TREE_CODE (index_expr) == INTEGER_CST)
5335 {
5336 /* Make a tree node with the proper constant value
5337 if we don't already have one. */
5338 if (TREE_CODE (index_expr) != INTEGER_CST)
5339 {
5340 index_expr
5341 = build_int_2 (INTVAL (index),
e9a042b6 5342 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5343 index_expr = convert (index_type, index_expr);
28d81abb
RK
5344 }
5345
5346 /* For constant index expressions we need only
5347 issue an unconditional branch to the appropriate
5348 target code. The job of removing any unreachable
5349 code is left to the optimization phase if the
5350 "-O" option is specified. */
1b0cb6fc
RK
5351 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5352 if (! tree_int_cst_lt (index_expr, n->low)
5353 && ! tree_int_cst_lt (n->high, index_expr))
5354 break;
5355
28d81abb
RK
5356 if (n)
5357 emit_jump (label_rtx (n->code_label));
5358 else
5359 emit_jump (default_label);
5360 }
5361 else
5362 {
5363 /* If the index expression is not constant we generate
5364 a binary decision tree to select the appropriate
5365 target code. This is done as follows:
5366
5367 The list of cases is rearranged into a binary tree,
5368 nearly optimal assuming equal probability for each case.
5369
5370 The tree is transformed into RTL, eliminating
5371 redundant test conditions at the same time.
5372
5373 If program flow could reach the end of the
5374 decision tree an unconditional jump to the
5375 default code is emitted. */
5376
5377 use_cost_table
5378 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
28d81abb 5379 && estimate_case_costs (thiscase->data.case_stmt.case_list));
37366632
RK
5380 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5381 NULL_PTR);
28d81abb 5382 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5383 default_label, index_type);
28d81abb
RK
5384 emit_jump_if_reachable (default_label);
5385 }
5386 }
5387 else
5388 {
5389 int win = 0;
5390#ifdef HAVE_casesi
5391 if (HAVE_casesi)
5392 {
c4fcf531 5393 enum machine_mode index_mode = SImode;
5130a5cc 5394 int index_bits = GET_MODE_BITSIZE (index_mode);
086f237d
JW
5395 rtx op1, op2;
5396 enum machine_mode op_mode;
c4fcf531 5397
28d81abb 5398 /* Convert the index to SImode. */
1b0cb6fc 5399 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
c4fcf531 5400 > GET_MODE_BITSIZE (index_mode))
28d81abb 5401 {
1b0cb6fc 5402 enum machine_mode omode = TYPE_MODE (index_type);
37366632 5403 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
af2682ef
RS
5404
5405 /* We must handle the endpoints in the original mode. */
1b0cb6fc 5406 index_expr = build (MINUS_EXPR, index_type,
28d81abb
RK
5407 index_expr, minval);
5408 minval = integer_zero_node;
37366632 5409 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
c5d5d461
JL
5410 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5411 omode, 1, 0, default_label);
af2682ef
RS
5412 /* Now we can safely truncate. */
5413 index = convert_to_mode (index_mode, index, 0);
5414 }
5415 else
5416 {
1b0cb6fc 5417 if (TYPE_MODE (index_type) != index_mode)
d3b35d75
RK
5418 {
5419 index_expr = convert (type_for_size (index_bits, 0),
5420 index_expr);
5421 index_type = TREE_TYPE (index_expr);
5422 }
5423
37366632 5424 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5425 }
28d81abb
RK
5426 emit_queue ();
5427 index = protect_from_queue (index, 0);
5428 do_pending_stack_adjust ();
5429
086f237d
JW
5430 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5431 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5432 (index, op_mode))
5433 index = copy_to_mode_reg (op_mode, index);
5434
5435 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5436
5437 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5438 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5439 (op1, op_mode))
5440 op1 = copy_to_mode_reg (op_mode, op1);
5441
5442 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5443
5444 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5445 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5446 (op2, op_mode))
5447 op2 = copy_to_mode_reg (op_mode, op2);
5448
5449 emit_jump_insn (gen_casesi (index, op1, op2,
28d81abb
RK
5450 table_label, default_label));
5451 win = 1;
5452 }
5453#endif
5454#ifdef HAVE_tablejump
5455 if (! win && HAVE_tablejump)
5456 {
5457 index_expr = convert (thiscase->data.case_stmt.nominal_type,
1b0cb6fc 5458 fold (build (MINUS_EXPR, index_type,
b4ac57ab 5459 index_expr, minval)));
d3b35d75 5460 index_type = TREE_TYPE (index_expr);
37366632 5461 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5462 emit_queue ();
af2682ef 5463 index = protect_from_queue (index, 0);
28d81abb
RK
5464 do_pending_stack_adjust ();
5465
1b0cb6fc 5466 do_tablejump (index, TYPE_MODE (index_type),
37366632 5467 expand_expr (range, NULL_RTX, VOIDmode, 0),
28d81abb
RK
5468 table_label, default_label);
5469 win = 1;
5470 }
5471#endif
5472 if (! win)
5473 abort ();
5474
5475 /* Get table of labels to jump to, in order of case index. */
5476
5477 ncases = TREE_INT_CST_LOW (range) + 1;
5478 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4c9a05bc 5479 bzero ((char *) labelvec, ncases * sizeof (rtx));
28d81abb
RK
5480
5481 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5482 {
37366632 5483 register HOST_WIDE_INT i
3474db0e 5484 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
28d81abb
RK
5485
5486 while (1)
5487 {
5488 labelvec[i]
38a448ca 5489 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3474db0e 5490 if (i + TREE_INT_CST_LOW (orig_minval)
28d81abb
RK
5491 == TREE_INT_CST_LOW (n->high))
5492 break;
5493 i++;
5494 }
5495 }
5496
5497 /* Fill in the gaps with the default. */
5498 for (i = 0; i < ncases; i++)
5499 if (labelvec[i] == 0)
38a448ca 5500 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
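 /* Illustration (labels assumed): for `case 2 ... 4:' and `case 7:'
 with minval 2, ncases is 6; labelvec[0..2] refer to the range's
 label, labelvec[5] to the label for 7, and the gaps labelvec[3..4]
 fall back to default_label. */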
28d81abb
RK
5501
5502 /* Output the table. */
5503 emit_label (table_label);
5504
18543a22 5505 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5506 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5507 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353 5508 gen_rtvec_v (ncases, labelvec),
8f985ec4 5509 const0_rtx, const0_rtx));
28d81abb 5510 else
38a448ca
RH
5511 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5512 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5513
5514 /* If the case insn drops through the table,
5515 after the table we must jump to the default-label.
5516 Otherwise record no drop-through after the table. */
5517#ifdef CASE_DROPS_THROUGH
5518 emit_jump (default_label);
5519#else
5520 emit_barrier ();
5521#endif
5522 }
5523
915f619f
JW
5524 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5525 reorder_insns (before_case, get_last_insn (),
28d81abb
RK
5526 thiscase->data.case_stmt.start);
5527 }
4c581243 5528 else
956d6950 5529 end_cleanup_deferral ();
1b0cb6fc 5530
28d81abb
RK
5531 if (thiscase->exit_label)
5532 emit_label (thiscase->exit_label);
5533
5534 POPSTACK (case_stack);
5535
5536 free_temp_slots ();
5537}
5538
57641239
RK
5539/* Convert the tree NODE into a list linked by the right field, with the left
5540 field zeroed. RIGHT is used for recursion; it is a list to be placed
5541 rightmost in the resulting list. */
5542
5543static struct case_node *
5544case_tree2list (node, right)
5545 struct case_node *node, *right;
5546{
5547 struct case_node *left;
5548
5549 if (node->right)
5550 right = case_tree2list (node->right, right);
5551
5552 node->right = right;
51723711 5553 if ((left = node->left))
57641239
RK
5554 {
5555 node->left = 0;
5556 return case_tree2list (left, node);
5557 }
5558
5559 return node;
5560}
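/* Sketch of the effect (values illustrative): a tree with root 5,
 left child 3 and right child 8 becomes the list 3 -> 5 -> 8, linked
 through the `right' fields with every `left' field zeroed. */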
ca695ac9 5561
28d81abb
RK
5562/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5563
5564static void
5565do_jump_if_equal (op1, op2, label, unsignedp)
5566 rtx op1, op2, label;
5567 int unsignedp;
5568{
5569 if (GET_CODE (op1) == CONST_INT
5570 && GET_CODE (op2) == CONST_INT)
5571 {
5572 if (INTVAL (op1) == INTVAL (op2))
5573 emit_jump (label);
5574 }
5575 else
5576 {
5577 enum machine_mode mode = GET_MODE (op1);
5578 if (mode == VOIDmode)
5579 mode = GET_MODE (op2);
c5d5d461
JL
5580 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5581 0, label);
28d81abb
RK
5582 }
5583}
5584\f
5585/* Not all case values are encountered equally. This function
5586 uses a heuristic to weight case labels, in cases where that
5587 looks like a reasonable thing to do.
5588
5589 Right now, all we try to guess is text, and we establish the
5590 following weights:
5591
5592 chars above space: 16
5593 digits: 16
5594 default: 12
5595 space, punct: 8
5596 tab: 4
5597 newline: 2
5598 other "\" chars: 1
5599 remaining chars: 0
5600
5601 If we find any cases in the switch that are not either -1 or in the range
5602 of valid ASCII characters, or are control characters other than those
5603 commonly used with "\", don't treat this switch scanning text.
5604
5605 Return 1 if these nodes are suitable for cost estimation, otherwise
5606 return 0. */
5607
5608static int
5609estimate_case_costs (node)
5610 case_node_ptr node;
5611{
5612 tree min_ascii = build_int_2 (-1, -1);
5613 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5614 case_node_ptr n;
5615 int i;
5616
5617 /* If we haven't already made the cost table, make it now. Note that the
5618 lower bound of the table is -1, not zero. */
5619
5620 if (cost_table == NULL)
5621 {
3de90026 5622 cost_table = ((short *) xcalloc (129, sizeof (short))) + 1;
28d81abb
RK
5623
5624 for (i = 0; i < 128; i++)
5625 {
e9a780ec 5626 if (ISALNUM (i))
28d81abb 5627 cost_table[i] = 16;
e9a780ec 5628 else if (ISPUNCT (i))
28d81abb 5629 cost_table[i] = 8;
e9a780ec 5630 else if (ISCNTRL (i))
28d81abb
RK
5631 cost_table[i] = -1;
5632 }
5633
5634 cost_table[' '] = 8;
5635 cost_table['\t'] = 4;
5636 cost_table['\0'] = 4;
5637 cost_table['\n'] = 2;
5638 cost_table['\f'] = 1;
5639 cost_table['\v'] = 1;
5640 cost_table['\b'] = 1;
5641 }
5642
5643 /* See if all the case expressions look like text. It is text if the
5644 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5645 as signed arithmetic since we don't want to ever access cost_table with a
5646 value less than -1. Also check that none of the constants in a range
5647 are strange control characters. */
5648
5649 for (n = node; n; n = n->right)
5650 {
5651 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5652 return 0;
5653
5654 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5655 if (cost_table[i] < 0)
5656 return 0;
5657 }
5658
5659 /* All interesting values are within the range of interesting
5660 ASCII characters. */
5661 return 1;
5662}
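/* Usage note: a nonzero return enables use_cost_table, so that
 balance_case_nodes splits the case list by cumulative cost; e.g.
 a label for 'e' weighs 16 while one for '\b' weighs only 1, which
 tends to move likely characters toward the root of the tree. */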
5663
5664/* Scan an ordered list of case nodes
5665 combining those with consecutive values or ranges.
5666
5667 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5668
5669static void
5670group_case_nodes (head)
5671 case_node_ptr head;
5672{
5673 case_node_ptr node = head;
5674
5675 while (node)
5676 {
5677 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5678 rtx lb2;
28d81abb
RK
5679 case_node_ptr np = node;
5680
5681 /* Try to group the successors of NODE with NODE. */
5682 while (((np = np->right) != 0)
5683 /* Do they jump to the same place? */
ad7e369f
JL
5684 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5685 || (lb != 0 && lb2 != 0
5686 && simplejump_p (lb)
5687 && simplejump_p (lb2)
5688 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5689 SET_SRC (PATTERN (lb2)))))
28d81abb
RK
5690 /* Are their ranges consecutive? */
5691 && tree_int_cst_equal (np->low,
5692 fold (build (PLUS_EXPR,
5693 TREE_TYPE (node->high),
5694 node->high,
5695 integer_one_node)))
5696 /* An overflow is not consecutive. */
5697 && tree_int_cst_lt (node->high,
5698 fold (build (PLUS_EXPR,
5699 TREE_TYPE (node->high),
5700 node->high,
5701 integer_one_node))))
5702 {
5703 node->high = np->high;
5704 }
5705 /* NP is the first node after NODE which can't be grouped with it.
5706 Delete the nodes in between, and move on to that node. */
5707 node->right = np;
5708 node = np;
5709 }
5710}
5711
5712/* Take an ordered list of case nodes
5713 and transform them into a near-optimal binary tree,
6dc42e49 5714 on the assumption that any target code selection value is as
28d81abb
RK
5715 likely as any other.
5716
5717 The transformation is performed by splitting the ordered
5718 list into two equal sections plus a pivot. The parts are
5719 then attached to the pivot as left and right branches. Each
38e01259 5720 branch is then transformed recursively. */
28d81abb
RK
5721
5722static void
5723balance_case_nodes (head, parent)
5724 case_node_ptr *head;
5725 case_node_ptr parent;
5726{
5727 register case_node_ptr np;
5728
5729 np = *head;
5730 if (np)
5731 {
5732 int cost = 0;
5733 int i = 0;
5734 int ranges = 0;
5735 register case_node_ptr *npp;
5736 case_node_ptr left;
5737
5738 /* Count the number of entries on branch. Also count the ranges. */
5739
5740 while (np)
5741 {
5742 if (!tree_int_cst_equal (np->low, np->high))
5743 {
5744 ranges++;
5745 if (use_cost_table)
5746 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5747 }
5748
5749 if (use_cost_table)
5750 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5751
5752 i++;
5753 np = np->right;
5754 }
5755
5756 if (i > 2)
5757 {
5758 /* Split this list if it is long enough for that to help. */
5759 npp = head;
5760 left = *npp;
5761 if (use_cost_table)
5762 {
5763 /* Find the place in the list that bisects the list's total cost;
5764 here I is set to half the total cost. */
5765 int n_moved = 0;
5766 i = (cost + 1) / 2;
5767 while (1)
5768 {
5769 /* Skip nodes while their cost does not reach that amount. */
5770 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5771 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5772 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5773 if (i <= 0)
5774 break;
5775 npp = &(*npp)->right;
5776 n_moved += 1;
5777 }
5778 if (n_moved == 0)
5779 {
5780 /* Leave this branch lopsided, but optimize left-hand
5781 side and fill in `parent' fields for right-hand side. */
5782 np = *head;
5783 np->parent = parent;
5784 balance_case_nodes (&np->left, np);
5785 for (; np->right; np = np->right)
5786 np->right->parent = np;
5787 return;
5788 }
5789 }
5790 /* If there are just three nodes, split at the middle one. */
5791 else if (i == 3)
5792 npp = &(*npp)->right;
5793 else
5794 {
5795 /* Find the place in the list that bisects the list's total cost,
5796 where ranges count as 2;
5797 here I is set to half the total cost. */
5798 i = (i + ranges + 1) / 2;
5799 while (1)
5800 {
5801 /* Skip nodes while their cost does not reach that amount. */
5802 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5803 i--;
5804 i--;
5805 if (i <= 0)
5806 break;
5807 npp = &(*npp)->right;
5808 }
5809 }
5810 *head = np = *npp;
5811 *npp = 0;
5812 np->parent = parent;
5813 np->left = left;
5814
5815 /* Optimize each of the two split parts. */
5816 balance_case_nodes (&np->left, np);
5817 balance_case_nodes (&np->right, np);
5818 }
5819 else
5820 {
5821 /* Else leave this branch as one level,
5822 but fill in `parent' fields. */
5823 np = *head;
5824 np->parent = parent;
5825 for (; np->right; np = np->right)
5826 np->right->parent = np;
5827 }
5828 }
5829}
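/* Worked example (uniform costs assumed): the list 1 -> 2 -> 3 -> 4 -> 5
 splits at 3, giving a tree rooted at 3 with left part {1, 2} and
 right part {4, 5}, each of which is then balanced recursively. */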
5830\f
5831/* Search the parent sections of the case node tree
5832 to see if a test for the lower bound of NODE would be redundant.
5833 INDEX_TYPE is the type of the index expression.
5834
5835 The instructions to generate the case decision tree are
5836 output in the same order as nodes are processed so it is
5837 known that if a parent node checks the range of the current
5838 node minus one that the current node is bounded at its lower
5839 span. Thus the test would be redundant. */
5840
5841static int
5842node_has_low_bound (node, index_type)
5843 case_node_ptr node;
5844 tree index_type;
5845{
5846 tree low_minus_one;
5847 case_node_ptr pnode;
5848
5849 /* If the lower bound of this node is the lowest value in the index type,
5850 we need not test it. */
5851
5852 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5853 return 1;
5854
5855 /* If this node has a left branch, the value at the left must be less
5856 than that at this node, so it cannot be bounded at the bottom and
5857 we need not bother testing any further. */
5858
5859 if (node->left)
5860 return 0;
5861
5862 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5863 node->low, integer_one_node));
5864
5865 /* If the subtraction above overflowed, we can't verify anything.
5866 Otherwise, look for a parent that tests our value - 1. */
5867
5868 if (! tree_int_cst_lt (low_minus_one, node->low))
5869 return 0;
5870
5871 for (pnode = node->parent; pnode; pnode = pnode->parent)
5872 if (tree_int_cst_equal (low_minus_one, pnode->high))
5873 return 1;
5874
5875 return 0;
5876}
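/* Concrete case (values illustrative): if an ancestor node already
 tested its high bound of 50 and this node's low is 51, then
 low - 1 matches the parent's high, so the lower-bound test here
 is redundant. */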
5877
5878/* Search the parent sections of the case node tree
5879 to see if a test for the upper bound of NODE would be redundant.
5880 INDEX_TYPE is the type of the index expression.
5881
5882 The instructions to generate the case decision tree are
5883 output in the same order as nodes are processed so it is
5884 known that if a parent node checks the range of the current
5885 node plus one that the current node is bounded at its upper
5886 span. Thus the test would be redundant. */
5887
5888static int
5889node_has_high_bound (node, index_type)
5890 case_node_ptr node;
5891 tree index_type;
5892{
5893 tree high_plus_one;
5894 case_node_ptr pnode;
5895
e1ee5cdc
RH
5896 /* If there is no upper bound, obviously no test is needed. */
5897
5898 if (TYPE_MAX_VALUE (index_type) == NULL)
5899 return 1;
5900
28d81abb
RK
5901 /* If the upper bound of this node is the highest value in the type
5902 of the index expression, we need not test against it. */
5903
5904 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5905 return 1;
5906
5907 /* If this node has a right branch, the value at the right must be greater
5908 than that at this node, so it cannot be bounded at the top and
5909 we need not bother testing any further. */
5910
5911 if (node->right)
5912 return 0;
5913
5914 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5915 node->high, integer_one_node));
5916
5917 /* If the addition above overflowed, we can't verify anything.
5918 Otherwise, look for a parent that tests our value + 1. */
5919
5920 if (! tree_int_cst_lt (node->high, high_plus_one))
5921 return 0;
5922
5923 for (pnode = node->parent; pnode; pnode = pnode->parent)
5924 if (tree_int_cst_equal (high_plus_one, pnode->low))
5925 return 1;
5926
5927 return 0;
5928}
5929
5930/* Search the parent sections of the
5931 case node tree to see if both tests for the upper and lower
5932 bounds of NODE would be redundant. */
5933
5934static int
5935node_is_bounded (node, index_type)
5936 case_node_ptr node;
5937 tree index_type;
5938{
5939 return (node_has_low_bound (node, index_type)
5940 && node_has_high_bound (node, index_type));
5941}
5942
5943/* Emit an unconditional jump to LABEL unless it would be dead code. */
5944
5945static void
5946emit_jump_if_reachable (label)
5947 rtx label;
5948{
5949 if (GET_CODE (get_last_insn ()) != BARRIER)
5950 emit_jump (label);
5951}
5952\f
5953/* Emit step-by-step code to select a case for the value of INDEX.
5954 The thus generated decision tree follows the form of the
5955 case-node binary tree NODE, whose nodes represent test conditions.
5956 INDEX_TYPE is the type of the index of the switch.
5957
5958 Care is taken to prune redundant tests from the decision tree
5959 by detecting any boundary conditions already checked by
5960 emitted rtx. (See node_has_high_bound, node_has_low_bound
5961 and node_is_bounded, above.)
5962
5963 Where the test conditions can be shown to be redundant we emit
5964 an unconditional jump to the target code. As a further
5965 optimization, the subordinates of a tree node are examined to
5966 check for bounded nodes. In this case conditional and/or
5967 unconditional jumps as a result of the boundary check for the
5968 current node are arranged to target the subordinate's associated
38e01259 5969 code for out-of-bound conditions on the current node.
28d81abb 5970
f72aed24 5971 We can assume that when control reaches the code generated here,
28d81abb
RK
5972 the index value has already been compared with the parents
5973 of this node, and determined to be on the same side of each parent
5974 as this node is. Thus, if this node tests for the value 51,
5975 and a parent tested for 52, we don't need to consider
5976 the possibility of a value greater than 51. If another parent
5977 tests for the value 50, then this node need not test anything. */
5978
5979static void
5980emit_case_nodes (index, node, default_label, index_type)
5981 rtx index;
5982 case_node_ptr node;
5983 rtx default_label;
5984 tree index_type;
5985{
5986 /* If INDEX has an unsigned type, we must make unsigned branches. */
5987 int unsignedp = TREE_UNSIGNED (index_type);
ea90cb62 5988 typedef rtx rtx_fn ();
28d81abb
RK
5989 enum machine_mode mode = GET_MODE (index);
5990
5991 /* See if our parents have already tested everything for us.
5992 If they have, emit an unconditional jump for this node. */
5993 if (node_is_bounded (node, index_type))
5994 emit_jump (label_rtx (node->code_label));
5995
5996 else if (tree_int_cst_equal (node->low, node->high))
5997 {
5998 /* Node is single valued. First see if the index expression matches
0f41302f 5999 this node and then check our children, if any. */
28d81abb 6000
37366632 6001 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
28d81abb
RK
6002 label_rtx (node->code_label), unsignedp);
6003
6004 if (node->right != 0 && node->left != 0)
6005 {
6006 /* This node has children on both sides.
6007 Dispatch to one side or the other
6008 by comparing the index value with this node's value.
6009 If one subtree is bounded, check that one first,
6010 so we can avoid real branches in the tree. */
6011
6012 if (node_is_bounded (node->right, index_type))
6013 {
c5d5d461
JL
6014 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6015 VOIDmode, 0),
6016 GT, NULL_RTX, mode, unsignedp, 0,
6017 label_rtx (node->right->code_label));
28d81abb
RK
6018 emit_case_nodes (index, node->left, default_label, index_type);
6019 }
6020
6021 else if (node_is_bounded (node->left, index_type))
6022 {
c5d5d461
JL
6023 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6024 VOIDmode, 0),
6025 LT, NULL_RTX, mode, unsignedp, 0,
6026 label_rtx (node->left->code_label));
28d81abb
RK
6027 emit_case_nodes (index, node->right, default_label, index_type);
6028 }
6029
6030 else
6031 {
6032 /* Neither node is bounded. First distinguish the two sides;
6033 then emit the code for one side at a time. */
6034
6035 tree test_label
6036 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6037
6038 /* See if the value is on the right. */
c5d5d461
JL
6039 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6040 VOIDmode, 0),
6041 GT, NULL_RTX, mode, unsignedp, 0,
6042 label_rtx (test_label));
28d81abb
RK
6043
6044 /* Value must be on the left.
6045 Handle the left-hand subtree. */
6046 emit_case_nodes (index, node->left, default_label, index_type);
6047 /* If left-hand subtree does nothing,
6048 go to default. */
6049 emit_jump_if_reachable (default_label);
6050
6051 /* Code branches here for the right-hand subtree. */
6052 expand_label (test_label);
6053 emit_case_nodes (index, node->right, default_label, index_type);
6054 }
6055 }
6056
6057 else if (node->right != 0 && node->left == 0)
6058 {
6059 /* Here we have a right child but no left, so we issue a conditional
6060 branch to default and process the right child.
6061
6062 Omit the conditional branch to default if it would avoid only one
6063 right child; it costs too much space to save so little time. */
6064
de14fd73 6065 if (node->right->right || node->right->left
28d81abb
RK
6066 || !tree_int_cst_equal (node->right->low, node->right->high))
6067 {
6068 if (!node_has_low_bound (node, index_type))
6069 {
c5d5d461
JL
6070 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6071 NULL_RTX,
6072 VOIDmode, 0),
6073 LT, NULL_RTX, mode, unsignedp, 0,
6074 default_label);
28d81abb
RK
6075 }
6076
6077 emit_case_nodes (index, node->right, default_label, index_type);
6078 }
6079 else
6080 /* We cannot process node->right normally
6081 since we haven't ruled out the numbers less than
6082 this node's value. So handle node->right explicitly. */
6083 do_jump_if_equal (index,
37366632
RK
6084 expand_expr (node->right->low, NULL_RTX,
6085 VOIDmode, 0),
28d81abb
RK
6086 label_rtx (node->right->code_label), unsignedp);
6087 }
6088
6089 else if (node->right == 0 && node->left != 0)
6090 {
6091 /* Just one subtree, on the left. */
6092
de14fd73
RK
6093#if 0 /* The following code and comment were formerly part
6094 of the condition here, but they didn't work
6095 and I don't understand what the idea was. -- rms. */
6096 /* If our "most probable entry" is less probable
28d81abb
RK
6097 than the default label, emit a jump to
6098 the default label using condition codes
6099 already lying around. With no right branch,
6100 a branch-greater-than will get us to the default
6101 label correctly. */
de14fd73
RK
6102 if (use_cost_table
6103 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6104 ;
6105#endif /* 0 */
6106 if (node->left->left || node->left->right
28d81abb
RK
6107 || !tree_int_cst_equal (node->left->low, node->left->high))
6108 {
6109 if (!node_has_high_bound (node, index_type))
6110 {
c5d5d461
JL
6111 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6112 NULL_RTX,
6113 VOIDmode, 0),
6114 GT, NULL_RTX, mode, unsignedp, 0,
6115 default_label);
28d81abb
RK
6116 }
6117
6118 emit_case_nodes (index, node->left, default_label, index_type);
6119 }
6120 else
6121 /* We cannot process node->left normally
6122 since we haven't ruled out the numbers less than
6123 this node's value. So handle node->left explicitly. */
6124 do_jump_if_equal (index,
37366632
RK
6125 expand_expr (node->left->low, NULL_RTX,
6126 VOIDmode, 0),
28d81abb
RK
6127 label_rtx (node->left->code_label), unsignedp);
6128 }
6129 }
6130 else
6131 {
6132 /* Node is a range. These cases are very similar to those for a single
6133 value, except that we do not start by testing whether this node
6134 is the one to branch to. */
6135
6136 if (node->right != 0 && node->left != 0)
6137 {
6138 /* Node has subtrees on both sides.
6139 If the right-hand subtree is bounded,
6140 test for it first, since we can go straight there.
6141 Otherwise, we need to make a branch in the control structure,
6142 then handle the two subtrees. */
6143 tree test_label = 0;
6144
28d81abb
RK
6145
6146 if (node_is_bounded (node->right, index_type))
6147 /* Right hand node is fully bounded so we can eliminate any
6148 testing and branch directly to the target code. */
c5d5d461
JL
6149 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6150 VOIDmode, 0),
6151 GT, NULL_RTX, mode, unsignedp, 0,
6152 label_rtx (node->right->code_label));
28d81abb
RK
6153 else
6154 {
6155 /* Right hand node requires testing.
6156 Branch to a label where we will handle it later. */
6157
6158 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
c5d5d461
JL
6159 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6160 VOIDmode, 0),
6161 GT, NULL_RTX, mode, unsignedp, 0,
6162 label_rtx (test_label));
28d81abb
RK
6163 }
6164
6165 /* Value belongs to this node or to the left-hand subtree. */
6166
c5d5d461
JL
6167 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6168 VOIDmode, 0),
6169 GE, NULL_RTX, mode, unsignedp, 0,
6170 label_rtx (node->code_label));
28d81abb
RK
6171
6172 /* Handle the left-hand subtree. */
6173 emit_case_nodes (index, node->left, default_label, index_type);
6174
6175 /* If right node had to be handled later, do that now. */
6176
6177 if (test_label)
6178 {
6179 /* If the left-hand subtree fell through,
6180 don't let it fall into the right-hand subtree. */
6181 emit_jump_if_reachable (default_label);
6182
6183 expand_label (test_label);
6184 emit_case_nodes (index, node->right, default_label, index_type);
6185 }
6186 }
6187
6188 else if (node->right != 0 && node->left == 0)
6189 {
6190 /* Deal with values to the left of this node,
6191 if they are possible. */
6192 if (!node_has_low_bound (node, index_type))
6193 {
c5d5d461
JL
6194 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6195 VOIDmode, 0),
6196 LT, NULL_RTX, mode, unsignedp, 0,
6197 default_label);
28d81abb
RK
6198 }
6199
6200 /* Value belongs to this node or to the right-hand subtree. */
6201
c5d5d461
JL
6202 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6203 VOIDmode, 0),
6204 LE, NULL_RTX, mode, unsignedp, 0,
6205 label_rtx (node->code_label));
28d81abb
RK
6206
6207 emit_case_nodes (index, node->right, default_label, index_type);
6208 }
6209
6210 else if (node->right == 0 && node->left != 0)
6211 {
6212 /* Deal with values to the right of this node,
6213 if they are possible. */
6214 if (!node_has_high_bound (node, index_type))
6215 {
c5d5d461
JL
6216 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6217 VOIDmode, 0),
6218 GT, NULL_RTX, mode, unsignedp, 0,
6219 default_label);
28d81abb
RK
6220 }
6221
6222 /* Value belongs to this node or to the left-hand subtree. */
6223
c5d5d461
JL
6224 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6225 VOIDmode, 0),
6226 GE, NULL_RTX, mode, unsignedp, 0,
6227 label_rtx (node->code_label));
28d81abb
RK
6228
6229 emit_case_nodes (index, node->left, default_label, index_type);
6230 }
6231
6232 else
6233 {
6234 /* Node has no children so we check low and high bounds to remove
6235 redundant tests. Only one of the bounds can exist,
6236 since otherwise this node is bounded--a case tested already. */
6237
6238 if (!node_has_high_bound (node, index_type))
6239 {
c5d5d461
JL
6240 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6241 VOIDmode, 0),
6242 GT, NULL_RTX, mode, unsignedp, 0,
6243 default_label);
28d81abb
RK
6244 }
6245
6246 if (!node_has_low_bound (node, index_type))
6247 {
c5d5d461
JL
6248 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6249 VOIDmode, 0),
6250 LT, NULL_RTX, mode, unsignedp, 0,
6251 default_label);
28d81abb
RK
6252 }
6253
6254 emit_jump (label_rtx (node->code_label));
6255 }
6256 }
6257}
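/* Illustration of the generated shape (case values assumed): for
 single-valued cases {1, 3, 5} rooted at 3, the code jumps to 3's
 label on equality, branches above 3 to a fresh test label for the
 {5} subtree, and otherwise falls into the comparisons for {1}. */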
6258\f
6259/* These routines are used by the loop unrolling code. They copy BLOCK trees
6260 so that the debugging info will be correct for the unrolled loop. */
6261
65d98182
JL
6262/* Indexed by block number, contains a pointer to the N'th block node.
6263
6264 Allocated by the call to identify_blocks, then released after the call
6265 to reorder_blocks in the function unroll_block_trees. */
28d81abb 6266
94dc8b56 6267static tree *block_vector;
28d81abb
RK
6268
6269void
94dc8b56 6270find_loop_tree_blocks ()
28d81abb 6271{
94dc8b56 6272 tree block = DECL_INITIAL (current_function_decl);
28d81abb 6273
94dc8b56 6274 block_vector = identify_blocks (block, get_insns ());
28d81abb
RK
6275}
6276
28d81abb 6277void
94dc8b56 6278unroll_block_trees ()
28d81abb 6279{
94dc8b56 6280 tree block = DECL_INITIAL (current_function_decl);
28d81abb 6281
94dc8b56 6282 reorder_blocks (block_vector, block, get_insns ());
65d98182
JL
6283
6284 /* Release any memory allocated by identify_blocks. */
6285 if (block_vector)
6286 free (block_vector);
28d81abb 6287}