/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
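
/* Illustrative sketch (an editorial addition, not part of the original
   source): for `if (cond) stmt;' a front end would emit roughly

	expand_start_cond (cond, 0);
	... expand stmt ...
	expand_end_cond ();

   where expand_start_cond emits the conditional jump around the
   then-clause and expand_end_cond defines the label that jump targets.  */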

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;			/* AVL tree balance information */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
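
/* Illustrative sketch (an editorial addition, not part of the original
   source): a GNU C case range such as

	case 4 ... 7:

   yields one case_node with LOW = 4 and HIGH = 7, while a plain
   `case 4:' yields a node with LOW == HIGH == 4.  */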

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK) \
do { struct nesting *target = STACK; \
     struct nesting *this; \
     do { this = nesting_stack; \
	  if (loop_stack == this) \
	    loop_stack = loop_stack->next; \
	  if (cond_stack == this) \
	    cond_stack = cond_stack->next; \
	  if (block_stack == this) \
	    block_stack = block_stack->next; \
	  if (stack_block_stack == this) \
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this) \
	    case_stack = case_stack->next; \
	  nesting_depth = nesting_stack->depth - 1; \
	  nesting_stack = this->all; \
	  obstack_free (&stmt_obstack, this); } \
     while (this != target); } while (0)
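
/* Illustrative sketch (an editorial addition, not part of the original
   source): a start function such as expand_start_loop pushes roughly
   like this,

	struct nesting *thisloop = ALLOC_NESTING ();
	thisloop->all = nesting_stack;
	thisloop->next = loop_stack;
	thisloop->depth = ++nesting_depth;
	nesting_stack = loop_stack = thisloop;

   and the matching end function unwinds every per-construct stack in
   one step with POPSTACK (loop_stack).  */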
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
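
/* Illustrative sketch (an editorial addition, not part of the original
   source): a fixup is recorded for C source such as

	{
	  int v[n];
	  ...
	  goto lab;
	  ...
	}
	...
	lab: ;

   where the forward goto leaves a contour whose stack level must be
   restored, but the label's own contour is not yet known when the
   goto is expanded.  */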

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to the following label in the chain.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PARAMS ((int, const char *));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
						 rtx, int));
static void expand_null_return_1	PARAMS ((rtx, int));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
						 case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting		PARAMS ((struct nesting *));
static void mark_loop_nesting		PARAMS ((struct nesting *));
static void mark_block_nesting		PARAMS ((struct nesting *));
static void mark_case_nesting		PARAMS ((struct nesting *));
static void mark_case_node		PARAMS ((struct case_node *));
static void mark_goto_fixup		PARAMS ((struct goto_fixup *));
static void free_case_nodes		PARAMS ((case_node_ptr));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	{
	  ggc_mark (l);
	  ggc_mark_tree (l->label);
	}

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
\f
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
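
/* Illustrative sketch (an editorial addition, not part of the original
   source): expand_computed_goto is the expander behind the GNU C
   computed goto extension, e.g.

	void *labels[] = { &&l0, &&l1 };
	goto *labels[i];

   where the operand of `goto *' is the pointer expression EXP.  */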
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn ();
	   GET_CODE (insn) != JUMP_INSN;
	   insn = PREV_INSN (insn))
	continue;
      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					  REG_NOTES (insn));
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
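
/* Illustrative sketch (an editorial addition, not part of the original
   source): the nonlocal branch of expand_goto handles GNU C nested
   functions, e.g.

	void f (void)
	{
	  __label__ out;
	  void g (void) { goto out; }
	  ...
	out: ;
	}

   where expanding the goto in g must restore f's frame pointer and
   saved stack level before jumping to the label in f.  */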

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
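
/* For example (an editorial note, not part of the original source),
   n_occurrences (',', "=r,m") returns 1; expand_asm_operands uses this
   below to count the comma-separated alternatives in each operand
   constraint.  */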
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
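
/* Illustrative sketch (an editorial addition, not part of the original
   source): expand_asm handles a bare asm statement with no operands,
   e.g.

	asm ("nop");

   whose template text arrives here as a STRING_CST.  */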

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
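
/* Illustrative sketch (an editorial addition, not part of the original
   source): for

	asm volatile ("add %1,%0" : "=r" (x) : "r" (y) : "cc");

   OUTPUTS is a one-element list whose TREE_PURPOSE is the constraint
   "=r" and whose TREE_VALUE is the lvalue x; INPUTS likewise pairs "r"
   with y; CLOBBERS holds the STRING_CST "cc"; and VOL is nonzero
   because of the `volatile' keyword.  */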
1306
1307void
1308expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1309 tree string, outputs, inputs, clobbers;
1310 int vol;
3b304f5b 1311 const char *filename;
28d81abb
RK
1312 int line;
1313{
1314 rtvec argvec, constraints;
1315 rtx body;
1316 int ninputs = list_length (inputs);
1317 int noutputs = list_length (outputs);
235c5021 1318 int ninout = 0;
b4ccaa16 1319 int nclobbers;
28d81abb
RK
1320 tree tail;
1321 register int i;
1322 /* Vector of RTX's of evaluated output operands. */
1323 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
235c5021 1324 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
947255ed 1325 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
235c5021
RK
1326 enum machine_mode *inout_mode
1327 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
28d81abb
RK
1328 /* The insn we have emitted. */
1329 rtx insn;
1b3d8f8a 1330 int old_generating_concat_p = generating_concat_p;
28d81abb 1331
e5e809f4 1332 /* An ASM with no outputs needs to be treated as volatile, for now. */
296f8acc
JL
1333 if (noutputs == 0)
1334 vol = 1;
1335
7d384cc0 1336 if (current_function_check_memory_usage)
17f5f329
RK
1337 {
1338 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1339 return;
1340 }
1341
57bcb97a
RH
1342#ifdef MD_ASM_CLOBBERS
1343 /* Sometimes we wish to automatically clobber registers across an asm.
1344 Case in point is when the i386 backend moved from cc0 to a hard reg --
1345 maintaining source-level compatability means automatically clobbering
1346 the flags register. */
1347 MD_ASM_CLOBBERS (clobbers);
1348#endif
1349
c5c76735
JL
1350 if (current_function_check_memory_usage)
1351 {
1352 error ("`asm' cannot be used in function where memory usage is checked");
1353 return;
1354 }
1355
b4ccaa16
RS
1356 /* Count the number of meaningful clobbered registers, ignoring what
1357 we would ignore later. */
1358 nclobbers = 0;
1359 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1360 {
47ee9bcb 1361 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
14a774a9 1362
c09e6498
RS
1363 i = decode_reg_name (regname);
1364 if (i >= 0 || i == -4)
b4ccaa16 1365 ++nclobbers;
7859e3ac
DE
1366 else if (i == -2)
1367 error ("unknown register name `%s' in `asm'", regname);
b4ccaa16
RS
1368 }
1369
28d81abb
RK
1370 last_expr_type = 0;
1371
2a230e9d
BS
1372 /* Check that the number of alternatives is constant across all
1373 operands. */
1374 if (outputs || inputs)
1375 {
1376 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1377 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1378 tree next = inputs;
1379
f62a15e3
BS
1380 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1381 {
1382 error ("too many alternatives in `asm'");
1383 return;
1384 }
4381f7c2 1385
2a230e9d
BS
1386 tmp = outputs;
1387 while (tmp)
1388 {
47ee9bcb 1389 const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
14a774a9 1390
2a230e9d
BS
1391 if (n_occurrences (',', constraint) != nalternatives)
1392 {
1393 error ("operand constraints for `asm' differ in number of alternatives");
1394 return;
1395 }
14a774a9 1396
2a230e9d
BS
1397 if (TREE_CHAIN (tmp))
1398 tmp = TREE_CHAIN (tmp);
1399 else
1400 tmp = next, next = 0;
1401 }
1402 }
1403
28d81abb
RK
1404 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1405 {
1406 tree val = TREE_VALUE (tail);
b50a024d 1407 tree type = TREE_TYPE (val);
520a57c8 1408 const char *constraint;
3fbd5c2c 1409 char *p;
2a230e9d 1410 int c_len;
28d81abb 1411 int j;
2a230e9d 1412 int is_inout = 0;
d09a75ae 1413 int allows_reg = 0;
1afbe1c4 1414 int allows_mem = 0;
28d81abb
RK
1415
1416 /* If there's an erroneous arg, emit no insn. */
1417 if (TREE_TYPE (val) == error_mark_node)
1418 return;
1419
d09a75ae
RK
1420 /* Make sure constraint has `=' and does not have `+'. Also, see
1421 if it allows any register. Be liberal on the latter test, since
1422 the worst that happens if we get it wrong is we issue an error
1423 message. */
28d81abb 1424
2a230e9d 1425 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
520a57c8 1426 c_len = strlen (constraint);
2a230e9d 1427
3fbd5c2c
RH
1428 /* Allow the `=' or `+' to not be at the beginning of the string,
1429 since it wasn't explicitly documented that way, and there is a
1430 large body of code that puts it last. Swap the character to
1431 the front, so as not to uglify any place else. */
1432 switch (c_len)
2a230e9d 1433 {
3fbd5c2c
RH
1434 default:
1435 if ((p = strchr (constraint, '=')) != NULL)
1436 break;
1437 if ((p = strchr (constraint, '+')) != NULL)
1438 break;
1439 case 0:
2a230e9d
BS
1440 error ("output operand constraint lacks `='");
1441 return;
1442 }
520a57c8
ZW
1443 j = p - constraint;
1444 is_inout = *p == '+';
2a230e9d 1445
520a57c8 1446 if (j || is_inout)
3fbd5c2c 1447 {
520a57c8
ZW
1448 /* Have to throw away this constraint string and get a new one. */
1449 char *buf = alloca (c_len + 1);
1450 buf[0] = '=';
1451 if (j)
1452 memcpy (buf + 1, constraint, j);
1453 memcpy (buf + 1 + j, p + 1, c_len - j); /* not -j-1 - copy null */
1454 constraint = ggc_alloc_string (buf, c_len);
1455
1456 if (j)
1457 warning (
1458 "output constraint `%c' for operand %d is not at the beginning",
1459 *p, i);
3fbd5c2c
RH
1460 }
1461
2a230e9d
BS
1462 /* Make sure we can specify the matching operand. */
1463 if (is_inout && i > 9)
1464 {
1465 error ("output operand constraint %d contains `+'", i);
1466 return;
1467 }
1468
1469 for (j = 1; j < c_len; j++)
1470 switch (constraint[j])
d09a75ae
RK
1471 {
1472 case '+':
2a230e9d
BS
1473 case '=':
1474 error ("operand constraint contains '+' or '=' at illegal position.");
1475 return;
1476
1477 case '%':
1478 if (i + 1 == ninputs + noutputs)
235c5021 1479 {
2a230e9d 1480 error ("`%%' constraint used with last operand");
235c5021
RK
1481 return;
1482 }
235c5021 1483 break;
d09a75ae 1484
afa223c6 1485 case '?': case '!': case '*': case '&': case '#':
1afbe1c4 1486 case 'E': case 'F': case 'G': case 'H':
d09a75ae
RK
1487 case 's': case 'i': case 'n':
1488 case 'I': case 'J': case 'K': case 'L': case 'M':
1489 case 'N': case 'O': case 'P': case ',':
d09a75ae
RK
1490 break;
1491
7b7a33b3 1492 case '0': case '1': case '2': case '3': case '4':
cd76ea33
RK
1493 case '5': case '6': case '7': case '8': case '9':
1494 error ("matching constraint not valid in output operand");
1495 break;
1496
1afbe1c4
RH
1497 case 'V': case 'm': case 'o':
1498 allows_mem = 1;
1499 break;
1500
1501 case '<': case '>':
1502 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1503 excepting those that expand_call created. So match memory
1504 and hope. */
1505 allows_mem = 1;
1506 break;
1507
1508 case 'g': case 'X':
1509 allows_reg = 1;
1510 allows_mem = 1;
1511 break;
1512
1513 case 'p': case 'r':
d09a75ae
RK
1514 allows_reg = 1;
1515 break;
c2cba7a9
RH
1516
1517 default:
1518 if (! ISALPHA (constraint[j]))
1519 {
1520 error ("invalid punctuation `%c' in constraint",
1521 constraint[j]);
1522 return;
1523 }
1524 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1525 allows_reg = 1;
1526#ifdef EXTRA_CONSTRAINT
1527 else
1528 {
1529 /* Otherwise we can't assume anything about the nature of
1530 the constraint except that it isn't purely registers.
1531 Treat it like "g" and hope for the best. */
1532 allows_reg = 1;
1533 allows_mem = 1;
1534 }
1535#endif
1536 break;
d09a75ae
RK
1537 }
1538
d09a75ae
RK
1539 /* If an output operand is not a decl or indirect ref and our constraint
1540 allows a register, make a temporary to act as an intermediate.
1541 Make the asm insn write into that, then our caller will copy it to
1542 the real output operand. Likewise for promoted variables. */
28d81abb 1543
1b3d8f8a
GK
1544 generating_concat_p = 0;
1545
947255ed 1546 real_output_rtx[i] = NULL_RTX;
1afbe1c4
RH
1547 if ((TREE_CODE (val) == INDIRECT_REF
1548 && allows_mem)
2f939d94 1549 || (DECL_P (val)
1afbe1c4 1550 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
b50a024d 1551 && ! (GET_CODE (DECL_RTL (val)) == REG
d09a75ae 1552 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
235c5021 1553 || ! allows_reg
2a230e9d 1554 || is_inout)
d09a75ae
RK
1555 {
1556 if (! allows_reg)
1557 mark_addressable (TREE_VALUE (tail));
1558
1559 output_rtx[i]
17f5f329
RK
1560 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1561 EXPAND_MEMORY_USE_WO);
d09a75ae
RK
1562
1563 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1564 error ("output number %d not directly addressable", i);
1b3d8f8a
GK
1565 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1566 || GET_CODE (output_rtx[i]) == CONCAT)
947255ed
RH
1567 {
1568 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1569 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1570 if (is_inout)
1571 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1572 }
d09a75ae 1573 }
b50a024d 1574 else
e619bb8d 1575 {
17e71da2 1576 output_rtx[i] = assign_temp (type, 0, 0, 1);
b50a024d
RK
1577 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1578 }
235c5021 1579
1b3d8f8a
GK
1580 generating_concat_p = old_generating_concat_p;
1581
2a230e9d 1582 if (is_inout)
1583 {
1584 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1585 inout_opnum[ninout++] = i;
1586 }
1587 }
1588
235c5021 1589 ninputs += ninout;
1590 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1591 {
1592 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1593 return;
1594 }
1595
1596 /* Make vectors for the expression-rtx and constraint strings. */
1597
1598 argvec = rtvec_alloc (ninputs);
1599 constraints = rtvec_alloc (ninputs);
1600
1601 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1602 : GET_MODE (output_rtx[0])),
1603 TREE_STRING_POINTER (string),
4381f7c2 1604 empty_string, 0, argvec, constraints,
21a427cc 1605 filename, line);
c85f7c16 1606
78418280 1607 MEM_VOLATILE_P (body) = vol;
1608
1609 /* Eval the inputs and put them into ARGVEC.
1610 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1611
1612 i = 0;
1613 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1614 {
1615 int j;
1f06ee8d 1616 int allows_reg = 0, allows_mem = 0;
520a57c8 1617 const char *constraint, *orig_constraint;
2a230e9d 1618 int c_len;
1f06ee8d 1619 rtx op;
1620
1621 /* If there's an erroneous arg, emit no insn,
1622 because the ASM_INPUT would get VOIDmode
1623 and that could cause a crash in reload. */
1624 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1625 return;
1626
1627 /* ??? Can this happen, and does the error message make any sense? */
1628 if (TREE_PURPOSE (tail) == NULL_TREE)
1629 {
1630 error ("hard register `%s' listed as input operand to `asm'",
1631 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1632 return;
1633 }
1634
2a230e9d 1635 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
520a57c8 1636 c_len = strlen (constraint);
1f06ee8d 1637 orig_constraint = constraint;
28d81abb 1638
1639 /* Make sure constraint has neither `=', `+', nor '&'. */
1640
1641 for (j = 0; j < c_len; j++)
1642 switch (constraint[j])
28d81abb 1643 {
2a230e9d 1644 case '+': case '=': case '&':
1645 if (constraint == orig_constraint)
1646 {
1647 error ("input operand constraint contains `%c'",
1648 constraint[j]);
1649 return;
1650 }
1651 break;
65fed0cb 1652
2a230e9d 1653 case '%':
1654 if (constraint == orig_constraint
1655 && i + 1 == ninputs - ninout)
1656 {
1657 error ("`%%' constraint used with last operand");
1658 return;
1659 }
1660 break;
1661
1662 case 'V': case 'm': case 'o':
1663 allows_mem = 1;
1664 break;
1665
1666 case '<': case '>':
afa223c6 1667 case '?': case '!': case '*': case '#':
0551c32d 1668 case 'E': case 'F': case 'G': case 'H':
1669 case 's': case 'i': case 'n':
1670 case 'I': case 'J': case 'K': case 'L': case 'M':
1671 case 'N': case 'O': case 'P': case ',':
1672 break;
1673
1674 /* Whether or not a numeric constraint allows a register is
1675 decided by the matching constraint, and so there is no need
1676 to do anything special with them. We must handle them in
1677 the default case, so that we don't unnecessarily force
1678 operands to memory. */
1679 case '0': case '1': case '2': case '3': case '4':
cd76ea33 1680 case '5': case '6': case '7': case '8': case '9':
2a230e9d 1681 if (constraint[j] >= '0' + noutputs)
1682 {
1683 error
1684 ("matching constraint references invalid operand number");
1685 return;
1686 }
cd76ea33 1687
1f06ee8d 1688 /* Try to find the real constraint for this dup. */
1689 if ((j == 0 && c_len == 1)
1690 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1691 {
1692 tree o = outputs;
14a774a9 1693
1694 for (j = constraint[j] - '0'; j > 0; --j)
1695 o = TREE_CHAIN (o);
4381f7c2 1696
1f06ee8d 1697 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
520a57c8 1698 c_len = strlen (constraint);
1699 j = 0;
1700 break;
1701 }
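	    /* A sketch of the source form resolved above (hypothetical
	       names):
	         asm ("addl %2,%0" : "=r" (sum) : "0" (sum), "g" (inc));
	       input 0's constraint "0" names output 0, so its real
	       constraint is fetched from that output's "=r".  */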
1702
4381f7c2 1703 /* Fall through. */
cd76ea33 1704
1f06ee8d 1705 case 'p': case 'r':
1706 allows_reg = 1;
1707 break;
1f06ee8d 1708
0551c32d 1709 case 'g': case 'X':
1710 allows_reg = 1;
1711 allows_mem = 1;
1712 break;
1713
1714 default:
1715 if (! ISALPHA (constraint[j]))
1716 {
1717 error ("invalid punctuation `%c' in constraint",
1718 constraint[j]);
1719 return;
1720 }
1721 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1722 allows_reg = 1;
1723#ifdef EXTRA_CONSTRAINT
1724 else
1725 {
1726 /* Otherwise we can't assume anything about the nature of
1727 the constraint except that it isn't purely registers.
1728 Treat it like "g" and hope for the best. */
1729 allows_reg = 1;
1730 allows_mem = 1;
1731 }
1732#endif
1733 break;
1734 }
1735
1f06ee8d 1736 if (! allows_reg && allows_mem)
1737 mark_addressable (TREE_VALUE (tail));
1738
1f06ee8d 1739 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
65fed0cb 1740
1741 /* Never pass a CONCAT to an ASM. */
1742 generating_concat_p = 0;
1743 if (GET_CODE (op) == CONCAT)
1744 op = force_reg (GET_MODE (op), op);
1745
1afbe1c4 1746 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1747 {
1748 if (allows_reg)
1749 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1750 else if (!allows_mem)
1751 warning ("asm operand %d probably doesn't match constraints", i);
1752 else if (CONSTANT_P (op))
1753 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1754 op);
1755 else if (GET_CODE (op) == REG
1756 || GET_CODE (op) == SUBREG
1757 || GET_CODE (op) == CONCAT)
1758 {
1759 tree type = TREE_TYPE (TREE_VALUE (tail));
1760 tree qual_type = build_qualified_type (type,
1761 (TYPE_QUALS (type)
1762 | TYPE_QUAL_CONST));
1763 rtx memloc = assign_temp (qual_type, 1, 1, 1);
65fed0cb 1764
1765 emit_move_insn (memloc, op);
1766 op = memloc;
1767 }
14a774a9 1768
1769 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1770 /* We won't recognize volatile memory as available as a
1771 memory_operand at this point. Ignore it. */
1772 ;
1773 else if (queued_subexp_p (op))
1774 ;
1775 else
1776 /* ??? Leave this only until we have experience with what
1777 happens in combine and elsewhere when constraints are
1778 not satisfied. */
1779 warning ("asm operand %d probably doesn't match constraints", i);
65fed0cb 1780 }
1b3d8f8a 1781 generating_concat_p = old_generating_concat_p;
6462bb43 1782 ASM_OPERANDS_INPUT (body, i) = op;
2a230e9d 1783
6462bb43 1784 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
38a448ca 1785 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1f06ee8d 1786 orig_constraint);
1787 i++;
1788 }
1789
1790 /* Protect all the operands from the queue now that they have all been
1791 evaluated. */
28d81abb 1792
1793 generating_concat_p = 0;
1794
235c5021 1795 for (i = 0; i < ninputs - ninout; i++)
1796 ASM_OPERANDS_INPUT (body, i)
1797 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1798
1799 for (i = 0; i < noutputs; i++)
1800 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1801
4381f7c2 1802 /* For in-out operands, copy output rtx to input rtx. */
1803 for (i = 0; i < ninout; i++)
1804 {
1805 int j = inout_opnum[i];
1806
6462bb43 1807 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
235c5021 1808 = output_rtx[j];
6462bb43 1809 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
a8a05998 1810 = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
1811 }
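  /* E.g. an in-out operand written in the source as
       asm ("incl %0" : "+r" (x));
     arrives here as output 0 plus one hidden input whose constraint
     is the digit string "0", giving the insn the usual
     matching-operand form.  (An illustrative sketch.)  */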
1812
1813 generating_concat_p = old_generating_concat_p;
1814
1815 /* Now, for each output, construct an rtx
1816 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1817 ARGVEC CONSTRAINTS))
1818 If there is more than one, put them inside a PARALLEL. */
1819
1820 if (noutputs == 1 && nclobbers == 0)
1821 {
1822 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1823 = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
38a448ca 1824 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1825 }
14a774a9 1826
1827 else if (noutputs == 0 && nclobbers == 0)
1828 {
1829 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1830 insn = emit_insn (body);
1831 }
14a774a9 1832
1833 else
1834 {
1835 rtx obody = body;
1836 int num = noutputs;
1837
1838 if (num == 0)
1839 num = 1;
1840
38a448ca 1841 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1842
1843 /* For each output operand, store a SET. */
1844 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1845 {
1846 XVECEXP (body, 0, i)
1847 = gen_rtx_SET (VOIDmode,
1848 output_rtx[i],
c5c76735 1849 gen_rtx_ASM_OPERANDS
6462bb43 1850 (GET_MODE (output_rtx[i]),
1851 TREE_STRING_POINTER (string),
1852 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1853 i, argvec, constraints,
1854 filename, line));
1855
1856 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1857 }
1858
1859 /* If there are no outputs (but there are some clobbers)
1860 store the bare ASM_OPERANDS into the PARALLEL. */
1861
1862 if (i == 0)
1863 XVECEXP (body, 0, i++) = obody;
1864
1865 /* Store (clobber REG) for each clobbered register specified. */
1866
b4ccaa16 1867 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1868 {
47ee9bcb 1869 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1870 int j = decode_reg_name (regname);
28d81abb 1871
b4ac57ab 1872 if (j < 0)
28d81abb 1873 {
c09e6498 1874 if (j == -3) /* `cc', which is not a register */
1875 continue;
1876
1877 if (j == -4) /* `memory', don't cache memory across asm */
1878 {
bffc6177 1879 XVECEXP (body, 0, i++)
38a448ca 1880 = gen_rtx_CLOBBER (VOIDmode,
1881 gen_rtx_MEM
1882 (BLKmode,
1883 gen_rtx_SCRATCH (VOIDmode)));
1884 continue;
1885 }
1886
956d6950 1887 /* Ignore unknown register, error already signaled. */
cc1f5387 1888 continue;
1889 }
1890
1891 /* Use QImode since that's guaranteed to clobber just one reg. */
b4ccaa16 1892 XVECEXP (body, 0, i++)
38a448ca 1893 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1894 }
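      /* For example, the classic optimization barrier
	   asm volatile ("" : : : "memory", "cc");
	 comes through the loop above: "memory" becomes the BLKmode
	 MEM clobber built there, while "cc" (j == -3) is simply
	 skipped.  (An illustrative sketch.)  */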
1895
1896 insn = emit_insn (body);
1897 }
1898
1899 /* For any outputs that needed reloading into registers, spill them
1900 back to where they belong. */
1901 for (i = 0; i < noutputs; ++i)
1902 if (real_output_rtx[i])
1903 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1904
1905 free_temp_slots ();
1906}
1907\f
1908/* Generate RTL to evaluate the expression EXP
1909 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1910
1911void
1912expand_expr_stmt (exp)
1913 tree exp;
1914{
1915 /* If -W, warn about statements with no side effects,
1916 except for an explicit cast to void (e.g. for assert()), and
1917 except inside a ({...}) where they may be useful. */
1918 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1919 {
1920 if (! TREE_SIDE_EFFECTS (exp))
1921 {
1922 if ((extra_warnings || warn_unused_value)
1923 && !(TREE_CODE (exp) == CONVERT_EXPR
1924 && VOID_TYPE_P (TREE_TYPE (exp))))
1925 warning_with_file_and_line (emit_filename, emit_lineno,
1926 "statement with no effect");
1927 }
078721e1 1928 else if (warn_unused_value)
1929 warn_if_unused_value (exp);
1930 }
1931
1932 /* If EXP is of function type and we are expanding statements for
1933 value, convert it to pointer-to-function. */
1934 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1935 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1936
1937 /* The call to `expand_expr' could cause last_expr_type and
1938 last_expr_value to get reset. Therefore, we set last_expr_value
1939 and last_expr_type *after* calling expand_expr. */
1940 last_expr_value = expand_expr (exp,
1941 (expr_stmts_for_value
1942 ? NULL_RTX : const0_rtx),
1943 VOIDmode, 0);
8f17b5c5 1944 last_expr_type = TREE_TYPE (exp);
1945
1946 /* If all we do is reference a volatile value in memory,
1947 copy it to a register to be sure it is actually touched. */
1948 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1949 && TREE_THIS_VOLATILE (exp))
1950 {
1951 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1952 ;
1953 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1954 copy_to_reg (last_expr_value);
1955 else
1956 {
1957 rtx lab = gen_label_rtx ();
4381f7c2 1958
ddbe9812 1959 /* Compare the value with itself to reference it. */
1960 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1961 expand_expr (TYPE_SIZE (last_expr_type),
1962 NULL_RTX, VOIDmode, 0),
1963 BLKmode, 0,
1964 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1965 lab);
1966 emit_label (lab);
1967 }
1968 }
1969
1970 /* If this expression is part of a ({...}) and is in memory, we may have
1971 to preserve temporaries. */
1972 preserve_temp_slots (last_expr_value);
1973
1974 /* Free any temporaries used to evaluate this expression. Any temporary
1975 used as a result of this expression will already have been preserved
1976 above. */
1977 free_temp_slots ();
1978
1979 emit_queue ();
1980}
1981
1982/* Warn if EXP contains any computations whose results are not used.
1983 Return 1 if a warning is printed; 0 otherwise. */
1984
150a992a 1985int
1986warn_if_unused_value (exp)
1987 tree exp;
1988{
1989 if (TREE_USED (exp))
1990 return 0;
1991
1992 /* Don't warn about void constructs. This includes casting to void,
1993 void function calls, and statement expressions with a final cast
1994 to void. */
1995 if (VOID_TYPE_P (TREE_TYPE (exp)))
1996 return 0;
1997
1998 /* If this is an expression with side effects, don't warn. */
1999 if (TREE_SIDE_EFFECTS (exp))
2000 return 0;
2001
2002 switch (TREE_CODE (exp))
2003 {
2004 case PREINCREMENT_EXPR:
2005 case POSTINCREMENT_EXPR:
2006 case PREDECREMENT_EXPR:
2007 case POSTDECREMENT_EXPR:
2008 case MODIFY_EXPR:
2009 case INIT_EXPR:
2010 case TARGET_EXPR:
2011 case CALL_EXPR:
2012 case METHOD_CALL_EXPR:
2013 case RTL_EXPR:
81797aba 2014 case TRY_CATCH_EXPR:
2015 case WITH_CLEANUP_EXPR:
2016 case EXIT_EXPR:
2017 return 0;
2018
2019 case BIND_EXPR:
2020 /* For a binding, warn if no side effect within it. */
2021 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2022
2023 case SAVE_EXPR:
2024 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2025
2026 case TRUTH_ORIF_EXPR:
2027 case TRUTH_ANDIF_EXPR:
2028 /* In && or ||, warn if 2nd operand has no side effect. */
2029 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2030
2031 case COMPOUND_EXPR:
2032 if (TREE_NO_UNUSED_WARNING (exp))
2033 return 0;
2034 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2035 return 1;
2036 /* Let people do `(foo (), 0)' without a warning. */
2037 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2038 return 0;
2039 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2040
2041 case NOP_EXPR:
2042 case CONVERT_EXPR:
b4ac57ab 2043 case NON_LVALUE_EXPR:
2044 /* Don't warn about conversions not explicit in the user's program. */
2045 if (TREE_NO_UNUSED_WARNING (exp))
2046 return 0;
2047 /* Assignment to a cast usually results in a cast of a modify.
2048 Don't complain about that. There can be an arbitrary number of
2049 casts before the modify, so we must loop until we find the first
2050 non-cast expression and then test to see if that is a modify. */
2051 {
2052 tree tem = TREE_OPERAND (exp, 0);
2053
2054 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2055 tem = TREE_OPERAND (tem, 0);
2056
2057 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2058 || TREE_CODE (tem) == CALL_EXPR)
2059 return 0;
2060 }
d1e1adfb 2061 goto warn;
28d81abb 2062
2063 case INDIRECT_REF:
2064 /* Don't warn about automatic dereferencing of references, since
2065 the user cannot control it. */
2066 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2067 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2068 /* Fall through. */
2069
28d81abb 2070 default:
ddbe9812 2071 /* Referencing a volatile value is a side effect, so don't warn. */
2f939d94 2072 if ((DECL_P (exp)
2073 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2074 && TREE_THIS_VOLATILE (exp))
2075 return 0;
2076
2077 /* If this is an expression which has no operands, there is no value
2078 to be unused. There are no such language-independent codes,
2079 but front ends may define such. */
2080 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2081 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2082 return 0;
2083
d1e1adfb 2084 warn:
2085 warning_with_file_and_line (emit_filename, emit_lineno,
2086 "value computed is not used");
2087 return 1;
2088 }
2089}
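/* E.g. a front end can also call this directly for the discarded
   half of a comma expression, so in `(x + 1, f ());' the unused
   `x + 1' reaches the default case above and draws "value computed
   is not used", while `(foo (), 0)' is deliberately let through by
   the COMPOUND_EXPR case.  (An illustrative sketch.)  */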
2090
2091/* Clear out the memory of the last expression evaluated. */
2092
2093void
2094clear_last_expr ()
2095{
2096 last_expr_type = 0;
2097}
2098
2099/* Begin a statement which will return a value.
2100 Return the RTL_EXPR for this statement expr.
2101 The caller must save that value and pass it to expand_end_stmt_expr. */
2102
2103tree
2104expand_start_stmt_expr ()
2105{
2106 tree t;
2107
2108 /* Make the RTL_EXPR node temporary, not momentary,
2109 so that rtl_expr_chain doesn't become garbage. */
ca695ac9 2110 t = make_node (RTL_EXPR);
33c6ab80 2111 do_pending_stack_adjust ();
591ccf92 2112 start_sequence_for_rtl_expr (t);
2113 NO_DEFER_POP;
2114 expr_stmts_for_value++;
2115 return t;
2116}
2117
2118/* Restore the previous state at the end of a statement that returns a value.
2119 Returns a tree node representing the statement's value and the
2120 insns to compute the value.
2121
2122 The nodes of that expression have been freed by now, so we cannot use them.
2123 But we don't want to do that anyway; the expression has already been
2124 evaluated and now we just want to use the value. So generate a RTL_EXPR
2125 with the proper type and RTL value.
2126
2127 If the last substatement was not an expression,
2128 return something with type `void'. */
2129
2130tree
2131expand_end_stmt_expr (t)
2132 tree t;
2133{
2134 OK_DEFER_POP;
2135
2136 if (last_expr_type == 0)
2137 {
2138 last_expr_type = void_type_node;
2139 last_expr_value = const0_rtx;
2140 }
2141 else if (last_expr_value == 0)
2142 /* There are some cases where this can happen, such as when the
2143 statement is void type. */
2144 last_expr_value = const0_rtx;
2145 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2146 /* Remove any possible QUEUED. */
2147 last_expr_value = protect_from_queue (last_expr_value, 0);
2148
2149 emit_queue ();
2150
2151 TREE_TYPE (t) = last_expr_type;
2152 RTL_EXPR_RTL (t) = last_expr_value;
2153 RTL_EXPR_SEQUENCE (t) = get_insns ();
2154
2155 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2156
2157 end_sequence ();
2158
2159 /* Don't consider deleting this expr or containing exprs at tree level. */
2160 TREE_SIDE_EFFECTS (t) = 1;
2161 /* Propagate volatility of the actual RTL expr. */
2162 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2163
2164 last_expr_type = 0;
2165 expr_stmts_for_value--;
2166
2167 return t;
2168}
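/* So for a GNU statement expression such as
     int z = ({ int y = f (); y + 1; });
   a front end brackets the expansion of the body with
   expand_start_stmt_expr () and expand_end_stmt_expr (t); the
   RTL_EXPR returned here carries both the emitted insns and the
   value of `y + 1'.  (An illustrative sketch.)  */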
2169\f
2170/* Generate RTL for the start of an if-then. COND is the expression
2171 whose truth should be tested.
2172
2173 If EXITFLAG is nonzero, this conditional is visible to
2174 `exit_something'. */
2175
2176void
2177expand_start_cond (cond, exitflag)
2178 tree cond;
2179 int exitflag;
2180{
2181 struct nesting *thiscond = ALLOC_NESTING ();
2182
2183 /* Make an entry on cond_stack for the cond we are entering. */
2184
2185 thiscond->next = cond_stack;
2186 thiscond->all = nesting_stack;
2187 thiscond->depth = ++nesting_depth;
2188 thiscond->data.cond.next_label = gen_label_rtx ();
2189 /* Before we encounter an `else', we don't need a separate exit label
2190 unless there are supposed to be exit statements
2191 to exit this conditional. */
2192 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2193 thiscond->data.cond.endif_label = thiscond->exit_label;
2194 cond_stack = thiscond;
2195 nesting_stack = thiscond;
2196
b93a436e 2197 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2198}
2199
2200/* Generate RTL between then-clause and the elseif-clause
2201 of an if-then-elseif-.... */
2202
2203void
2204expand_start_elseif (cond)
2205 tree cond;
2206{
2207 if (cond_stack->data.cond.endif_label == 0)
2208 cond_stack->data.cond.endif_label = gen_label_rtx ();
2209 emit_jump (cond_stack->data.cond.endif_label);
2210 emit_label (cond_stack->data.cond.next_label);
2211 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2212 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2213}
2214
2215/* Generate RTL between the then-clause and the else-clause
2216 of an if-then-else. */
2217
2218void
2219expand_start_else ()
2220{
2221 if (cond_stack->data.cond.endif_label == 0)
2222 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2223
2224 emit_jump (cond_stack->data.cond.endif_label);
2225 emit_label (cond_stack->data.cond.next_label);
0f41302f 2226 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2227}
2228
2229/* After calling expand_start_else, turn this "else" into an "else if"
2230 by providing another condition. */
2231
2232void
2233expand_elseif (cond)
2234 tree cond;
2235{
2236 cond_stack->data.cond.next_label = gen_label_rtx ();
2237 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2238}
2239
2240/* Generate RTL for the end of an if-then.
2241 Pop the record for it off of cond_stack. */
2242
2243void
2244expand_end_cond ()
2245{
2246 struct nesting *thiscond = cond_stack;
2247
2248 do_pending_stack_adjust ();
2249 if (thiscond->data.cond.next_label)
2250 emit_label (thiscond->data.cond.next_label);
2251 if (thiscond->data.cond.endif_label)
2252 emit_label (thiscond->data.cond.endif_label);
2253
2254 POPSTACK (cond_stack);
2255 last_expr_type = 0;
2256}
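/* Taken together, a front end can expand
     if (a) A; else if (b) B; else C;
   roughly as (a sketch):
     expand_start_cond (a, 0);  ... A ...
     expand_start_elseif (b);   ... B ...
     expand_start_else ();      ... C ...
     expand_end_cond ();  */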
2257\f
2258/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2259 loop should be exited by `exit_something'. This is a loop for which
2260 `expand_continue' will jump to the top of the loop.
2261
2262 Make an entry on loop_stack to record the labels associated with
2263 this loop. */
2264
2265struct nesting *
2266expand_start_loop (exit_flag)
2267 int exit_flag;
2268{
2269 register struct nesting *thisloop = ALLOC_NESTING ();
2270
2271 /* Make an entry on loop_stack for the loop we are entering. */
2272
2273 thisloop->next = loop_stack;
2274 thisloop->all = nesting_stack;
2275 thisloop->depth = ++nesting_depth;
2276 thisloop->data.loop.start_label = gen_label_rtx ();
2277 thisloop->data.loop.end_label = gen_label_rtx ();
8afad312 2278 thisloop->data.loop.alt_end_label = 0;
2279 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2280 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2281 loop_stack = thisloop;
2282 nesting_stack = thisloop;
2283
2284 do_pending_stack_adjust ();
2285 emit_queue ();
37366632 2286 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2287 emit_label (thisloop->data.loop.start_label);
2288
2289 return thisloop;
2290}
2291
2292/* Like expand_start_loop but for a loop where the continuation point
2293 (for expand_continue_loop) will be specified explicitly. */
2294
2295struct nesting *
2296expand_start_loop_continue_elsewhere (exit_flag)
2297 int exit_flag;
2298{
2299 struct nesting *thisloop = expand_start_loop (exit_flag);
2300 loop_stack->data.loop.continue_label = gen_label_rtx ();
2301 return thisloop;
2302}
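/* E.g. a `for (init; cond; incr) body' loop can be expanded as
   (a sketch):
     expand_start_loop_continue_elsewhere (1);
     expand_exit_loop_if_false (0, cond);
     ... body ...      (a `continue' jumps to the point below)
     expand_loop_continue_here ();
     ... incr ...
     expand_end_loop ();  */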
2303
2304/* Begin a null, aka do { } while (0) "loop". But since the contents
2305 of said loop can still contain a break, we must frob the loop nest. */
2306
2307struct nesting *
2308expand_start_null_loop ()
2309{
2310 register struct nesting *thisloop = ALLOC_NESTING ();
2311
2312 /* Make an entry on loop_stack for the loop we are entering. */
2313
2314 thisloop->next = loop_stack;
2315 thisloop->all = nesting_stack;
2316 thisloop->depth = ++nesting_depth;
2317 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2318 thisloop->data.loop.end_label = gen_label_rtx ();
2319 thisloop->data.loop.alt_end_label = NULL_RTX;
ba89764a 2320 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2321 thisloop->exit_label = thisloop->data.loop.end_label;
2322 loop_stack = thisloop;
2323 nesting_stack = thisloop;
2324
2325 return thisloop;
2326}
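/* E.g. in
     do { if (err) break; f (); } while (0);
   the body never iterates, but the `break' still needs a loop on
   loop_stack to exit through; this entry provides just the end
   label it targets.  (An illustrative sketch.)  */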
2327
2328/* Specify the continuation point for a loop started with
2329 expand_start_loop_continue_elsewhere.
2330 Use this at the point in the code to which a continue statement
2331 should jump. */
2332
2333void
2334expand_loop_continue_here ()
2335{
2336 do_pending_stack_adjust ();
37366632 2337 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2338 emit_label (loop_stack->data.loop.continue_label);
2339}
2340
2341/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2342 Pop the block off of loop_stack. */
2343
2344void
2345expand_end_loop ()
2346{
2347 rtx start_label = loop_stack->data.loop.start_label;
2348 rtx insn = get_last_insn ();
a7d308f7 2349 int needs_end_jump = 1;
2350
2351 /* Mark the continue-point at the top of the loop if none elsewhere. */
2352 if (start_label == loop_stack->data.loop.continue_label)
2353 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2354
2355 do_pending_stack_adjust ();
2356
2357 /* If optimizing, perhaps reorder the loop.
2358 First, try to use a condjump near the end.
2359 expand_exit_loop_if_false ends loops with unconditional jumps,
2360 like this:
2361
2362 if (test) goto label;
2363 optional: cleanup
2364 goto loop_stack->data.loop.end_label
2365 barrier
2366 label:
2367
2368 If we find such a pattern, we can end the loop earlier. */
2369
2370 if (optimize
2371 && GET_CODE (insn) == CODE_LABEL
2372 && LABEL_NAME (insn) == NULL
2373 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2374 {
2375 rtx label = insn;
2376 rtx jump = PREV_INSN (PREV_INSN (label));
2377
2378 if (GET_CODE (jump) == JUMP_INSN
2379 && GET_CODE (PATTERN (jump)) == SET
2380 && SET_DEST (PATTERN (jump)) == pc_rtx
2381 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2382 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2383 == loop_stack->data.loop.end_label))
2384 {
2385 rtx prev;
2386
2387 /* The test might be complex and reference LABEL multiple times,
2388 like the loop in loop_iterations to set vtop. To handle this,
2389 we move LABEL. */
2390 insn = PREV_INSN (label);
2391 reorder_insns (label, label, start_label);
2392
2393 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2394 {
2395 /* We ignore line number notes, but if we see any other note,
2396 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2397 NOTE_INSN_LOOP_*, we disable this optimization. */
2398 if (GET_CODE (prev) == NOTE)
2399 {
2400 if (NOTE_LINE_NUMBER (prev) < 0)
2401 break;
2402 continue;
2403 }
2404 if (GET_CODE (prev) == CODE_LABEL)
2405 break;
2406 if (GET_CODE (prev) == JUMP_INSN)
2407 {
2408 if (GET_CODE (PATTERN (prev)) == SET
2409 && SET_DEST (PATTERN (prev)) == pc_rtx
2410 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2411 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2412 == LABEL_REF)
2413 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2414 {
2415 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2416 = start_label;
2417 emit_note_after (NOTE_INSN_LOOP_END, prev);
2418 needs_end_jump = 0;
2419 }
2420 break;
2421 }
2422 }
2423 }
2424 }
2425
2426 /* If the loop starts with a loop exit, roll that to the end where
2427 it will optimize together with the jump back.
2428
2429 We look for the conditional branch to the exit, except that once
2430 we find such a branch, we don't look past 30 instructions.
2431
2432 In more detail, if the loop presently looks like this (in pseudo-C):
2433
2434 start_label:
2435 if (test) goto end_label;
2436 body;
2437 goto start_label;
0720f6fb 2438 end_label:
4381f7c2 2439
93de5c31
MM
2440 transform it to look like:
2441
2442 goto start_label;
2443 newstart_label:
2444 body;
2445 start_label:
2446 if (test) goto end_label;
2447 goto newstart_label;
0720f6fb 2448 end_label:
2449
2450 Here, the `test' may actually consist of some reasonably complex
2451 code, terminating in a test. */
0720f6fb 2452
28d81abb 2453 if (optimize
a7d308f7 2454 && needs_end_jump
2455 &&
2456 ! (GET_CODE (insn) == JUMP_INSN
2457 && GET_CODE (PATTERN (insn)) == SET
2458 && SET_DEST (PATTERN (insn)) == pc_rtx
2459 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2460 {
93de5c31 2461 int eh_regions = 0;
2462 int num_insns = 0;
2463 rtx last_test_insn = NULL_RTX;
93de5c31 2464
2465 /* Scan insns from the top of the loop looking for a qualified
2466 conditional exit. */
2467 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2468 insn = NEXT_INSN (insn))
2469 {
4381f7c2 2470 if (GET_CODE (insn) == NOTE)
2471 {
2472 if (optimize < 2
2473 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2474 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2475 /* The code that actually moves the exit test will
2476 carefully leave BLOCK notes in their original
2477 location. That means, however, that we can't debug
2478 the exit test itself. So, we refuse to move code
2479 containing BLOCK notes at low optimization levels. */
2480 break;
2481
2482 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2483 ++eh_regions;
2484 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2485 {
2486 --eh_regions;
4381f7c2 2487 if (eh_regions < 0)
2488 /* We've come to the end of an EH region, but
2489 never saw the beginning of that region. That
2490 means that an EH region begins before the top
2491 of the loop, and ends in the middle of it. The
2492 existence of such a situation violates a basic
2493 assumption in this code, since that would imply
2494 that even when EH_REGIONS is zero, we might
2495 move code out of an exception region. */
2496 abort ();
2497 }
28d81abb 2498
2499 /* We must not walk into a nested loop. */
2500 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2501 break;
2502
2503 /* We already know this INSN is a NOTE, so there's no
2504 point in looking at it to see if it's a JUMP. */
2505 continue;
2506 }
2507
2508 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2509 num_insns++;
2510
2511 if (last_test_insn && num_insns > 30)
2512 break;
2513
4381f7c2 2514 if (eh_regions > 0)
2515 /* We don't want to move a partial EH region. Consider:
2516
2517 while ( ( { try {
4381f7c2 2518 if (cond ()) 0;
2519 else {
2520 bar();
2521 1;
2522 }
4381f7c2 2523 } catch (...) {
2524 1;
2525 } )) {
2526 body;
4381f7c2 2527 }
2528
2529 This isn't legal C++, but here's what it's supposed to
2530 mean: if cond() is true, stop looping. Otherwise,
2531 call bar, and keep looping. In addition, if cond
2532 throws an exception, catch it and keep looping. Such
4381f7c2 2533 constructs are certainly legal in LISP.
2534
2535 We should not move the `if (cond()) 0' test since then
2536 the EH-region for the try-block would be broken up.
2537 (In this case we would move the EH_BEG note for the `try'
2538 and `if cond()' but not the call to bar() or the
4381f7c2 2539 EH_END note.)
2540
2541 So we don't look for tests within an EH region. */
2542 continue;
2543
4381f7c2 2544 if (GET_CODE (insn) == JUMP_INSN
28d81abb 2545 && GET_CODE (PATTERN (insn)) == SET
2546 && SET_DEST (PATTERN (insn)) == pc_rtx)
2547 {
2548 /* This is indeed a jump. */
2549 rtx dest1 = NULL_RTX;
2550 rtx dest2 = NULL_RTX;
2551 rtx potential_last_test;
2552 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2553 {
2554 /* A conditional jump. */
2555 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2556 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2557 potential_last_test = insn;
2558 }
2559 else
2560 {
2561 /* An unconditional jump. */
2562 dest1 = SET_SRC (PATTERN (insn));
2563 /* Include the BARRIER after the JUMP. */
2564 potential_last_test = NEXT_INSN (insn);
2565 }
2566
2567 do {
2568 if (dest1 && GET_CODE (dest1) == LABEL_REF
4381f7c2 2569 && ((XEXP (dest1, 0)
0720f6fb 2570 == loop_stack->data.loop.alt_end_label)
4381f7c2 2571 || (XEXP (dest1, 0)
2572 == loop_stack->data.loop.end_label)))
2573 {
2574 last_test_insn = potential_last_test;
2575 break;
2576 }
2577
2578 /* If this was a conditional jump, there may be
2579 another label at which we should look. */
2580 dest1 = dest2;
2581 dest2 = NULL_RTX;
2582 } while (dest1);
2583 }
2584 }
2585
2586 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2587 {
2588 /* We found one. Move everything from there up
2589 to the end of the loop, and add a jump into the loop
2590 to jump to there. */
2591 register rtx newstart_label = gen_label_rtx ();
2592 register rtx start_move = start_label;
93de5c31 2593 rtx next_insn;
28d81abb 2594
b4ac57ab 2595 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2596 then we want to move this note also. */
2597 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2598 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2599 == NOTE_INSN_LOOP_CONT))
2600 start_move = PREV_INSN (start_move);
2601
2602 emit_label_after (newstart_label, PREV_INSN (start_move));
2603
2604 /* Actually move the insns. Start at the beginning, and
2605 keep copying insns until we've copied the
2606 last_test_insn. */
2607 for (insn = start_move; insn; insn = next_insn)
2608 {
2609 /* Figure out which insn comes after this one. We have
2610 to do this before we move INSN. */
2611 if (insn == last_test_insn)
2612 /* We've moved all the insns. */
2613 next_insn = NULL_RTX;
2614 else
2615 next_insn = NEXT_INSN (insn);
2616
2617 if (GET_CODE (insn) == NOTE
2618 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2619 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2620 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2621 NOTE_INSN_BLOCK_ENDs because the correct generation
2622 of debugging information depends on these appearing
2623 in the same order in the RTL and in the tree
2624 structure, where they are represented as BLOCKs.
2625 So, we don't move block notes. Of course, moving
2626 the code inside the block is likely to make it
2627 impossible to debug the instructions in the exit
2628 test, but such is the price of optimization. */
2629 continue;
2630
2631 /* Move the INSN. */
2632 reorder_insns (insn, insn, get_last_insn ());
2633 }
2634
2635 emit_jump_insn_after (gen_jump (start_label),
2636 PREV_INSN (newstart_label));
2637 emit_barrier_after (PREV_INSN (newstart_label));
2638 start_label = newstart_label;
2639 }
2640 }
2641
2642 if (needs_end_jump)
2643 {
2644 emit_jump (start_label);
2645 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2646 }
2647 emit_label (loop_stack->data.loop.end_label);
2648
2649 POPSTACK (loop_stack);
2650
2651 last_expr_type = 0;
2652}
2653
2654/* Finish a null loop, aka do { } while (0). */
2655
2656void
2657expand_end_null_loop ()
2658{
2659 do_pending_stack_adjust ();
2660 emit_label (loop_stack->data.loop.end_label);
2661
2662 POPSTACK (loop_stack);
2663
2664 last_expr_type = 0;
2665}
2666
2667/* Generate a jump to the current loop's continue-point.
2668 This is usually the top of the loop, but may be specified
2669 explicitly elsewhere. If not currently inside a loop,
2670 return 0 and do nothing; caller will print an error message. */
2671
2672int
2673expand_continue_loop (whichloop)
2674 struct nesting *whichloop;
2675{
2676 last_expr_type = 0;
2677 if (whichloop == 0)
2678 whichloop = loop_stack;
2679 if (whichloop == 0)
2680 return 0;
2681 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2682 NULL_RTX);
2683 return 1;
2684}
2685
2686/* Generate a jump to exit the current loop. If not currently inside a loop,
2687 return 0 and do nothing; caller will print an error message. */
2688
2689int
2690expand_exit_loop (whichloop)
2691 struct nesting *whichloop;
2692{
2693 last_expr_type = 0;
2694 if (whichloop == 0)
2695 whichloop = loop_stack;
2696 if (whichloop == 0)
2697 return 0;
37366632 2698 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2699 return 1;
2700}
2701
2702/* Generate a conditional jump to exit the current loop if COND
2703 evaluates to zero. If not currently inside a loop,
2704 return 0 and do nothing; caller will print an error message. */
2705
2706int
2707expand_exit_loop_if_false (whichloop, cond)
2708 struct nesting *whichloop;
2709 tree cond;
2710{
2711 rtx label = gen_label_rtx ();
2712 rtx last_insn;
28d81abb 2713 last_expr_type = 0;
b93a436e 2714
2715 if (whichloop == 0)
2716 whichloop = loop_stack;
2717 if (whichloop == 0)
2718 return 0;
2719 /* In order to handle fixups, we actually create a conditional jump
2720 around an unconditional branch to exit the loop. If fixups are
2721 necessary, they go before the unconditional branch. */
d902c7ea 2722
2723 do_jump (cond, NULL_RTX, label);
2724 last_insn = get_last_insn ();
2725 if (GET_CODE (last_insn) == CODE_LABEL)
2726 whichloop->data.loop.alt_end_label = last_insn;
2727 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2728 NULL_RTX);
2729 emit_label (label);
ca695ac9 2730
2731 return 1;
2732}
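/* I.e. instead of `if (! cond) goto end_label', the code emitted
   above has the shape (a sketch):
       if (cond) goto L;
       <fixup code, if any>
       goto end_label;
     L:
   so that fixups sit before the unconditional jump.  */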
2733
2734/* Return nonzero if the loop nest is empty. Else return zero. */
2735
2736int
2737stmt_loop_nest_empty ()
2738{
2739 /* cfun->stmt can be NULL if we are building a call to get the
2740 EH context for a setjmp/longjmp EH target and the current
2741 function was a deferred inline function. */
abe79fd4 2742 return (cfun->stmt == NULL || loop_stack == NULL);
2743}
2744
2745/* Return non-zero if we should preserve sub-expressions as separate
2746 pseudos. We never do so if we aren't optimizing. We always do so
2747 if -fexpensive-optimizations.
2748
2749 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2750 the loop may still be a small one. */
2751
2752int
2753preserve_subexpressions_p ()
2754{
2755 rtx insn;
2756
2757 if (flag_expensive_optimizations)
2758 return 1;
2759
01d939e8 2760 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2761 return 0;
2762
2763 insn = get_last_insn_anywhere ();
2764
2765 return (insn
2766 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2767 < n_non_fixed_regs * 3));
2768
2769}
2770
2771/* Generate a jump to exit the current loop, conditional, binding contour
2772 or case statement. Not all such constructs are visible to this function,
2773 only those started with EXIT_FLAG nonzero. Individual languages use
2774 the EXIT_FLAG parameter to control which kinds of constructs you can
2775 exit this way.
2776
2777 If not currently inside anything that can be exited,
2778 return 0 and do nothing; caller will print an error message. */
2779
2780int
2781expand_exit_something ()
2782{
2783 struct nesting *n;
2784 last_expr_type = 0;
2785 for (n = nesting_stack; n; n = n->all)
2786 if (n->exit_label != 0)
2787 {
37366632 2788 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2789 return 1;
2790 }
2791
2792 return 0;
2793}
2794\f
2795/* Generate RTL to return from the current function, with no value.
2796 (That is, we do not do anything about returning any value.) */
2797
2798void
2799expand_null_return ()
2800{
2801 struct nesting *block = block_stack;
2802 rtx last_insn = get_last_insn ();
2803
4381f7c2 2804 /* If this function was declared to return a value, but we
2805 didn't, clobber the return registers so that they are not
2806 propagated live to the rest of the function. */
c13fde05 2807 clobber_return_register ();
2808
2809 /* Does any pending block have cleanups? */
2810 while (block && block->data.block.cleanups == 0)
2811 block = block->next;
2812
2813 /* If yes, use a goto to return, since that runs cleanups. */
2814
2815 expand_null_return_1 (last_insn, block != 0);
2816}
2817
2818/* Generate RTL to return from the current function, with value VAL. */
2819
8d800403 2820static void
2821expand_value_return (val)
2822 rtx val;
2823{
2824 struct nesting *block = block_stack;
2825 rtx last_insn = get_last_insn ();
2826 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2827
2828 /* Copy the value to the return location
2829 unless it's already there. */
2830
2831 if (return_reg != val)
77636079 2832 {
77636079 2833 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
37877eb0 2834#ifdef PROMOTE_FUNCTION_RETURN
77636079 2835 int unsignedp = TREE_UNSIGNED (type);
2836 enum machine_mode old_mode
2837 = DECL_MODE (DECL_RESULT (current_function_decl));
28612f9e 2838 enum machine_mode mode
14a774a9 2839 = promote_mode (type, old_mode, &unsignedp, 1);
77636079 2840
2841 if (mode != old_mode)
2842 val = convert_modes (mode, old_mode, val, unsignedp);
77636079 2843#endif
2844 if (GET_CODE (return_reg) == PARALLEL)
2845 emit_group_load (return_reg, val, int_size_in_bytes (type),
19caa751 2846 TYPE_ALIGN (type));
14a774a9 2847 else
2848 emit_move_insn (return_reg, val);
2849 }
14a774a9 2850
2851 /* Does any pending block have cleanups? */
2852
2853 while (block && block->data.block.cleanups == 0)
2854 block = block->next;
2855
2856 /* If yes, use a goto to return, since that runs cleanups.
2857 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2858
2859 expand_null_return_1 (last_insn, block != 0);
2860}
2861
2862/* Output a return with no value. If LAST_INSN is nonzero,
2863 pretend that the return takes place after LAST_INSN.
2864 If USE_GOTO is nonzero then don't use a return instruction;
2865 go to the return label instead. This causes any cleanups
2866 of pending blocks to be executed normally. */
2867
2868static void
2869expand_null_return_1 (last_insn, use_goto)
2870 rtx last_insn;
2871 int use_goto;
2872{
2873 rtx end_label = cleanup_label ? cleanup_label : return_label;
2874
2875 clear_pending_stack_adjust ();
2876 do_pending_stack_adjust ();
2877 last_expr_type = 0;
2878
2879 /* PCC-struct return always uses an epilogue. */
2880 if (current_function_returns_pcc_struct || use_goto)
2881 {
2882 if (end_label == 0)
2883 end_label = return_label = gen_label_rtx ();
37366632 2884 expand_goto_internal (NULL_TREE, end_label, last_insn);
2885 return;
2886 }
2887
2888 /* Otherwise output a simple return-insn if one is available,
2889 unless it won't do the job. */
2890#ifdef HAVE_return
2891 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2892 {
2893 emit_jump_insn (gen_return ());
2894 emit_barrier ();
2895 return;
2896 }
2897#endif
2898
2899 /* Otherwise jump to the epilogue. */
37366632 2900 expand_goto_internal (NULL_TREE, end_label, last_insn);
2901}
2902\f
2903/* Generate RTL to evaluate the expression RETVAL and return it
2904 from the current function. */
2905
2906void
2907expand_return (retval)
2908 tree retval;
2909{
2910 /* If there are any cleanups to be performed, then they will
2911 be inserted following LAST_INSN. It is desirable
2912 that the last_insn, for such purposes, should be the
2913 last insn before computing the return value. Otherwise, cleanups
2914 which call functions can clobber the return value. */
2915 /* ??? rms: I think that is erroneous, because in C++ it would
2916 run destructors on variables that might be used in the subsequent
2917 computation of the return value. */
2918 rtx last_insn = 0;
14a774a9 2919 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
28d81abb 2920 register rtx val = 0;
2921 tree retval_rhs;
2922 int cleanups;
2923
2924 /* If function wants no value, give it none. */
2925 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2926 {
37366632 2927 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2928 emit_queue ();
2929 expand_null_return ();
2930 return;
2931 }
2932
2933 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2934 /* This is not sufficient. We also need to watch for cleanups of the
2935 expression we are about to expand. Unfortunately, we cannot know
2936 if it has cleanups until we expand it, and we want to change how we
2937 expand it depending upon if we need cleanups. We can't win. */
2938#if 0
28d81abb 2939 cleanups = any_pending_cleanups (1);
2940#else
2941 cleanups = 1;
2942#endif
28d81abb 2943
2944 if (retval == error_mark_node)
2945 retval_rhs = NULL_TREE;
2946 else if (TREE_CODE (retval) == RESULT_DECL)
2947 retval_rhs = retval;
2948 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2949 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2950 retval_rhs = TREE_OPERAND (retval, 1);
71653180 2951 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2952 /* Recognize tail-recursive call to void function. */
2953 retval_rhs = retval;
2954 else
2955 retval_rhs = NULL_TREE;
2956
2957 /* Only use `last_insn' if there are cleanups which must be run. */
2958 if (cleanups || cleanup_label != 0)
2959 last_insn = get_last_insn ();
2960
2961 /* Distribute return down conditional expr if either of the sides
2962 may involve tail recursion (see test below). This enhances the number
2963 of tail recursions we see. Don't do this always since it can produce
2964 sub-optimal code in some cases and we distribute assignments into
2965 conditional expressions when it would help. */
2966
2967 if (optimize && retval_rhs != 0
2968 && frame_offset == 0
2969 && TREE_CODE (retval_rhs) == COND_EXPR
2970 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2971 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2972 {
2973 rtx label = gen_label_rtx ();
2974 tree expr;
2975
37366632 2976 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
1483bddb 2977 start_cleanup_deferral ();
dd98f85c 2978 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2979 DECL_RESULT (current_function_decl),
2980 TREE_OPERAND (retval_rhs, 1));
2981 TREE_SIDE_EFFECTS (expr) = 1;
2982 expand_return (expr);
28d81abb 2983 emit_label (label);
a0a34f94 2984
dd98f85c 2985 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2986 DECL_RESULT (current_function_decl),
2987 TREE_OPERAND (retval_rhs, 2));
2988 TREE_SIDE_EFFECTS (expr) = 1;
2989 expand_return (expr);
1483bddb 2990 end_cleanup_deferral ();
2991 return;
2992 }
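  /* In effect (a sketch), `return cond ? f () : g ();' has just been
     rewritten as `if (cond) return f (); else return g ();', giving
     each arm its own shot at the tail-call handling below.  */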
2993
2994 /* If the result is an aggregate that is being returned in one (or more)
2995 registers, load the registers here. The compiler currently can't handle
2996 copying a BLKmode value into registers. We could put this code in a
2997 more general area (for use by everyone instead of just function
2998 call/return), but until this feature is generally usable it is kept here
2999 (and in expand_call). The value must go into a pseudo in case there
3000 are cleanups that will clobber the real return register. */
3001
3002 if (retval_rhs != 0
3003 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
14a774a9 3004 && GET_CODE (result_rtl) == REG)
4c485b63 3005 {
3006 int i;
3007 unsigned HOST_WIDE_INT bitpos, xbitpos;
3008 unsigned HOST_WIDE_INT big_endian_correction = 0;
3009 unsigned HOST_WIDE_INT bytes
3010 = int_size_in_bytes (TREE_TYPE (retval_rhs));
4c485b63 3011 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3012 unsigned int bitsize
3013 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
4c485b63 3014 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
c16ddde3 3015 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
4c485b63 3016 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 3017 enum machine_mode tmpmode, result_reg_mode;
4c485b63 3018
3019 /* Structures whose size is not a multiple of a word are aligned
3020 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3021 machine, this means we must skip the empty high order bytes when
3022 calculating the bit offset. */
3023 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3024 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3025 * BITS_PER_UNIT));
3026
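      /* A worked example, assuming BITS_PER_WORD == 32 and
	 BITS_PER_UNIT == 8: a 5-byte structure leaves
	 bytes % UNITS_PER_WORD == 1, so big_endian_correction is
	 32 - 8 == 24 and the copy below starts 24 bits into the
	 first destination word.  */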
4381f7c2 3027 /* Copy the structure BITSIZE bits at a time. */
3028 for (bitpos = 0, xbitpos = big_endian_correction;
3029 bitpos < bytes * BITS_PER_UNIT;
3030 bitpos += bitsize, xbitpos += bitsize)
4c485b63 3031 {
a7f875d7 3032 /* We need a new destination pseudo each time xbitpos is
abc95ed3 3033 on a word boundary and when xbitpos == big_endian_correction
3034 (the first time through). */
3035 if (xbitpos % BITS_PER_WORD == 0
3036 || xbitpos == big_endian_correction)
4c485b63 3037 {
3038 /* Generate an appropriate register. */
3039 dst = gen_reg_rtx (word_mode);
3040 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3041
3042 /* Clobber the destination before we move anything into it. */
38a448ca 3043 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
4c485b63 3044 }
3045
3046 /* We need a new source operand each time bitpos is on a word
3047 boundary. */
3048 if (bitpos % BITS_PER_WORD == 0)
3049 src = operand_subword_force (result_val,
3050 bitpos / BITS_PER_WORD,
3051 BLKmode);
3052
3053 /* Use bitpos for the source extraction (left justified) and
3054 xbitpos for the destination store (right justified). */
3055 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3056 extract_bit_field (src, bitsize,
3057 bitpos % BITS_PER_WORD, 1,
3058 NULL_RTX, word_mode, word_mode,
3059 bitsize, BITS_PER_WORD),
3060 bitsize, BITS_PER_WORD);
3061 }
3062
3063 /* Find the smallest integer mode large enough to hold the
3064 entire structure and use that mode instead of BLKmode
3065 on the USE insn for the return register. */
3066 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3067 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
0c61f541 3068 tmpmode != VOIDmode;
4c485b63 3069 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3070 {
3071 /* Have we found a large enough mode? */
3072 if (GET_MODE_SIZE (tmpmode) >= bytes)
3073 break;
3074 }
3075
3076 /* No suitable mode found. */
0c61f541 3077 if (tmpmode == VOIDmode)
3ffeb8f1 3078 abort ();
4c485b63 3079
14a774a9 3080 PUT_MODE (result_rtl, tmpmode);
3ffeb8f1 3081
3082 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3083 result_reg_mode = word_mode;
3084 else
3085 result_reg_mode = tmpmode;
3086 result_reg = gen_reg_rtx (result_reg_mode);
3087
3ffeb8f1 3088 emit_queue ();
3ffeb8f1 3089 for (i = 0; i < n_regs; i++)
af55da56 3090 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 3091 result_pseudos[i]);
4c485b63 3092
3093 if (tmpmode != result_reg_mode)
3094 result_reg = gen_lowpart (tmpmode, result_reg);
3095
3096 expand_value_return (result_reg);
3097 }
3098 else if (cleanups
28d81abb 3099 && retval_rhs != 0
71653180 3100 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3101 && (GET_CODE (result_rtl) == REG
3102 || (GET_CODE (result_rtl) == PARALLEL)))
28d81abb 3103 {
3104 /* Calculate the return value into a temporary (usually a pseudo
3105 reg). */
3106 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3107 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3108
3109 val = assign_temp (nt, 0, 0, 1);
3110 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3111 val = force_not_mem (val);
28d81abb 3112 emit_queue ();
3113 /* Return the calculated value, doing cleanups first. */
3114 expand_value_return (val);
3115 }
3116 else
3117 {
3118 /* No cleanups or no hard reg used;
3119 calculate value into hard return reg. */
cba389cd 3120 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb 3121 emit_queue ();
14a774a9 3122 expand_value_return (result_rtl);
3123 }
3124}
3125
3126/* Return 1 if the end of the generated RTX is not a barrier.
3127 This means code already compiled can drop through. */
3128
3129int
3130drop_through_at_end_p ()
3131{
3132 rtx insn = get_last_insn ();
3133 while (insn && GET_CODE (insn) == NOTE)
3134 insn = PREV_INSN (insn);
3135 return insn && GET_CODE (insn) != BARRIER;
3136}
3137\f
3138/* Attempt to optimize a potential tail recursion call into a goto.
3139 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3140 where to place the jump to the tail recursion label.
3141
b06775f9 3142 Return TRUE if the call was optimized into a goto. */
642cac7b 3143
564ea051 3144int
3145optimize_tail_recursion (arguments, last_insn)
3146 tree arguments;
3147 rtx last_insn;
3148{
3149 /* Finish checking validity, and if valid emit code to set the
3150 argument variables for the new call. */
3151 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3152 {
3153 if (tail_recursion_label == 0)
3154 {
3155 tail_recursion_label = gen_label_rtx ();
3156 emit_label_after (tail_recursion_label,
3157 tail_recursion_reentry);
3158 }
3159 emit_queue ();
3160 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3161 emit_barrier ();
564ea051 3162 return 1;
642cac7b 3163 }
564ea051 3164 return 0;
3165}
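/* For example (a sketch), given
     int fact (int n, int acc)
       { return n ? fact (n - 1, n * acc) : acc; }
   the self-call's arguments are computed, copied first if they still
   refer to the old formals, stored into `n' and `acc' by
   tail_recursion_args below, and the call itself becomes a jump to
   tail_recursion_label.  */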
3166
3167/* Emit code to alter this function's formal parms for a tail-recursive call.
3168 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3169 FORMALS is the chain of decls of formals.
3170 Return 1 if this can be done;
3171 otherwise return 0 and do not emit any code. */
3172
3173static int
3174tail_recursion_args (actuals, formals)
3175 tree actuals, formals;
3176{
3177 register tree a = actuals, f = formals;
3178 register int i;
3179 register rtx *argvec;
3180
3181 /* Check that number and types of actuals are compatible
3182 with the formals. This is not always true in valid C code.
3183 Also check that no formal needs to be addressable
3184 and that all formals are scalars. */
3185
3186 /* Also count the args. */
3187
3188 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3189 {
3190 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3191 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3192 return 0;
3193 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3194 return 0;
3195 }
3196 if (a != 0 || f != 0)
3197 return 0;
3198
3199 /* Compute all the actuals. */
3200
3201 argvec = (rtx *) alloca (i * sizeof (rtx));
3202
3203 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 3204 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3205
3206 /* Find which actual values refer to current values of previous formals.
3207 Copy each of them now, before any formal is changed. */
3208
3209 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3210 {
3211 int copy = 0;
3212 register int j;
3213 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3214 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3215 {
3216 copy = 1;
3217 break;
3218 }
3219 if (copy)
3220 argvec[i] = copy_to_reg (argvec[i]);
3221 }
3222
3223 /* Store the values of the actuals into the formals. */
3224
3225 for (f = formals, a = actuals, i = 0; f;
3226 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3227 {
98f3b471 3228 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3229 emit_move_insn (DECL_RTL (f), argvec[i]);
3230 else
3231 convert_move (DECL_RTL (f), argvec[i],
3232 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3233 }
3234
3235 free_temp_slots ();
3236 return 1;
3237}
3238\f
3239/* Generate the RTL code for entering a binding contour.
3240 The variables are declared one by one, by calls to `expand_decl'.
3241
8e91754e
MM
3242 FLAGS is a bitwise or of the following flags:
3243
3244 1 - Nonzero if this construct should be visible to
3245 `exit_something'.
3246
3247 2 - Nonzero if this contour does not require a
3248 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3249 language-independent code should set this flag because they
3250 will not create corresponding BLOCK nodes. (There should be
3251 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3252 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
4381f7c2 3253 when expand_end_bindings is called.
a97901e6
MM
3254
3255 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3256 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3257 note. */
28d81abb
RK
3258
3259void
a97901e6 3260expand_start_bindings_and_block (flags, block)
8e91754e 3261 int flags;
a97901e6 3262 tree block;
28d81abb
RK
3263{
3264 struct nesting *thisblock = ALLOC_NESTING ();
8e91754e
MM
3265 rtx note;
3266 int exit_flag = ((flags & 1) != 0);
3267 int block_flag = ((flags & 2) == 0);
4381f7c2 3268
a97901e6
MM
3269 /* If a BLOCK is supplied, then the caller should be requesting a
3270 NOTE_INSN_BLOCK_BEG note. */
3271 if (!block_flag && block)
3272 abort ();
8e91754e 3273
a97901e6
MM
3274 /* Create a note to mark the beginning of the block. */
3275 if (block_flag)
3276 {
3277 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3278 NOTE_BLOCK (note) = block;
3279 }
3280 else
3281 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4381f7c2 3282
28d81abb
RK
3283 /* Make an entry on block_stack for the block we are entering. */
3284
3285 thisblock->next = block_stack;
3286 thisblock->all = nesting_stack;
3287 thisblock->depth = ++nesting_depth;
3288 thisblock->data.block.stack_level = 0;
3289 thisblock->data.block.cleanups = 0;
3f1d071b 3290 thisblock->data.block.n_function_calls = 0;
e976b8b2 3291 thisblock->data.block.exception_region = 0;
3f1d071b 3292 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
e976b8b2
MS
3293
3294 thisblock->data.block.conditional_code = 0;
3295 thisblock->data.block.last_unconditional_cleanup = note;
a571f7a0
MM
3296 /* When we insert instructions after the last unconditional cleanup,
3297 we don't adjust last_insn. That means that a later add_insn will
3298 clobber the instructions we've just added. The easiest way to
3299 fix this is to just insert another instruction here, so that the
3300 instructions inserted after the last unconditional cleanup are
3301 never the last instruction. */
3302 emit_note (NULL_PTR, NOTE_INSN_DELETED);
e976b8b2
MS
3303 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3304
28d81abb
RK
3305 if (block_stack
3306 && !(block_stack->data.block.cleanups == NULL_TREE
3307 && block_stack->data.block.outer_cleanups == NULL_TREE))
3308 thisblock->data.block.outer_cleanups
3309 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3310 block_stack->data.block.outer_cleanups);
3311 else
3312 thisblock->data.block.outer_cleanups = 0;
28d81abb
RK
3313 thisblock->data.block.label_chain = 0;
3314 thisblock->data.block.innermost_stack_block = stack_block_stack;
3315 thisblock->data.block.first_insn = note;
3f1d071b 3316 thisblock->data.block.block_start_count = ++current_block_start_count;
28d81abb
RK
3317 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3318 block_stack = thisblock;
3319 nesting_stack = thisblock;
3320
b93a436e
JL
3321 /* Make a new level for allocating stack slots. */
3322 push_temp_slots ();
28d81abb
RK
3323}
3324
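/* Illustration (not part of stmt.c): a front end expanding a brace
   block such as `{ int x = f (); use (x); }' brackets it roughly as

       expand_start_bindings_and_block (0, block);
       expand_decl (x_decl);
       expand_decl_init (x_decl);
       ...expand the statements...
       expand_end_bindings (BLOCK_VARS (block), 1, 0);

   where `block' and `x_decl' are hypothetical trees built by the
   front end.  With FLAGS == 0 a NOTE_INSN_BLOCK_BEG note is emitted,
   so MARK_ENDS is nonzero in the matching expand_end_bindings call.  */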
e976b8b2
MS
3325/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3326 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3327 expand_expr are made. After we end the region, we know that all
3328 temporaries that were created by TARGET_EXPRs will be destroyed
3329 and their space freed for reuse. */
3330
3331void
3332expand_start_target_temps ()
3333{
3334 /* This is so that even if the result is preserved, the space
3335 allocated will be freed, as we know that it is no longer in use. */
3336 push_temp_slots ();
3337
3338 /* Start a new binding layer that will keep track of all cleanup
3339 actions to be performed. */
8e91754e 3340 expand_start_bindings (2);
e976b8b2
MS
3341
3342 target_temp_slot_level = temp_slot_level;
3343}
3344
3345void
3346expand_end_target_temps ()
3347{
3348 expand_end_bindings (NULL_TREE, 0, 0);
4381f7c2 3349
e976b8b2
MS
3350 /* This is so that even if the result is preserved, the space
3351 allocated will be freed, as we know that it is no longer in use. */
3352 pop_temp_slots ();
3353}
3354
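/* Illustration (not part of stmt.c): these two routines bracket the
   expansion of a full expression, e.g.

       expand_start_target_temps ();
       expand_expr (exp, const0_rtx, VOIDmode, 0);
       expand_end_target_temps ();

   so that any TARGET_EXPR temporaries created while expanding the
   hypothetical tree `exp' are cleaned up and their slots freed at the
   end of the expression.  */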
deb5e280
JM
3355/* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3356 in question represents the outermost pair of curly braces (i.e. the "body
3357 block") of a function or method.
3358
3359 For any BLOCK node representing a "body block" of a function or method, the
3360 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3361 represents the outermost (function) scope for the function or method (i.e.
3362 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 3363 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
3364
3365int
3366is_body_block (stmt)
3367 register tree stmt;
3368{
3369 if (TREE_CODE (stmt) == BLOCK)
3370 {
3371 tree parent = BLOCK_SUPERCONTEXT (stmt);
3372
3373 if (parent && TREE_CODE (parent) == BLOCK)
3374 {
3375 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3376
3377 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3378 return 1;
3379 }
3380 }
3381
3382 return 0;
3383}
3384
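/* Example: for `int f (int p) { int v; ... }', the BLOCK for the
   outer braces (the body block, binding `v') has as its
   BLOCK_SUPERCONTEXT the BLOCK for the outermost function scope, and
   that scope's BLOCK_SUPERCONTEXT is the FUNCTION_DECL for `f'; so
   is_body_block returns 1 for the body block and 0 for the scope
   above it.  */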
e976b8b2
MS
3385/* Mark top block of block_stack as an implicit binding for an
3386 exception region. This is used to prevent infinite recursion when
3387 ending a binding with expand_end_bindings. It is only ever called
3388 by expand_eh_region_start, as that is the only way to create a
3389 block stack for an exception region. */
3390
3391void
3392mark_block_as_eh_region ()
3393{
3394 block_stack->data.block.exception_region = 1;
3395 if (block_stack->next
3396 && block_stack->next->data.block.conditional_code)
3397 {
3398 block_stack->data.block.conditional_code
3399 = block_stack->next->data.block.conditional_code;
3400 block_stack->data.block.last_unconditional_cleanup
3401 = block_stack->next->data.block.last_unconditional_cleanup;
3402 block_stack->data.block.cleanup_ptr
3403 = block_stack->next->data.block.cleanup_ptr;
3404 }
3405}
3406
3407/* True if we are currently emitting insns in an area of output code
3408 that is controlled by a conditional expression. This is used by
3409 the cleanup handling code to generate conditional cleanup actions. */
3410
3411int
3412conditional_context ()
3413{
3414 return block_stack && block_stack->data.block.conditional_code;
3415}
3416
3417/* Mark top block of block_stack as not for an implicit binding for an
3418 exception region. This is only ever done by expand_eh_region_end
3419 to let expand_end_bindings know that it is being called explicitly
3420 to end just the binding layer associated with the exception
3421 region; otherwise expand_end_bindings would try to end all
3422 implicit binding layers for exception regions, and then
3423 one normal binding layer. */
3424
3425void
3426mark_block_as_not_eh_region ()
3427{
3428 block_stack->data.block.exception_region = 0;
3429}
3430
3431/* True if the top block of block_stack was marked as for an exception
3432 region by mark_block_as_eh_region. */
3433
3434int
3435is_eh_region ()
3436{
01d939e8 3437 return cfun && block_stack && block_stack->data.block.exception_region;
e976b8b2
MS
3438}
3439
ba716ac9
BS
3440/* Emit a handler label for a nonlocal goto handler.
3441 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3442
e881bb1b 3443static rtx
ba716ac9
BS
3444expand_nl_handler_label (slot, before_insn)
3445 rtx slot, before_insn;
3446{
3447 rtx insns;
3448 rtx handler_label = gen_label_rtx ();
3449
3450 /* Don't let jump_optimize delete the handler. */
3451 LABEL_PRESERVE_P (handler_label) = 1;
3452
3453 start_sequence ();
3454 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3455 insns = get_insns ();
3456 end_sequence ();
3457 emit_insns_before (insns, before_insn);
3458
3459 emit_label (handler_label);
e881bb1b
RH
3460
3461 return handler_label;
ba716ac9
BS
3462}
3463
3464/* Emit code to restore vital registers at the beginning of a nonlocal goto
3465 handler. */
3466static void
3467expand_nl_goto_receiver ()
3468{
3469#ifdef HAVE_nonlocal_goto
3470 if (! HAVE_nonlocal_goto)
3471#endif
3472 /* First adjust our frame pointer to its actual value. It was
3473 previously set to the start of the virtual area corresponding to
3474 the stacked variables when we branched here and now needs to be
3475 adjusted to the actual hardware fp value.
3476
3477 Assignments to virtual registers are converted by
3478 instantiate_virtual_regs into the corresponding assignment
3479 to the underlying register (fp in this case) that makes
3480 the original assignment true.
3481 So the following insn will actually be
3482 decrementing fp by STARTING_FRAME_OFFSET. */
3483 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3484
3485#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3486 if (fixed_regs[ARG_POINTER_REGNUM])
3487 {
3488#ifdef ELIMINABLE_REGS
3489 /* If the argument pointer can be eliminated in favor of the
3490 frame pointer, we don't need to restore it. We assume here
3491 that if such an elimination is present, it can always be used.
3492 This is the case on all known machines; if we don't make this
3493 assumption, we do unnecessary saving on many machines. */
3494 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3495 size_t i;
3496
b6a1cbae 3497 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
ba716ac9
BS
3498 if (elim_regs[i].from == ARG_POINTER_REGNUM
3499 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3500 break;
3501
b6a1cbae 3502 if (i == ARRAY_SIZE (elim_regs))
ba716ac9
BS
3503#endif
3504 {
3505 /* Now restore our arg pointer from the address at which it
3506 was saved in our stack frame.
3507 If space hasn't been allocated for it yet, make
3508 some now. */
3509 if (arg_pointer_save_area == 0)
3510 arg_pointer_save_area
3511 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3512 emit_move_insn (virtual_incoming_args_rtx,
3513 /* We need a pseudo here, or else
3514 instantiate_virtual_regs_1 complains. */
3515 copy_to_reg (arg_pointer_save_area));
3516 }
3517 }
3518#endif
3519
3520#ifdef HAVE_nonlocal_goto_receiver
3521 if (HAVE_nonlocal_goto_receiver)
3522 emit_insn (gen_nonlocal_goto_receiver ());
3523#endif
3524}
3525
3526/* Make handlers for nonlocal gotos taking place in the function calls in
3527 block THISBLOCK. */
3528
3529static void
3530expand_nl_goto_receivers (thisblock)
3531 struct nesting *thisblock;
3532{
3533 tree link;
3534 rtx afterward = gen_label_rtx ();
3535 rtx insns, slot;
e881bb1b 3536 rtx label_list;
ba716ac9
BS
3537 int any_invalid;
3538
3539 /* Record the handler address in the stack slot for that purpose,
3540 during this block, saving and restoring the outer value. */
3541 if (thisblock->next != 0)
3542 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3543 {
3544 rtx save_receiver = gen_reg_rtx (Pmode);
3545 emit_move_insn (XEXP (slot, 0), save_receiver);
3546
3547 start_sequence ();
3548 emit_move_insn (save_receiver, XEXP (slot, 0));
3549 insns = get_insns ();
3550 end_sequence ();
3551 emit_insns_before (insns, thisblock->data.block.first_insn);
3552 }
3553
3554 /* Jump around the handlers; they run only when specially invoked. */
3555 emit_jump (afterward);
3556
3557 /* Make a separate handler for each label. */
3558 link = nonlocal_labels;
3559 slot = nonlocal_goto_handler_slots;
e881bb1b 3560 label_list = NULL_RTX;
ba716ac9
BS
3561 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3562 /* Skip any labels we shouldn't be able to jump to from here;
3563 we generate one special handler for all of them below, which just calls
3564 abort. */
3565 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3566 {
e881bb1b
RH
3567 rtx lab;
3568 lab = expand_nl_handler_label (XEXP (slot, 0),
3569 thisblock->data.block.first_insn);
3570 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3571
ba716ac9
BS
3572 expand_nl_goto_receiver ();
3573
3574 /* Jump to the "real" nonlocal label. */
3575 expand_goto (TREE_VALUE (link));
3576 }
3577
3578 /* A second pass over all nonlocal labels; this time we handle those
3579 we should not be able to jump to at this point. */
3580 link = nonlocal_labels;
3581 slot = nonlocal_goto_handler_slots;
3582 any_invalid = 0;
3583 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3584 if (DECL_TOO_LATE (TREE_VALUE (link)))
3585 {
e881bb1b
RH
3586 rtx lab;
3587 lab = expand_nl_handler_label (XEXP (slot, 0),
3588 thisblock->data.block.first_insn);
3589 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
ba716ac9
BS
3590 any_invalid = 1;
3591 }
3592
3593 if (any_invalid)
3594 {
3595 expand_nl_goto_receiver ();
3596 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3597 VOIDmode, 0);
3598 emit_barrier ();
3599 }
3600
e881bb1b 3601 nonlocal_goto_handler_labels = label_list;
ba716ac9
BS
3602 emit_label (afterward);
3603}
3604
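/* Illustration (not part of stmt.c): the receivers built above back
   GNU C nonlocal gotos out of nested functions, as in

       void outer (void)
       {
         __label__ l;
         void inner (void) { goto l; }
         call_with_callback (inner);
         return;
       l:
         handle_escape ();
       }

   call_with_callback and handle_escape are hypothetical.  When
   `inner' executes `goto l', control reenters `outer' through the
   handler emitted for `l', which restores the frame and argument
   pointers and then jumps to the real label.  */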
ba716677
MM
3605/* Warn about any unused VARS (which may contain nodes other than
3606 VAR_DECLs, but such nodes are ignored). The nodes are connected
3607 via the TREE_CHAIN field. */
3608
3609void
3610warn_about_unused_variables (vars)
3611 tree vars;
3612{
3613 tree decl;
3614
078721e1 3615 if (warn_unused_variable)
ba716677 3616 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 3617 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
3618 && ! TREE_USED (decl)
3619 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 3620 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ba716677
MM
3621 warning_with_decl (decl, "unused variable `%s'");
3622}
3623
28d81abb 3624/* Generate RTL code to terminate a binding contour.
e97b5c12
MM
3625
3626 VARS is the chain of VAR_DECL nodes for the variables bound in this
3627 contour. There may actually be other nodes in this chain, but any
3628 nodes other than VAR_DECLS are ignored.
3629
28d81abb
RK
3630 MARK_ENDS is nonzero if we should put a note at the beginning
3631 and end of this binding contour.
3632
3633 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3634 (That is true automatically if the contour has a saved stack level.) */
3635
3636void
3637expand_end_bindings (vars, mark_ends, dont_jump_in)
3638 tree vars;
3639 int mark_ends;
3640 int dont_jump_in;
3641{
e976b8b2 3642 register struct nesting *thisblock;
28d81abb 3643
e976b8b2
MS
3644 while (block_stack->data.block.exception_region)
3645 {
3646 /* Because we don't need or want a new temporary level and
3647 because we didn't create one in expand_eh_region_start,
3648 create a fake one now to avoid removing one in
3649 expand_end_bindings. */
3650 push_temp_slots ();
3651
3652 block_stack->data.block.exception_region = 0;
3653
3654 expand_end_bindings (NULL_TREE, 0, 0);
3655 }
3656
e976b8b2
MS
3657 /* Since expand_eh_region_start does an expand_start_bindings, we
3658 have to first end all the bindings that were created by
3659 expand_eh_region_start. */
4381f7c2 3660
e976b8b2
MS
3661 thisblock = block_stack;
3662
ba716677
MM
3663 /* If any of the variables in this scope were not used, warn the
3664 user. */
3665 warn_about_unused_variables (vars);
28d81abb 3666
28d81abb
RK
3667 if (thisblock->exit_label)
3668 {
3669 do_pending_stack_adjust ();
3670 emit_label (thisblock->exit_label);
3671 }
3672
ba716ac9 3673 /* If necessary, make handlers for nonlocal gotos taking
28d81abb 3674 place in the function calls in this block. */
3f1d071b 3675 if (function_call_count != thisblock->data.block.n_function_calls
28d81abb
RK
3676 && nonlocal_labels
3677 /* Make handler for outermost block
3678 if there were any nonlocal gotos to this function. */
3679 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3680 /* Make handler for inner block if it has something
3681 special to do when you jump out of it. */
3682 : (thisblock->data.block.cleanups != 0
3683 || thisblock->data.block.stack_level != 0)))
ba716ac9 3684 expand_nl_goto_receivers (thisblock);
28d81abb 3685
72eb1038
BH
3686 /* Don't allow jumping into a block that has a stack level.
3687 Cleanups are allowed, though. */
28d81abb 3688 if (dont_jump_in
72eb1038 3689 || thisblock->data.block.stack_level != 0)
28d81abb
RK
3690 {
3691 struct label_chain *chain;
3692
3693 /* Any labels in this block are no longer valid to go to.
3694 Mark them to cause an error message. */
3695 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3696 {
3697 DECL_TOO_LATE (chain->label) = 1;
3698 /* If any goto without a fixup came to this label,
3699 that must be an error, because gotos without fixups
72eb1038 3700 come from outside all saved stack-levels. */
28d81abb
RK
3701 if (TREE_ADDRESSABLE (chain->label))
3702 error_with_decl (chain->label,
3703 "label `%s' used before containing binding contour");
3704 }
3705 }
3706
3707 /* Restore stack level in effect before the block
3708 (only if variable-size objects allocated). */
3709 /* Perform any cleanups associated with the block. */
3710
3711 if (thisblock->data.block.stack_level != 0
3712 || thisblock->data.block.cleanups != 0)
3713 {
04da69d3
JM
3714 int reachable;
3715 rtx insn;
28d81abb 3716
50d1b7a1
MS
3717 /* Don't let cleanups affect ({...}) constructs. */
3718 int old_expr_stmts_for_value = expr_stmts_for_value;
3719 rtx old_last_expr_value = last_expr_value;
3720 tree old_last_expr_type = last_expr_type;
3721 expr_stmts_for_value = 0;
28d81abb 3722
04da69d3
JM
3723 /* Only clean up here if this point can actually be reached. */
3724 insn = get_last_insn ();
3725 if (GET_CODE (insn) == NOTE)
3726 insn = prev_nonnote_insn (insn);
d1ee23e5 3727 reachable = (! insn || GET_CODE (insn) != BARRIER);
4381f7c2 3728
50d1b7a1
MS
3729 /* Do the cleanups. */
3730 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3731 if (reachable)
3732 do_pending_stack_adjust ();
28d81abb 3733
50d1b7a1
MS
3734 expr_stmts_for_value = old_expr_stmts_for_value;
3735 last_expr_value = old_last_expr_value;
3736 last_expr_type = old_last_expr_type;
3737
3738 /* Restore the stack level. */
3739
3740 if (reachable && thisblock->data.block.stack_level != 0)
3741 {
3742 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3743 thisblock->data.block.stack_level, NULL_RTX);
ba716ac9 3744 if (nonlocal_goto_handler_slots != 0)
50d1b7a1
MS
3745 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3746 NULL_RTX);
28d81abb
RK
3747 }
3748
3749 /* Any gotos out of this block must also do these things.
59257ff7
RK
3750 Also report any gotos with fixups that came to labels in this
3751 level. */
28d81abb
RK
3752 fixup_gotos (thisblock,
3753 thisblock->data.block.stack_level,
3754 thisblock->data.block.cleanups,
3755 thisblock->data.block.first_insn,
3756 dont_jump_in);
3757 }
3758
c7d2d61d
RS
3759 /* Mark the beginning and end of the scope if requested.
3760 We do this now, after running cleanups on the variables
3761 just going out of scope, so they are in scope for their cleanups. */
3762
3763 if (mark_ends)
a97901e6
MM
3764 {
3765 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3766 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3767 }
c7d2d61d
RS
3768 else
3769 /* Get rid of the beginning-mark if we don't make an end-mark. */
3770 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3771
e976b8b2 3772 /* Restore the temporary level of TARGET_EXPRs. */
3f1d071b 3773 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
e976b8b2 3774
28d81abb
RK
3775 /* Restore block_stack level for containing block. */
3776
3777 stack_block_stack = thisblock->data.block.innermost_stack_block;
3778 POPSTACK (block_stack);
3779
3780 /* Pop the stack slot nesting and free any slots at this level. */
3781 pop_temp_slots ();
3782}
3783\f
7393c642
RK
3784/* Generate code to save the stack pointer at the start of the current block
3785 and set up to restore it on exit. */
3786
3787void
3788save_stack_pointer ()
3789{
3790 struct nesting *thisblock = block_stack;
3791
3792 if (thisblock->data.block.stack_level == 0)
3793 {
3794 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3795 &thisblock->data.block.stack_level,
3796 thisblock->data.block.first_insn);
3797 stack_block_stack = thisblock;
3798 }
3799}
3800\f
28d81abb 3801/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3802 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3803
3804void
3805expand_decl (decl)
3806 register tree decl;
3807{
3f1d071b 3808 struct nesting *thisblock;
ca695ac9
JB
3809 tree type;
3810
ca695ac9 3811 type = TREE_TYPE (decl);
28d81abb
RK
3812
3813 /* Only automatic variables need any expansion done.
3814 Static and external variables, and external functions,
3815 will be handled by `assemble_variable' (called from finish_decl).
3816 TYPE_DECL and CONST_DECL require nothing.
3817 PARM_DECLs are handled in `assign_parms'. */
3818
3819 if (TREE_CODE (decl) != VAR_DECL)
3820 return;
44fe2e80 3821 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3822 return;
3823
3f1d071b
BS
3824 thisblock = block_stack;
3825
28d81abb
RK
3826 /* Create the RTL representation for the variable. */
3827
3828 if (type == error_mark_node)
38a448ca 3829 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
1da68f56 3830
28d81abb
RK
3831 else if (DECL_SIZE (decl) == 0)
3832 /* Variable with incomplete type. */
3833 {
3834 if (DECL_INITIAL (decl) == 0)
3835 /* Error message was already done; now avoid a crash. */
1da68f56 3836 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
28d81abb
RK
3837 else
3838 /* An initializer is going to decide the size of this array.
3839 Until we know the size, represent its address with a reg. */
38a448ca 3840 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3bdf5ad1
RK
3841
3842 set_mem_attributes (DECL_RTL (decl), decl, 1);
28d81abb
RK
3843 }
3844 else if (DECL_MODE (decl) != BLKmode
3845 /* If -ffloat-store, don't put explicit float vars
3846 into regs. */
3847 && !(flag_float_store
3848 && TREE_CODE (type) == REAL_TYPE)
3849 && ! TREE_THIS_VOLATILE (decl)
d29c259b 3850 && (DECL_REGISTER (decl) || optimize)
e5e809f4 3851 /* if -fcheck-memory-usage, check all variables. */
7d384cc0 3852 && ! current_function_check_memory_usage)
28d81abb
RK
3853 {
3854 /* Automatic variable that can go in a register. */
98f3b471 3855 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
3856 enum machine_mode reg_mode
3857 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3858
7f070d5e
RK
3859 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3860 mark_user_reg (DECL_RTL (decl));
3861
e5e809f4 3862 if (POINTER_TYPE_P (type))
7f070d5e 3863 mark_reg_pointer (DECL_RTL (decl),
bdb429a5 3864 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
258a120b
JM
3865
3866 maybe_set_unchanging (DECL_RTL (decl), decl);
d96a6d1a
JM
3867
3868 /* If something wants our address, try to use ADDRESSOF. */
3869 if (TREE_ADDRESSABLE (decl))
3870 put_var_into_stack (decl);
28d81abb 3871 }
0df15c2c 3872
4559fd9e 3873 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
5e4ef18a 3874 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
3875 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3876 STACK_CHECK_MAX_VAR_SIZE)))
28d81abb
RK
3877 {
3878 /* Variable of fixed size that goes on the stack. */
3879 rtx oldaddr = 0;
3880 rtx addr;
3881
3882 /* If we previously made RTL for this decl, it must be an array
3883 whose size was determined by the initializer.
3884 The old address was a register; set that register now
3885 to the proper address. */
3886 if (DECL_RTL (decl) != 0)
3887 {
3888 if (GET_CODE (DECL_RTL (decl)) != MEM
3889 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3890 abort ();
3891 oldaddr = XEXP (DECL_RTL (decl), 0);
3892 }
3893
d16790f2 3894 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
28d81abb
RK
3895
3896 /* Set alignment we actually gave this decl. */
3897 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3898 : GET_MODE_BITSIZE (DECL_MODE (decl)));
11cf4d18 3899 DECL_USER_ALIGN (decl) = 0;
28d81abb
RK
3900
3901 if (oldaddr)
3902 {
3903 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3904 if (addr != oldaddr)
3905 emit_move_insn (oldaddr, addr);
3906 }
28d81abb
RK
3907 }
3908 else
3909 /* Dynamic-size object: must push space on the stack. */
3910 {
3911 rtx address, size;
3912
3913 /* Record the stack pointer on entry to the block, if we have
3914 not already done so. */
7393c642
RK
3915 do_pending_stack_adjust ();
3916 save_stack_pointer ();
28d81abb 3917
4d9af632
JM
3918 /* In function-at-a-time mode, variable_size doesn't expand this,
3919 so do it now. */
3920 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3921 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3922 const0_rtx, VOIDmode, 0);
3923
28d81abb 3924 /* Compute the variable's size, in bytes. */
4559fd9e 3925 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3926 free_temp_slots ();
3927
ff91ad08 3928 /* Allocate space on the stack for the variable. Note that
4381f7c2 3929 DECL_ALIGN says how the variable is to be aligned and we
ff91ad08
RK
3930 cannot use it to conclude anything about the alignment of
3931 the size. */
37366632 3932 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3933 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3934
28d81abb 3935 /* Reference the variable indirect through that rtx. */
38a448ca 3936 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
28d81abb 3937
3bdf5ad1 3938 set_mem_attributes (DECL_RTL (decl), decl, 1);
2207e295 3939
28d81abb
RK
3940 /* Indicate the alignment we actually gave this variable. */
3941#ifdef STACK_BOUNDARY
3942 DECL_ALIGN (decl) = STACK_BOUNDARY;
3943#else
3944 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3945#endif
11cf4d18 3946 DECL_USER_ALIGN (decl) = 0;
28d81abb 3947 }
28d81abb
RK
3948}
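/* Illustration (not part of stmt.c): the paths above are exercised by
   a function such as

       void f (int n)
       {
         int i;
         char buf[16];
         char vla[n];
       }

   When optimizing, `i' gets a pseudo register; `buf' gets a fixed
   stack slot from assign_temp; and `vla' takes the dynamic-size path:
   the block's stack level is saved, allocate_dynamic_stack_space
   pushes the space, and DECL_RTL (vla) becomes a MEM at that address,
   popped again when the block is left.  */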
3949\f
3950/* Emit code to perform the initialization of a declaration DECL. */
3951
3952void
3953expand_decl_init (decl)
3954 tree decl;
3955{
b4ac57ab
RS
3956 int was_used = TREE_USED (decl);
3957
3564e40e
RK
3958 /* If this is a CONST_DECL, we don't have to generate any code, but
3959 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3960 to be set while in the obstack containing the constant. If we don't
3961 do this, we can lose if we have functions nested three deep and the middle
3962 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3963 the innermost function is the first to expand that STRING_CST. */
3964 if (TREE_CODE (decl) == CONST_DECL)
3965 {
3966 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3967 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3968 EXPAND_INITIALIZER);
3969 return;
3970 }
3971
28d81abb
RK
3972 if (TREE_STATIC (decl))
3973 return;
3974
3975 /* Compute and store the initial value now. */
3976
3977 if (DECL_INITIAL (decl) == error_mark_node)
3978 {
3979 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
e5e809f4 3980
28d81abb 3981 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
e5e809f4 3982 || code == POINTER_TYPE || code == REFERENCE_TYPE)
28d81abb
RK
3983 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3984 0, 0);
3985 emit_queue ();
3986 }
3987 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3988 {
3989 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3990 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3991 emit_queue ();
3992 }
3993
b4ac57ab
RS
3994 /* Don't let the initialization count as "using" the variable. */
3995 TREE_USED (decl) = was_used;
3996
28d81abb 3997 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 3998 preserve_temp_slots (NULL_RTX);
28d81abb
RK
3999 free_temp_slots ();
4000}
4001
4002/* CLEANUP is an expression to be executed at exit from this binding contour;
4003 for example, in C++, it might call the destructor for this variable.
4004
4847c938
MS
4005 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4006 CLEANUP multiple times, and have the correct semantics. This
e976b8b2
MS
4007 happens in exception handling, and for gotos, returns, and
4008 breaks that leave the current scope.
28d81abb
RK
4009
4010 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4011 that is not associated with any particular variable. */
4012
4013int
4014expand_decl_cleanup (decl, cleanup)
4015 tree decl, cleanup;
4016{
3f1d071b 4017 struct nesting *thisblock;
28d81abb
RK
4018
4019 /* Error if we are not in any block. */
01d939e8 4020 if (cfun == 0 || block_stack == 0)
28d81abb
RK
4021 return 0;
4022
3f1d071b
BS
4023 thisblock = block_stack;
4024
28d81abb
RK
4025 /* Record the cleanup if there is one. */
4026
4027 if (cleanup != 0)
4028 {
e976b8b2
MS
4029 tree t;
4030 rtx seq;
4031 tree *cleanups = &thisblock->data.block.cleanups;
4032 int cond_context = conditional_context ();
4033
4034 if (cond_context)
4035 {
4036 rtx flag = gen_reg_rtx (word_mode);
4037 rtx set_flag_0;
4038 tree cond;
4039
4040 start_sequence ();
4041 emit_move_insn (flag, const0_rtx);
4042 set_flag_0 = get_insns ();
4043 end_sequence ();
4044
4045 thisblock->data.block.last_unconditional_cleanup
4046 = emit_insns_after (set_flag_0,
4047 thisblock->data.block.last_unconditional_cleanup);
4048
4049 emit_move_insn (flag, const1_rtx);
4050
e976b8b2
MS
4051 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4052 DECL_RTL (cond) = flag;
4053
4054 /* Conditionalize the cleanup. */
4055 cleanup = build (COND_EXPR, void_type_node,
4056 truthvalue_conversion (cond),
4057 cleanup, integer_zero_node);
4058 cleanup = fold (cleanup);
4059
e976b8b2
MS
4060 cleanups = thisblock->data.block.cleanup_ptr;
4061 }
4062
4847c938 4063 cleanup = unsave_expr (cleanup);
e976b8b2 4064
1f8f4a0b 4065 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
e976b8b2
MS
4066
4067 if (! cond_context)
4068 /* If this block has a cleanup, it belongs in stack_block_stack. */
4069 stack_block_stack = thisblock;
4070
4071 if (cond_context)
4072 {
4073 start_sequence ();
4074 }
4847c938 4075
e976b8b2
MS
4076 /* If this was optimized so that there is no exception region for the
4077 cleanup, then mark the TREE_LIST node, so that we can later tell
4078 if we need to call expand_eh_region_end. */
e9a25f70
JL
4079 if (! using_eh_for_cleanups_p
4080 || expand_eh_region_start_tree (decl, cleanup))
e976b8b2 4081 TREE_ADDRESSABLE (t) = 1;
716cc7f7
JM
4082 /* If that started a new EH region, we're in a new block. */
4083 thisblock = block_stack;
e976b8b2
MS
4084
4085 if (cond_context)
4086 {
4087 seq = get_insns ();
4088 end_sequence ();
7e82801f
MS
4089 if (seq)
4090 thisblock->data.block.last_unconditional_cleanup
4091 = emit_insns_after (seq,
4092 thisblock->data.block.last_unconditional_cleanup);
e976b8b2
MS
4093 }
4094 else
4095 {
4096 thisblock->data.block.last_unconditional_cleanup
4097 = get_last_insn ();
ef97beff
JJ
4098 /* When we insert instructions after the last unconditional cleanup,
4099 we don't adjust last_insn. That means that a later add_insn will
4100 clobber the instructions we've just added. The easiest way to
4101 fix this is to just insert another instruction here, so that the
4102 instructions inserted after the last unconditional cleanup are
4103 never the last instruction. */
4104 emit_note (NULL_PTR, NOTE_INSN_DELETED);
e976b8b2
MS
4105 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4106 }
28d81abb
RK
4107 }
4108 return 1;
4109}
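/* Illustration (not part of stmt.c): a C++ front end expanding
   `{ T t; ... }' registers the destructor as a cleanup roughly via

       expand_decl (t_decl);
       expand_decl_init (t_decl);
       expand_decl_cleanup (t_decl, dtor_call);

   where `dtor_call' is a hypothetical tree for `t.~T ()'.  The
   cleanup is then run by expand_end_bindings when the contour is
   left normally, or by fixup_gotos and the exception machinery when
   it is left early.  */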
e976b8b2 4110
c7ae64f2
JM
4111/* Like expand_decl_cleanup, but suppress generating an exception handler
4112 to perform the cleanup. */
4113
95d75019 4114#if 0
c7ae64f2
JM
4115int
4116expand_decl_cleanup_no_eh (decl, cleanup)
4117 tree decl, cleanup;
4118{
4119 int save_eh = using_eh_for_cleanups_p;
b472527b
JL
4120 int result;
4121
c7ae64f2 4122 using_eh_for_cleanups_p = 0;
b472527b 4123 result = expand_decl_cleanup (decl, cleanup);
c7ae64f2 4124 using_eh_for_cleanups_p = save_eh;
b472527b
JL
4125
4126 return result;
c7ae64f2 4127}
95d75019 4128#endif
c7ae64f2 4129
e976b8b2 4130/* Arrange for the top element of the dynamic cleanup chain to be
4c581243
MS
4131 popped if we exit the current binding contour. DECL is the
4132 associated declaration, if any, otherwise NULL_TREE. If the
4133 current contour is left via an exception, then __sjthrow will pop
4134 the top element off the dynamic cleanup chain. The code that
4135 avoids doing the action we push into the cleanup chain in the
4136 exceptional case is contained in expand_cleanups.
e976b8b2
MS
4137
4138 This routine is only used by expand_eh_region_start, and that is
4139 the only way in which an exception region should be started. This
4140 routine is only used when using the setjmp/longjmp codegen method
4141 for exception handling. */
4142
4143int
4c581243
MS
4144expand_dcc_cleanup (decl)
4145 tree decl;
e976b8b2 4146{
3f1d071b 4147 struct nesting *thisblock;
e976b8b2
MS
4148 tree cleanup;
4149
4150 /* Error if we are not in any block. */
01d939e8 4151 if (cfun == 0 || block_stack == 0)
e976b8b2 4152 return 0;
3f1d071b 4153 thisblock = block_stack;
e976b8b2
MS
4154
4155 /* Record the cleanup for the dynamic handler chain. */
4156
e976b8b2 4157 cleanup = make_node (POPDCC_EXPR);
e976b8b2
MS
4158
4159 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4160 thisblock->data.block.cleanups
1f8f4a0b 4161 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
e976b8b2
MS
4162
4163 /* If this block has a cleanup, it belongs in stack_block_stack. */
4164 stack_block_stack = thisblock;
4165 return 1;
4166}
4167
4168/* Arrange for the top element of the dynamic handler chain to be
4c581243 4169 popped if we exit the current binding contour. DECL is the
956d6950 4170 associated declaration, if any, otherwise NULL_TREE. If the current
4c581243
MS
4171 contour is left via an exception, then __sjthrow will pop the top
4172 element off the dynamic handler chain. The code that avoids doing
4173 the action we push into the handler chain in the exceptional case
4174 is contained in expand_cleanups.
e976b8b2
MS
4175
4176 This routine is only used by expand_eh_region_start, and that is
4177 the only way in which an exception region should be started. This
4178 routine is only used when using the setjmp/longjmp codegen method
4179 for exception handling. */
4180
4181int
4c581243
MS
4182expand_dhc_cleanup (decl)
4183 tree decl;
e976b8b2 4184{
3f1d071b 4185 struct nesting *thisblock;
e976b8b2
MS
4186 tree cleanup;
4187
4188 /* Error if we are not in any block. */
01d939e8 4189 if (cfun == 0 || block_stack == 0)
e976b8b2 4190 return 0;
3f1d071b 4191 thisblock = block_stack;
e976b8b2
MS
4192
4193 /* Record the cleanup for the dynamic handler chain. */
4194
e976b8b2 4195 cleanup = make_node (POPDHC_EXPR);
e976b8b2
MS
4196
4197 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4198 thisblock->data.block.cleanups
1f8f4a0b 4199 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
e976b8b2
MS
4200
4201 /* If this block has a cleanup, it belongs in stack_block_stack. */
4202 stack_block_stack = thisblock;
4203 return 1;
4204}
28d81abb
RK
4205\f
4206/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4207 DECL_ELTS is the list of elements that belong to DECL's type.
4208 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4209
4210void
4211expand_anon_union_decl (decl, cleanup, decl_elts)
4212 tree decl, cleanup, decl_elts;
4213{
01d939e8 4214 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
28d81abb 4215 rtx x;
8a693bd0 4216 tree t;
28d81abb 4217
8a693bd0
MM
4218 /* If any of the elements are addressable, so is the entire union. */
4219 for (t = decl_elts; t; t = TREE_CHAIN (t))
4220 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4221 {
4222 TREE_ADDRESSABLE (decl) = 1;
4223 break;
4224 }
4381f7c2 4225
ec5cd386
RK
4226 expand_decl (decl);
4227 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
4228 x = DECL_RTL (decl);
4229
8a693bd0
MM
4230 /* Go through the elements, assigning RTL to each. */
4231 for (t = decl_elts; t; t = TREE_CHAIN (t))
28d81abb 4232 {
8a693bd0
MM
4233 tree decl_elt = TREE_VALUE (t);
4234 tree cleanup_elt = TREE_PURPOSE (t);
28d81abb
RK
4235 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4236
7b9032dd
JM
4237 /* Propagate the union's alignment to the elements. */
4238 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
11cf4d18 4239 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
7b9032dd
JM
4240
4241 /* If the element has BLKmode and the union doesn't, the union is
4242 aligned such that the element doesn't need to have BLKmode, so
4243 change the element's mode to the appropriate one for its size. */
4244 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4245 DECL_MODE (decl_elt) = mode
05bccae2 4246 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
7b9032dd 4247
28d81abb
RK
4248 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4249 instead create a new MEM rtx with the proper mode. */
4250 if (GET_CODE (x) == MEM)
4251 {
4252 if (mode == GET_MODE (x))
4253 DECL_RTL (decl_elt) = x;
4254 else
4255 {
38a448ca 4256 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
c6df88cb 4257 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
28d81abb
RK
4258 }
4259 }
4260 else if (GET_CODE (x) == REG)
4261 {
4262 if (mode == GET_MODE (x))
4263 DECL_RTL (decl_elt) = x;
4264 else
38a448ca 4265 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
28d81abb
RK
4266 }
4267 else
4268 abort ();
4269
4270 /* Record the cleanup if there is one. */
4271
4272 if (cleanup != 0)
4273 thisblock->data.block.cleanups
1f8f4a0b
MM
4274 = tree_cons (decl_elt, cleanup_elt,
4275 thisblock->data.block.cleanups);
28d81abb
RK
4276 }
4277}
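/* Illustration (not part of stmt.c): for a C++ anonymous union member

       union { int i; float f; };

   the front end calls this routine with DECL being the compiler
   generated variable for the union and DECL_ELTS listing `i' and
   `f'; each member ends up sharing DECL's storage, either as a MEM
   in the member's own mode or as a SUBREG of DECL's register.  */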
4278\f
4279/* Expand a list of cleanups LIST.
4280 Elements may be expressions or may be nested lists.
4281
4282 If DONT_DO is nonnull, then any list-element
4283 whose TREE_PURPOSE matches DONT_DO is omitted.
4284 This is sometimes used to avoid a cleanup associated with
4e44807b
MS
4285 a value that is being returned out of the scope.
4286
4287 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
50d1b7a1
MS
4288 goto, and we handle protection regions specially in that case.
4289
4290 If REACHABLE, we emit code, otherwise just inform the exception handling
4291 code about this finalization. */
28d81abb
RK
4292
4293static void
50d1b7a1 4294expand_cleanups (list, dont_do, in_fixup, reachable)
28d81abb
RK
4295 tree list;
4296 tree dont_do;
4e44807b 4297 int in_fixup;
50d1b7a1 4298 int reachable;
28d81abb
RK
4299{
4300 tree tail;
4301 for (tail = list; tail; tail = TREE_CHAIN (tail))
4302 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4303 {
4304 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
50d1b7a1 4305 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
28d81abb
RK
4306 else
4307 {
4e44807b 4308 if (! in_fixup)
e976b8b2
MS
4309 {
4310 tree cleanup = TREE_VALUE (tail);
4311
4312 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4313 if (TREE_CODE (cleanup) != POPDHC_EXPR
4314 && TREE_CODE (cleanup) != POPDCC_EXPR
4315 /* See expand_eh_region_start_tree for this case. */
4316 && ! TREE_ADDRESSABLE (tail))
4317 {
4318 cleanup = protect_with_terminate (cleanup);
4319 expand_eh_region_end (cleanup);
4320 }
4321 }
61d6b1cc 4322
50d1b7a1
MS
4323 if (reachable)
4324 {
4325 /* Cleanups may be run multiple times. For example,
4326 when exiting a binding contour, we expand the
4327 cleanups associated with that contour. When a goto
4328 within that binding contour has a target outside that
4329 contour, it will expand all cleanups from its scope to
4330 the target. Though the cleanups are expanded multiple
4331 times, the control paths are non-overlapping so the
4332 cleanups will not be executed twice. */
9762d48d
JM
4333
4334 /* We may need to protect fixups with rethrow regions. */
4335 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
e5e809f4 4336
9762d48d
JM
4337 if (protect)
4338 expand_fixup_region_start ();
e5e809f4 4339
f54a7f6f
MM
4340 /* The cleanup might contain try-blocks, so we have to
4341 preserve our current queue. */
4342 push_ehqueue ();
50d1b7a1 4343 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
f54a7f6f 4344 pop_ehqueue ();
9762d48d
JM
4345 if (protect)
4346 expand_fixup_region_end (TREE_VALUE (tail));
50d1b7a1
MS
4347 free_temp_slots ();
4348 }
28d81abb
RK
4349 }
4350 }
4351}
4352
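/* Illustration (not part of stmt.c): LIST is built by the tree_cons
   calls in expand_decl_cleanup, so for `{ T a; T b; ... }' it looks
   roughly like

       (b_decl . dtor_for_b), (a_decl . dtor_for_a)

   with TREE_PURPOSE the decl and TREE_VALUE the cleanup.  Because
   each new cleanup is consed onto the front, the most recently
   declared variable is cleaned up first.  A TREE_VALUE may itself be
   such a list (see outer_cleanups), which the recursive call above
   handles.  dtor_for_a and dtor_for_b are hypothetical cleanup
   trees.  */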
e976b8b2
MS
4353/* Mark the context we are emitting RTL for as a conditional
4354 context, so that any cleanup actions we register with
4355 expand_decl_init will be properly conditionalized when those
4356 cleanup actions are later performed. Must be called before any
956d6950 4357 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
4358
4359void
956d6950 4360start_cleanup_deferral ()
e976b8b2 4361{
e3eef942
JW
4362 /* block_stack can be NULL if we are inside the parameter list. It is
4363 OK to do nothing, because cleanups aren't possible here. */
4364 if (block_stack)
4365 ++block_stack->data.block.conditional_code;
e976b8b2
MS
4366}
4367
4368/* Mark the end of a conditional region of code. Because cleanup
956d6950 4369 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
4370 after we end the currently deferred cleanups; only after we end all
4371 deferred cleanups are we back in unconditional code. */
4372
4373void
956d6950 4374end_cleanup_deferral ()
e976b8b2 4375{
e3eef942
JW
4376 /* block_stack can be NULL if we are inside the parameter list. It is
4377 OK to do nothing, because cleanups aren't possible here. */
4378 if (block_stack)
4379 --block_stack->data.block.conditional_code;
e976b8b2
MS
4380}
4381
28d81abb
RK
4382/* Move all cleanups from the current block_stack
4383 to the containing block_stack, where they are assumed to
4384 have been created. If anything can cause a temporary to
4385 be created, but not expanded for more than one level of
4386 block_stacks, then this code will have to change. */
4387
4388void
4389move_cleanups_up ()
4390{
4391 struct nesting *block = block_stack;
4392 struct nesting *outer = block->next;
4393
4394 outer->data.block.cleanups
4395 = chainon (block->data.block.cleanups,
4396 outer->data.block.cleanups);
4397 block->data.block.cleanups = 0;
4398}
4399
4400tree
4401last_cleanup_this_contour ()
4402{
4403 if (block_stack == 0)
4404 return 0;
4405
4406 return block_stack->data.block.cleanups;
4407}
4408
4409/* Return 1 if there are any pending cleanups at this point.
4410 If THIS_CONTOUR is nonzero, check the current contour as well.
4411 Otherwise, look only at the contours that enclose this one. */
4412
4413int
4414any_pending_cleanups (this_contour)
4415 int this_contour;
4416{
4417 struct nesting *block;
4418
01d939e8 4419 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
28d81abb
RK
4420 return 0;
4421
4422 if (this_contour && block_stack->data.block.cleanups != NULL)
4423 return 1;
4424 if (block_stack->data.block.cleanups == 0
e976b8b2 4425 && block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
4426 return 0;
4427
4428 for (block = block_stack->next; block; block = block->next)
4429 if (block->data.block.cleanups != 0)
4430 return 1;
4431
4432 return 0;
4433}
4434\f
4435/* Enter a case (Pascal) or switch (C) statement.
4436 Push a block onto case_stack and nesting_stack
4437 to accumulate the case-labels that are seen
4438 and to record the labels generated for the statement.
4439
4440 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4441 Otherwise, this construct is transparent for `exit_something'.
4442
4443 EXPR is the index-expression to be dispatched on.
4444 TYPE is its nominal type. We could simply convert EXPR to this type,
4445 but instead we take short cuts. */
4446
4447void
4448expand_start_case (exit_flag, expr, type, printname)
4449 int exit_flag;
4450 tree expr;
4451 tree type;
dff01034 4452 const char *printname;
28d81abb
RK
4453{
4454 register struct nesting *thiscase = ALLOC_NESTING ();
4455
4456 /* Make an entry on case_stack for the case we are entering. */
4457
4458 thiscase->next = case_stack;
4459 thiscase->all = nesting_stack;
4460 thiscase->depth = ++nesting_depth;
4461 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4462 thiscase->data.case_stmt.case_list = 0;
4463 thiscase->data.case_stmt.index_expr = expr;
4464 thiscase->data.case_stmt.nominal_type = type;
4465 thiscase->data.case_stmt.default_label = 0;
28d81abb 4466 thiscase->data.case_stmt.printname = printname;
a11759a3 4467 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
4468 case_stack = thiscase;
4469 nesting_stack = thiscase;
4470
4471 do_pending_stack_adjust ();
4472
4473 /* Make sure case_stmt.start points to something that won't
4474 need any transformation before expand_end_case. */
4475 if (GET_CODE (get_last_insn ()) != NOTE)
37366632 4476 emit_note (NULL_PTR, NOTE_INSN_DELETED);
28d81abb
RK
4477
4478 thiscase->data.case_stmt.start = get_last_insn ();
4c581243 4479
956d6950 4480 start_cleanup_deferral ();
28d81abb
RK
4481}
4482
4483/* Start a "dummy case statement" within which case labels are invalid
4484 and are not connected to any larger real case statement.
4485 This can be used if you don't want to let a case statement jump
4486 into the middle of certain kinds of constructs. */
4487
4488void
4489expand_start_case_dummy ()
4490{
4491 register struct nesting *thiscase = ALLOC_NESTING ();
4492
4493 /* Make an entry on case_stack for the dummy. */
4494
4495 thiscase->next = case_stack;
4496 thiscase->all = nesting_stack;
4497 thiscase->depth = ++nesting_depth;
4498 thiscase->exit_label = 0;
4499 thiscase->data.case_stmt.case_list = 0;
4500 thiscase->data.case_stmt.start = 0;
4501 thiscase->data.case_stmt.nominal_type = 0;
4502 thiscase->data.case_stmt.default_label = 0;
28d81abb
RK
4503 case_stack = thiscase;
4504 nesting_stack = thiscase;
956d6950 4505 start_cleanup_deferral ();
28d81abb
RK
4506}
4507
4508/* End a dummy case statement. */
4509
4510void
4511expand_end_case_dummy ()
4512{
956d6950 4513 end_cleanup_deferral ();
28d81abb
RK
4514 POPSTACK (case_stack);
4515}
4516
4517/* Return the data type of the index-expression
4518 of the innermost case statement, or null if none. */
4519
4520tree
4521case_index_expr_type ()
4522{
4523 if (case_stack)
4524 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4525 return 0;
4526}
4527\f
a11759a3
JR
4528static void
4529check_seenlabel ()
4530{
4531 /* If this is the first label, warn if any insns have been emitted. */
4532 if (case_stack->data.case_stmt.line_number_status >= 0)
4533 {
4534 rtx insn;
4535
4536 restore_line_number_status
4537 (case_stack->data.case_stmt.line_number_status);
4538 case_stack->data.case_stmt.line_number_status = -1;
4539
4540 for (insn = case_stack->data.case_stmt.start;
4541 insn;
4542 insn = NEXT_INSN (insn))
4543 {
4544 if (GET_CODE (insn) == CODE_LABEL)
4545 break;
4546 if (GET_CODE (insn) != NOTE
4547 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4548 {
4549 do
4550 insn = PREV_INSN (insn);
0dacbd0e
JW
4551 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4552
4553 /* If insn is zero, then there must have been a syntax error. */
4554 if (insn)
4381f7c2
KH
4555 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4556 NOTE_LINE_NUMBER (insn),
0dacbd0e
JW
4557 "unreachable code at beginning of %s",
4558 case_stack->data.case_stmt.printname);
a11759a3
JR
4559 break;
4560 }
4561 }
4562 }
4563}
4564
28d81abb
RK
4565/* Accumulate one case or default label inside a case or switch statement.
4566 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4567 The function CONVERTER, when applied to arguments T and V,
4568 converts the value V to the type T.
28d81abb
RK
4569
4570 If not currently inside a case or switch statement, return 1 and do
4571 nothing. The caller will print a language-specific error message.
4572 If VALUE is a duplicate or overlaps, return 2 and do nothing
4573 except store the (first) duplicate node in *DUPLICATE.
4574 If VALUE is out of range, return 3 and do nothing.
e976b8b2 4575 If we are jumping into the scope of a cleanup or var-sized array, return 5.
28d81abb
RK
4576 Return 0 on success.
4577
4578 Extended to handle range statements. */
4579
4580int
f52fba84 4581pushcase (value, converter, label, duplicate)
28d81abb 4582 register tree value;
cdadb1dd 4583 tree (*converter) PARAMS ((tree, tree));
28d81abb
RK
4584 register tree label;
4585 tree *duplicate;
4586{
28d81abb
RK
4587 tree index_type;
4588 tree nominal_type;
4589
4590 /* Fail if not inside a real case statement. */
4591 if (! (case_stack && case_stack->data.case_stmt.start))
4592 return 1;
4593
4594 if (stack_block_stack
4595 && stack_block_stack->depth > case_stack->depth)
4596 return 5;
4597
4598 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4599 nominal_type = case_stack->data.case_stmt.nominal_type;
4600
4601 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4602 if (index_type == error_mark_node)
4603 return 0;
4604
2f985ca6
JW
4605 /* Convert VALUE to the type in which the comparisons are nominally done. */
4606 if (value != 0)
4607 value = (*converter) (nominal_type, value);
4608
feb60352
R
4609 check_seenlabel ();
4610
28d81abb
RK
4611 /* Fail if this value is out of range for the actual type of the index
4612 (which may be narrower than NOMINAL_TYPE). */
14a774a9
RK
4613 if (value != 0
4614 && (TREE_CONSTANT_OVERFLOW (value)
4615 || ! int_fits_type_p (value, index_type)))
28d81abb
RK
4616 return 3;
4617
56cb9733 4618 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4619}
4620
956d6950
JL
4621/* Like pushcase but this case applies to all values between VALUE1 and
4622 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4623 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4624 starts at VALUE1 and ends at the highest value of the index type.
4625 If both are NULL, this case applies to all values.
4626
4627 The return value is the same as that of pushcase but there is one
4628 additional error code: 4 means the specified range was empty. */
28d81abb
RK
4629
4630int
f52fba84 4631pushcase_range (value1, value2, converter, label, duplicate)
28d81abb 4632 register tree value1, value2;
cdadb1dd 4633 tree (*converter) PARAMS ((tree, tree));
28d81abb
RK
4634 register tree label;
4635 tree *duplicate;
4636{
28d81abb
RK
4637 tree index_type;
4638 tree nominal_type;
4639
4640 /* Fail if not inside a real case statement. */
4641 if (! (case_stack && case_stack->data.case_stmt.start))
4642 return 1;
4643
4644 if (stack_block_stack
4645 && stack_block_stack->depth > case_stack->depth)
4646 return 5;
4647
4648 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4649 nominal_type = case_stack->data.case_stmt.nominal_type;
4650
4651 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4652 if (index_type == error_mark_node)
4653 return 0;
4654
a11759a3 4655 check_seenlabel ();
28d81abb 4656
956d6950
JL
4657 /* Convert VALUEs to type in which the comparisons are nominally done
4658 and replace any unspecified value with the corresponding bound. */
4659 if (value1 == 0)
1974bfb1 4660 value1 = TYPE_MIN_VALUE (index_type);
956d6950 4661 if (value2 == 0)
1974bfb1 4662 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
4663
4664 /* Fail if the range is empty. Do this before any conversion since
4665 we want to allow out-of-range empty ranges. */
14a774a9 4666 if (value2 != 0 && tree_int_cst_lt (value2, value1))
956d6950
JL
4667 return 4;
4668
4381f7c2 4669 /* If the max was unbounded, use the max of the nominal_type we are
e1ee5cdc
RH
4670 converting to. Do this after the < check above to suppress false
4671 positives. */
14a774a9 4672 if (value2 == 0)
e1ee5cdc 4673 value2 = TYPE_MAX_VALUE (nominal_type);
28d81abb 4674
2f985ca6
JW
4675 value1 = (*converter) (nominal_type, value1);
4676 value2 = (*converter) (nominal_type, value2);
4677
28d81abb 4678 /* Fail if these values are out of range. */
956d6950
JL
4679 if (TREE_CONSTANT_OVERFLOW (value1)
4680 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
4681 return 3;
4682
956d6950
JL
4683 if (TREE_CONSTANT_OVERFLOW (value2)
4684 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
4685 return 3;
4686
57641239
RK
4687 return add_case_node (value1, value2, label, duplicate);
4688}
4689
4690/* Do the actual insertion of a case label for pushcase and pushcase_range
4691 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4692 slowdown for large switch statements. */
4693
56cb9733 4694int
57641239
RK
4695add_case_node (low, high, label, duplicate)
4696 tree low, high;
4697 tree label;
4698 tree *duplicate;
4699{
4700 struct case_node *p, **q, *r;
4701
56cb9733
MM
4702 /* If there's no HIGH value, then this is not a case range; it's
4703 just a simple case label. But that's just a degenerate case
4704 range. */
4705 if (!high)
4706 high = low;
4707
4708 /* Handle default labels specially. */
4709 if (!high && !low)
4710 {
4711 if (case_stack->data.case_stmt.default_label != 0)
4712 {
4713 *duplicate = case_stack->data.case_stmt.default_label;
4714 return 2;
4715 }
4716 case_stack->data.case_stmt.default_label = label;
4717 expand_label (label);
4718 return 0;
4719 }
4720
57641239
RK
4721 q = &case_stack->data.case_stmt.case_list;
4722 p = *q;
4723
69d4ca36 4724 while ((r = *q))
28d81abb 4725 {
57641239
RK
4726 p = r;
4727
4728 /* Keep going past elements distinctly greater than HIGH. */
4729 if (tree_int_cst_lt (high, p->low))
4730 q = &p->left;
4731
4732 /* or distinctly less than LOW. */
4733 else if (tree_int_cst_lt (p->high, low))
4734 q = &p->right;
4735
4736 else
28d81abb 4737 {
57641239
RK
4738 /* We have an overlap; this is an error. */
4739 *duplicate = p->code_label;
28d81abb
RK
4740 return 2;
4741 }
4742 }
4743
1da68f56 4744 /* Add this label to the chain, and succeed. */
28d81abb 4745
1f8f4a0b 4746 r = (struct case_node *) xmalloc (sizeof (struct case_node));
1da68f56 4747 r->low = low;
28d81abb 4748
57641239 4749 /* If the bounds are equal, turn this into the one-value case. */
57641239
RK
4750 if (tree_int_cst_equal (low, high))
4751 r->high = r->low;
4752 else
1da68f56 4753 r->high = high;
57641239
RK
4754
4755 r->code_label = label;
28d81abb
RK
4756 expand_label (label);
4757
57641239
RK
4758 *q = r;
4759 r->parent = p;
4760 r->left = 0;
4761 r->right = 0;
4762 r->balance = 0;
4763
4764 while (p)
4765 {
4766 struct case_node *s;
4767
4768 if (r == p->left)
4769 {
4770 int b;
4771
4772 if (! (b = p->balance))
4773 /* Growth propagation from left side. */
4774 p->balance = -1;
4775 else if (b < 0)
4776 {
4777 if (r->balance < 0)
4778 {
4779 /* R-Rotation */
51723711 4780 if ((p->left = s = r->right))
57641239
RK
4781 s->parent = p;
4782
4783 r->right = p;
4784 p->balance = 0;
4785 r->balance = 0;
4786 s = p->parent;
4787 p->parent = r;
4788
51723711 4789 if ((r->parent = s))
57641239
RK
4790 {
4791 if (s->left == p)
4792 s->left = r;
4793 else
4794 s->right = r;
4795 }
4796 else
4797 case_stack->data.case_stmt.case_list = r;
4798 }
4799 else
4800 /* r->balance == +1 */
4801 {
5720c7e7
RK
4802 /* LR-Rotation */
4803
57641239
RK
4804 int b2;
4805 struct case_node *t = r->right;
4806
51723711 4807 if ((p->left = s = t->right))
57641239
RK
4808 s->parent = p;
4809
4810 t->right = p;
51723711 4811 if ((r->right = s = t->left))
57641239
RK
4812 s->parent = r;
4813
4814 t->left = r;
4815 b = t->balance;
4816 b2 = b < 0;
4817 p->balance = b2;
4818 b2 = -b2 - b;
4819 r->balance = b2;
4820 t->balance = 0;
4821 s = p->parent;
4822 p->parent = t;
4823 r->parent = t;
4824
51723711 4825 if ((t->parent = s))
57641239
RK
4826 {
4827 if (s->left == p)
4828 s->left = t;
4829 else
4830 s->right = t;
4831 }
4832 else
4833 case_stack->data.case_stmt.case_list = t;
4834 }
4835 break;
4836 }
4837
4838 else
4839 {
4840 /* p->balance == +1; growth of left side balances the node. */
4841 p->balance = 0;
4842 break;
4843 }
4844 }
4845 else
4846 /* r == p->right */
4847 {
4848 int b;
4849
4850 if (! (b = p->balance))
4851 /* Growth propagation from right side. */
4852 p->balance++;
4853 else if (b > 0)
4854 {
4855 if (r->balance > 0)
4856 {
4857 /* L-Rotation */
4858
51723711 4859 if ((p->right = s = r->left))
57641239
RK
4860 s->parent = p;
4861
4862 r->left = p;
4863 p->balance = 0;
4864 r->balance = 0;
4865 s = p->parent;
4866 p->parent = r;
51723711 4867 if ((r->parent = s))
57641239
RK
4868 {
4869 if (s->left == p)
4870 s->left = r;
4871 else
4872 s->right = r;
4873 }
4874
4875 else
4876 case_stack->data.case_stmt.case_list = r;
4877 }
4878
4879 else
4880 /* r->balance == -1 */
4881 {
4882 /* RL-Rotation */
4883 int b2;
4884 struct case_node *t = r->left;
4885
51723711 4886 if ((p->right = s = t->left))
57641239
RK
4887 s->parent = p;
4888
4889 t->left = p;
4890
51723711 4891 if ((r->left = s = t->right))
57641239
RK
4892 s->parent = r;
4893
4894 t->right = r;
4895 b = t->balance;
4896 b2 = b < 0;
4897 r->balance = b2;
4898 b2 = -b2 - b;
4899 p->balance = b2;
4900 t->balance = 0;
4901 s = p->parent;
4902 p->parent = t;
4903 r->parent = t;
4904
51723711 4905 if ((t->parent = s))
57641239
RK
4906 {
4907 if (s->left == p)
4908 s->left = t;
4909 else
4910 s->right = t;
4911 }
4912
4913 else
4914 case_stack->data.case_stmt.case_list = t;
4915 }
4916 break;
4917 }
4918 else
4919 {
4920 /* p->balance == -1; growth of right side balances the node. */
4921 p->balance = 0;
4922 break;
4923 }
4924 }
4925
4926 r = p;
4927 p = p->parent;
4928 }
28d81abb
RK
4929
4930 return 0;
4931}
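
/* Editor's sketch, not part of stmt.c: the same overlap-rejecting
   descent as above, on plain int bounds instead of tree constants and
   without the AVL rebalancing.  The demo_* names are illustrative.  */

struct demo_case
{
  int low, high;
  struct demo_case *left, *right;
};

/* Link the range [LOW, HIGH] into the tree rooted at *ROOT; return the
   existing node it overlaps, if any, so the caller can report the
   duplicate label.  */

static struct demo_case *
demo_add_case (root, low, high)
     struct demo_case **root;
     int low, high;
{
  struct demo_case **q = root, *r;

  while ((r = *q) != 0)
    {
      /* Keep going past elements distinctly greater than HIGH,  */
      if (high < r->low)
	q = &r->left;
      /* or distinctly less than LOW.  */
      else if (r->high < low)
	q = &r->right;
      else
	/* Overlap: a duplicate case value.  */
	return r;
    }

  r = (struct demo_case *) xmalloc (sizeof (struct demo_case));
  r->low = low;
  r->high = high;
  r->left = r->right = 0;
  *q = r;
  return 0;
}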
4932\f
94d6511c 4933/* Returns the number of possible values of TYPE.
770ae6cc
RK
4934 Returns -1 if the number is unknown, variable, or if the number does not
4935 fit in a HOST_WIDE_INT.
94d6511c
PB
4936 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4937 do not increase monotonically (there may be duplicates);
4938 to 1 if the values increase monotonically, but not always by 1;
4939 otherwise sets it to 0. */
4940
4941HOST_WIDE_INT
4942all_cases_count (type, spareness)
4943 tree type;
4944 int *spareness;
4945{
770ae6cc
RK
4946 tree t;
4947 HOST_WIDE_INT count, minval, lastval;
4948
94d6511c
PB
4949 *spareness = 0;
4950
4951 switch (TREE_CODE (type))
4952 {
94d6511c
PB
4953 case BOOLEAN_TYPE:
4954 count = 2;
4955 break;
770ae6cc 4956
94d6511c
PB
4957 case CHAR_TYPE:
4958 count = 1 << BITS_PER_UNIT;
4959 break;
770ae6cc 4960
94d6511c
PB
4961 default:
4962 case INTEGER_TYPE:
770ae6cc
RK
4963 if (TYPE_MAX_VALUE (type) != 0
4964 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4965 TYPE_MIN_VALUE (type))))
4966 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4967 convert (type, integer_zero_node))))
4968 && host_integerp (t, 1))
4969 count = tree_low_cst (t, 1);
94d6511c 4970 else
770ae6cc 4971 return -1;
94d6511c 4972 break;
770ae6cc 4973
94d6511c 4974 case ENUMERAL_TYPE:
770ae6cc
RK
4975 /* Don't waste time with enumeral types with huge values. */
4976 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4977 || TYPE_MAX_VALUE (type) == 0
4978 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4979 return -1;
4980
4981 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
94d6511c 4982 count = 0;
770ae6cc 4983
94d6511c
PB
4984 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4985 {
770ae6cc
RK
4986 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4987
4988 if (*spareness == 2 || thisval < lastval)
4989 *spareness = 2;
4990 else if (thisval != minval + count)
94d6511c 4991 *spareness = 1;
770ae6cc 4992
94d6511c
PB
4993 count++;
4994 }
94d6511c 4995 }
770ae6cc 4996
94d6511c
PB
4997 return count;
4998}
4999
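/* Editor's sketch, not part of stmt.c: the ENUMERAL_TYPE
   classification above on a plain array of enumerator values, assuming
   VALS[0] is the smallest value, as TYPE_MIN_VALUE guarantees.  Here
   LASTVAL explicitly tracks the previous enumerator, which is what
   makes a decreasing value detectable as non-monotonic.  */

static long
demo_classify_enum (vals, n, sparseness)
     const long *vals;
     long n;
     int *sparseness;
{
  long i, minval, lastval;

  *sparseness = 0;
  if (n == 0)
    return 0;

  lastval = minval = vals[0];
  for (i = 0; i < n; i++)
    {
      if (*sparseness == 2 || vals[i] < lastval)
	*sparseness = 2;	/* not monotonic; duplicates possible */
      else if (vals[i] != minval + i)
	*sparseness = 1;	/* monotonic, but not always by 1 */

      lastval = vals[i];
    }

  return n;			/* the number of possible values */
}
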
94d6511c 5000#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
5001 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5002 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 5003#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
5004 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5005 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
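
/* Editor's sketch, not part of stmt.c: typical use of the two macros
   above -- one bit per possible selector value, indexed by the
   value's offset from the smallest one.  */

static int
demo_bitarray_use ()
{
  unsigned char seen[64 / HOST_BITS_PER_CHAR];

  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 3);
  BITARRAY_SET (seen, 9);

  return (BITARRAY_TEST (seen, 3) != 0
	  && BITARRAY_TEST (seen, 9) != 0
	  && BITARRAY_TEST (seen, 4) == 0);
}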
94d6511c
PB
5006
5007/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5008 with the case values we have seen, assuming the case expression
5009 has the given TYPE.
5010 SPARSENESS is as determined by all_cases_count.
5011
9faa82d8 5012 The time needed is proportional to COUNT, unless
94d6511c
PB
5013 SPARSENESS is 2, in which case quadratic time is needed. */
5014
df03cab5 5015void
94d6511c
PB
5016mark_seen_cases (type, cases_seen, count, sparseness)
5017 tree type;
5018 unsigned char *cases_seen;
770ae6cc 5019 HOST_WIDE_INT count;
94d6511c
PB
5020 int sparseness;
5021{
94d6511c 5022 tree next_node_to_try = NULL_TREE;
770ae6cc 5023 HOST_WIDE_INT next_node_offset = 0;
94d6511c 5024
5720c7e7 5025 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c 5026 tree val = make_node (INTEGER_CST);
770ae6cc 5027
94d6511c 5028 TREE_TYPE (val) = type;
5720c7e7 5029 if (! root)
4381f7c2
KH
5030 /* Do nothing. */
5031 ;
5720c7e7 5032 else if (sparseness == 2)
94d6511c 5033 {
5720c7e7 5034 tree t;
770ae6cc 5035 unsigned HOST_WIDE_INT xlo;
5720c7e7
RK
5036
5037 /* This less efficient loop is only needed to handle
5038 duplicate case values (multiple enum constants
5039 with the same value). */
5040 TREE_TYPE (val) = TREE_TYPE (root->low);
4381f7c2 5041 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5720c7e7 5042 t = TREE_CHAIN (t), xlo++)
94d6511c 5043 {
5720c7e7
RK
5044 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5045 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5046 n = root;
5047 do
94d6511c 5048 {
5720c7e7
RK
5049 /* Keep going past elements distinctly greater than VAL. */
5050 if (tree_int_cst_lt (val, n->low))
5051 n = n->left;
4381f7c2 5052
5720c7e7
RK
5053 /* or distinctly less than VAL. */
5054 else if (tree_int_cst_lt (n->high, val))
5055 n = n->right;
4381f7c2 5056
5720c7e7 5057 else
94d6511c 5058 {
5720c7e7
RK
5059 /* We have found a matching range. */
5060 BITARRAY_SET (cases_seen, xlo);
5061 break;
94d6511c
PB
5062 }
5063 }
5720c7e7
RK
5064 while (n);
5065 }
5066 }
5067 else
5068 {
5069 if (root->left)
5070 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
770ae6cc 5071
5720c7e7
RK
5072 for (n = root; n; n = n->right)
5073 {
5074 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5075 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4381f7c2 5076 while (! tree_int_cst_lt (n->high, val))
94d6511c 5077 {
5720c7e7
RK
5078 /* Calculate (into xlo) the "offset" of the integer (val).
5079 The element with lowest value has offset 0, the next smallest
5080 element has offset 1, etc. */
5081
770ae6cc
RK
5082 unsigned HOST_WIDE_INT xlo;
5083 HOST_WIDE_INT xhi;
5720c7e7 5084 tree t;
770ae6cc 5085
94d6511c
PB
5086 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5087 {
5088 /* The TYPE_VALUES will be in increasing order, so
5089 start searching where we last ended. */
5090 t = next_node_to_try;
5091 xlo = next_node_offset;
5092 xhi = 0;
5093 for (;;)
5094 {
5095 if (t == NULL_TREE)
5096 {
5097 t = TYPE_VALUES (type);
5098 xlo = 0;
5099 }
5100 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5101 {
5102 next_node_to_try = TREE_CHAIN (t);
5103 next_node_offset = xlo + 1;
5104 break;
5105 }
5106 xlo++;
5107 t = TREE_CHAIN (t);
5108 if (t == next_node_to_try)
5720c7e7
RK
5109 {
5110 xlo = -1;
5111 break;
5112 }
94d6511c
PB
5113 }
5114 }
5115 else
5116 {
5117 t = TYPE_MIN_VALUE (type);
5118 if (t)
5119 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5120 &xlo, &xhi);
5121 else
5122 xlo = xhi = 0;
5123 add_double (xlo, xhi,
5124 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5125 &xlo, &xhi);
5126 }
4381f7c2 5127
770ae6cc 5128 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
94d6511c 5129 BITARRAY_SET (cases_seen, xlo);
770ae6cc 5130
5720c7e7
RK
5131 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5132 1, 0,
5133 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 5134 }
94d6511c
PB
5135 }
5136 }
5137}
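
/* Editor's sketch, not part of stmt.c: the offset computation above in
   the common dense case (SPARSENESS 0), where the bit index for VAL is
   simply VAL minus the type's minimum value.  */

static void
demo_mark_value (cases_seen, count, minval, val)
     unsigned char *cases_seen;
     long count, minval, val;
{
  unsigned long xlo = (unsigned long) (val - minval);

  if (xlo < (unsigned long) count)
    BITARRAY_SET (cases_seen, xlo);
}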
5138
28d81abb
RK
5139/* Called when the index of a switch statement is an enumerated type
5140 and there is no default label.
5141
5142 Checks that all enumeration literals are covered by the case
5143 expressions of a switch. Also, warn if there are any extra
5144 switch cases that are *not* elements of the enumerated type.
5145
5146 If all enumeration literals were covered by the case expressions,
5147 turn one of the expressions into the default expression since it should
5148 not be possible to fall through such a switch. */
5149
5150void
5151check_for_full_enumeration_handling (type)
5152 tree type;
5153{
5154 register struct case_node *n;
28d81abb 5155 register tree chain;
4381f7c2 5156#if 0 /* variable used by 'if 0'ed code below. */
69d4ca36 5157 register struct case_node **l;
28d81abb 5158 int all_values = 1;
69d4ca36 5159#endif
28d81abb 5160
0f41302f 5161 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
5162 int sparseness = 0;
5163
0f41302f 5164 /* The number of possible selector values. */
94d6511c
PB
5165 HOST_WIDE_INT size;
5166
5167 /* For each possible selector value, a one iff it has been matched
0f41302f 5168 by a case value alternative. */
94d6511c
PB
5169 unsigned char *cases_seen;
5170
0f41302f 5171 /* The allocated size of cases_seen, in chars. */
770ae6cc 5172 HOST_WIDE_INT bytes_needed;
94d6511c 5173
94d6511c
PB
5174 if (! warn_switch)
5175 return;
5176
5177 size = all_cases_count (type, &sparseness);
5178 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 5179
94d6511c 5180 if (size > 0 && size < 600000
c5c76735 5181 /* We deliberately use calloc here, not xcalloc, so that we can suppress
4381f7c2 5182 this optimization if we don't have enough memory rather than
c5c76735 5183 aborting, as xmalloc would do. */
3de90026 5184 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
28d81abb 5185 {
770ae6cc 5186 HOST_WIDE_INT i;
94d6511c 5187 tree v = TYPE_VALUES (type);
28d81abb 5188
94d6511c
PB
5189 /* The time complexity of this code is normally O(N), where
5190 N is the number of members in the enumerated type.
5191 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 5192 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
5193
5194 mark_seen_cases (type, cases_seen, size, sparseness);
5195
770ae6cc 5196 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4381f7c2 5197 if (BITARRAY_TEST (cases_seen, i) == 0)
770ae6cc
RK
5198 warning ("enumeration value `%s' not handled in switch",
5199 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
94d6511c
PB
5200
5201 free (cases_seen);
28d81abb
RK
5202 }
5203
5204 /* Now we go the other way around; we warn if there are case
ac2a9454 5205 expressions that don't correspond to enumerators. This can
28d81abb 5206 occur since C and C++ don't enforce type-checking of
0f41302f 5207 assignments to enumeration variables. */
28d81abb 5208
5720c7e7
RK
5209 if (case_stack->data.case_stmt.case_list
5210 && case_stack->data.case_stmt.case_list->left)
5211 case_stack->data.case_stmt.case_list
5212 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
28d81abb
RK
5213 if (warn_switch)
5214 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5215 {
5216 for (chain = TYPE_VALUES (type);
5217 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5218 chain = TREE_CHAIN (chain))
5219 ;
5220
5221 if (!chain)
3b24f55b
RS
5222 {
5223 if (TYPE_NAME (type) == 0)
e016950d
KG
5224 warning ("case value `%ld' not in enumerated type",
5225 (long) TREE_INT_CST_LOW (n->low));
3b24f55b 5226 else
e016950d
KG
5227 warning ("case value `%ld' not in enumerated type `%s'",
5228 (long) TREE_INT_CST_LOW (n->low),
3b24f55b
RS
5229 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5230 == IDENTIFIER_NODE)
5231 ? TYPE_NAME (type)
5232 : DECL_NAME (TYPE_NAME (type))));
5233 }
1ddde1cd
RS
5234 if (!tree_int_cst_equal (n->low, n->high))
5235 {
5236 for (chain = TYPE_VALUES (type);
5237 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5238 chain = TREE_CHAIN (chain))
5239 ;
5240
5241 if (!chain)
3b24f55b
RS
5242 {
5243 if (TYPE_NAME (type) == 0)
e016950d
KG
5244 warning ("case value `%ld' not in enumerated type",
5245 (long) TREE_INT_CST_LOW (n->high));
3b24f55b 5246 else
e016950d
KG
5247 warning ("case value `%ld' not in enumerated type `%s'",
5248 (long) TREE_INT_CST_LOW (n->high),
3b24f55b
RS
5249 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5250 == IDENTIFIER_NODE)
5251 ? TYPE_NAME (type)
5252 : DECL_NAME (TYPE_NAME (type))));
5253 }
1ddde1cd 5254 }
28d81abb
RK
5255 }
5256
ae8cb346
RS
5257#if 0
5258 /* ??? This optimization is disabled because it causes valid programs to
5259 fail. ANSI C does not guarantee that an expression with enum type
9faa82d8 5260 will have a value that is the same as one of the enumeration literals. */
ae8cb346 5261
28d81abb
RK
5262 /* If all values were found as case labels, make one of them the default
5263 label. Thus, this switch will never fall through. We arbitrarily pick
5264 the last one to make the default since this is likely the most
5265 efficient choice. */
5266
5267 if (all_values)
5268 {
5269 for (l = &case_stack->data.case_stmt.case_list;
5270 (*l)->right != 0;
5271 l = &(*l)->right)
5272 ;
5273
5274 case_stack->data.case_stmt.default_label = (*l)->code_label;
5275 *l = 0;
5276 }
ae8cb346 5277#endif /* 0 */
28d81abb 5278}
ca695ac9 5279
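/* Editor's example, not part of stmt.c: source that exercises both
   warnings above when compiled with -Wswitch and no default label.
   DEMO_GREEN is never handled, and 42 is not an enumerator.  */

enum demo_color { DEMO_RED, DEMO_GREEN, DEMO_BLUE };

static int
demo_classify (c)
     enum demo_color c;
{
  switch (c)
    {
    case DEMO_RED:  return 0;
    case DEMO_BLUE: return 1;
    case 42:        return 2;	/* case value `42' not in enumerated type */
    }
  return -1;			/* enumeration value `DEMO_GREEN' not handled */
}
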
1f8f4a0b
MM
5280/* Free CN, and its children. */
5281
5282static void
5283free_case_nodes (cn)
5284 case_node_ptr cn;
5285{
5286 if (cn)
5287 {
5288 free_case_nodes (cn->left);
5289 free_case_nodes (cn->right);
5290 free (cn);
5291 }
5292}
5293
28d81abb
RK
5294\f
5295/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 5296 in which ORIG_INDEX is the expression to be tested.
28d81abb
RK
5297 Generate the code to test it and jump to the right place. */
5298
5299void
5300expand_end_case (orig_index)
5301 tree orig_index;
5302{
95d75019 5303 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
28d81abb
RK
5304 rtx default_label = 0;
5305 register struct case_node *n;
85066503 5306 unsigned int count;
28d81abb 5307 rtx index;
ca695ac9 5308 rtx table_label;
28d81abb
RK
5309 int ncases;
5310 rtx *labelvec;
5311 register int i;
5312 rtx before_case;
5313 register struct nesting *thiscase = case_stack;
1b0cb6fc 5314 tree index_expr, index_type;
ca695ac9
JB
5315 int unsignedp;
5316
03c03770
AS
5317 /* Don't crash due to previous errors. */
5318 if (thiscase == NULL)
5319 return;
5320
ca695ac9
JB
5321 table_label = gen_label_rtx ();
5322 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
5323 index_type = TREE_TYPE (index_expr);
5324 unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
5325
5326 do_pending_stack_adjust ();
5327
feb60352
R
5328 /* This might get a spurious warning in the presence of a syntax error;
5329 it could be fixed by moving the call to check_seenlabel after the
5330 check for error_mark_node, and copying the code of check_seenlabel that
5331 deals with case_stack->data.case_stmt.line_number_status /
5332 restore_line_number_status in front of the call to end_cleanup_deferral.
5333 However, this might miss some useful warnings in the presence of
5334 non-syntax errors. */
a11759a3
JR
5335 check_seenlabel ();
5336
28d81abb 5337 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 5338 if (index_type != error_mark_node)
28d81abb
RK
5339 {
5340 /* If switch expression was an enumerated type, check that all
5341 enumeration literals are covered by the cases.
5342 No sense trying this if there's a default case, however. */
5343
5344 if (!thiscase->data.case_stmt.default_label
5345 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5346 && TREE_CODE (index_expr) != INTEGER_CST)
5347 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5348
28d81abb
RK
5349 /* If we don't have a default-label, create one here,
5350 after the body of the switch. */
5351 if (thiscase->data.case_stmt.default_label == 0)
5352 {
5353 thiscase->data.case_stmt.default_label
5354 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5355 expand_label (thiscase->data.case_stmt.default_label);
5356 }
5357 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5358
5359 before_case = get_last_insn ();
5360
5720c7e7
RK
5361 if (thiscase->data.case_stmt.case_list
5362 && thiscase->data.case_stmt.case_list->left)
b059139c 5363 thiscase->data.case_stmt.case_list
4381f7c2 5364 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
b059139c 5365
28d81abb
RK
5366 /* Simplify the case-list before we count it. */
5367 group_case_nodes (thiscase->data.case_stmt.case_list);
5368
5369 /* Get upper and lower bounds of case values.
5370 Also convert all the case values to the index expr's data type. */
5371
5372 count = 0;
5373 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5374 {
5375 /* Check low and high label values are integers. */
5376 if (TREE_CODE (n->low) != INTEGER_CST)
5377 abort ();
5378 if (TREE_CODE (n->high) != INTEGER_CST)
5379 abort ();
5380
1b0cb6fc
RK
5381 n->low = convert (index_type, n->low);
5382 n->high = convert (index_type, n->high);
28d81abb
RK
5383
5384 /* Count the elements and track the largest and smallest
5385 of them (treating them as signed even if they are not). */
5386 if (count++ == 0)
5387 {
5388 minval = n->low;
5389 maxval = n->high;
5390 }
5391 else
5392 {
5393 if (INT_CST_LT (n->low, minval))
5394 minval = n->low;
5395 if (INT_CST_LT (maxval, n->high))
5396 maxval = n->high;
5397 }
5398 /* A range counts double, since it requires two compares. */
5399 if (! tree_int_cst_equal (n->low, n->high))
5400 count++;
5401 }
5402
3474db0e
RS
5403 orig_minval = minval;
5404
28d81abb
RK
5405 /* Compute span of values. */
5406 if (count != 0)
1b0cb6fc 5407 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5408
956d6950 5409 end_cleanup_deferral ();
4c581243 5410
1b0cb6fc 5411 if (count == 0)
28d81abb
RK
5412 {
5413 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5414 emit_queue ();
5415 emit_jump (default_label);
5416 }
3474db0e 5417
28d81abb
RK
5418 /* If range of values is much bigger than number of values,
5419 make a sequence of conditional branches instead of a dispatch.
5420 If the switch-index is a constant, do it this way
5421 because we can optimize it. */
4f73c5dd
TW
5422
5423#ifndef CASE_VALUES_THRESHOLD
28d81abb 5424#ifdef HAVE_casesi
4f73c5dd 5425#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
28d81abb 5426#else
4f73c5dd
TW
5427 /* If machine does not have a case insn that compares the
5428 bounds, this means extra overhead for dispatch tables
5429 which raises the threshold for using them. */
5430#define CASE_VALUES_THRESHOLD 5
5431#endif /* HAVE_casesi */
5432#endif /* CASE_VALUES_THRESHOLD */
5433
05bccae2
RK
5434 else if (count < CASE_VALUES_THRESHOLD
5435 || compare_tree_int (range, 10 * count) > 0
f0c988c8
BS
5436 /* RANGE may be signed, and really large ranges will show up
5437 as negative numbers. */
5438 || compare_tree_int (range, 0) < 0
3f6fe18e
RK
5439#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5440 || flag_pic
5441#endif
28d81abb 5442 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 5443 /* These will reduce to a constant. */
28d81abb 5444 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 5445 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 5446 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
95815af9 5447 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
b4ac57ab
RS
5448 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5449 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5450 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 5451 {
37366632 5452 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5453
5454 /* If the index is a short or char for which we do not have
5455 an insn to handle comparisons directly, convert it to
5456 a full integer now, rather than letting each comparison
5457 generate the conversion. */
5458
5459 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4381f7c2 5460 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
28d81abb
RK
5461 == CODE_FOR_nothing))
5462 {
5463 enum machine_mode wider_mode;
5464 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5465 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5466 if (cmp_optab->handlers[(int) wider_mode].insn_code
5467 != CODE_FOR_nothing)
5468 {
5469 index = convert_to_mode (wider_mode, index, unsignedp);
5470 break;
5471 }
5472 }
5473
5474 emit_queue ();
5475 do_pending_stack_adjust ();
5476
5477 index = protect_from_queue (index, 0);
5478 if (GET_CODE (index) == MEM)
5479 index = copy_to_reg (index);
5480 if (GET_CODE (index) == CONST_INT
5481 || TREE_CODE (index_expr) == INTEGER_CST)
5482 {
5483 /* Make a tree node with the proper constant value
5484 if we don't already have one. */
5485 if (TREE_CODE (index_expr) != INTEGER_CST)
5486 {
5487 index_expr
5488 = build_int_2 (INTVAL (index),
e9a042b6 5489 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5490 index_expr = convert (index_type, index_expr);
28d81abb
RK
5491 }
5492
5493 /* For constant index expressions we need only
5494 issue an unconditional branch to the appropriate
5495 target code. The job of removing any unreachable
5496 code is left to the optimization phase if the
5497 "-O" option is specified. */
1b0cb6fc
RK
5498 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5499 if (! tree_int_cst_lt (index_expr, n->low)
5500 && ! tree_int_cst_lt (n->high, index_expr))
5501 break;
5502
28d81abb
RK
5503 if (n)
5504 emit_jump (label_rtx (n->code_label));
5505 else
5506 emit_jump (default_label);
5507 }
5508 else
5509 {
5510 /* If the index expression is not constant we generate
5511 a binary decision tree to select the appropriate
5512 target code. This is done as follows:
5513
5514 The list of cases is rearranged into a binary tree,
5515 nearly optimal assuming equal probability for each case.
5516
5517 The tree is transformed into RTL, eliminating
5518 redundant test conditions at the same time.
5519
5520 If program flow could reach the end of the
5521 decision tree an unconditional jump to the
5522 default code is emitted. */
5523
5524 use_cost_table
5525 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
28d81abb 5526 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4381f7c2 5527 balance_case_nodes (&thiscase->data.case_stmt.case_list,
37366632 5528 NULL_PTR);
28d81abb 5529 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5530 default_label, index_type);
28d81abb
RK
5531 emit_jump_if_reachable (default_label);
5532 }
5533 }
5534 else
5535 {
5536 int win = 0;
5537#ifdef HAVE_casesi
5538 if (HAVE_casesi)
5539 {
c4fcf531 5540 enum machine_mode index_mode = SImode;
5130a5cc 5541 int index_bits = GET_MODE_BITSIZE (index_mode);
086f237d
JW
5542 rtx op1, op2;
5543 enum machine_mode op_mode;
c4fcf531 5544
28d81abb 5545 /* Convert the index to SImode. */
1b0cb6fc 5546 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
c4fcf531 5547 > GET_MODE_BITSIZE (index_mode))
28d81abb 5548 {
1b0cb6fc 5549 enum machine_mode omode = TYPE_MODE (index_type);
37366632 5550 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
af2682ef
RS
5551
5552 /* We must handle the endpoints in the original mode. */
1b0cb6fc 5553 index_expr = build (MINUS_EXPR, index_type,
28d81abb
RK
5554 index_expr, minval);
5555 minval = integer_zero_node;
37366632 5556 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
c5d5d461
JL
5557 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5558 omode, 1, 0, default_label);
af2682ef
RS
5559 /* Now we can safely truncate. */
5560 index = convert_to_mode (index_mode, index, 0);
5561 }
5562 else
5563 {
1b0cb6fc 5564 if (TYPE_MODE (index_type) != index_mode)
d3b35d75
RK
5565 {
5566 index_expr = convert (type_for_size (index_bits, 0),
5567 index_expr);
5568 index_type = TREE_TYPE (index_expr);
5569 }
5570
37366632 5571 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5572 }
28d81abb
RK
5573 emit_queue ();
5574 index = protect_from_queue (index, 0);
5575 do_pending_stack_adjust ();
5576
4381f7c2
KH
5577 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5578 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
086f237d
JW
5579 (index, op_mode))
5580 index = copy_to_mode_reg (op_mode, index);
5581
5582 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5583
4381f7c2
KH
5584 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5585 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
086f237d
JW
5586 (op1, op_mode))
5587 op1 = copy_to_mode_reg (op_mode, op1);
5588
5589 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5590
4381f7c2
KH
5591 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5592 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
086f237d
JW
5593 (op2, op_mode))
5594 op2 = copy_to_mode_reg (op_mode, op2);
5595
5596 emit_jump_insn (gen_casesi (index, op1, op2,
28d81abb
RK
5597 table_label, default_label));
5598 win = 1;
5599 }
5600#endif
5601#ifdef HAVE_tablejump
5602 if (! win && HAVE_tablejump)
5603 {
ecc9dd93
NS
5604 index_type = thiscase->data.case_stmt.nominal_type;
5605 index_expr = fold (build (MINUS_EXPR, index_type,
4381f7c2
KH
5606 convert (index_type, index_expr),
5607 convert (index_type, minval)));
37366632 5608 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5609 emit_queue ();
af2682ef 5610 index = protect_from_queue (index, 0);
28d81abb
RK
5611 do_pending_stack_adjust ();
5612
1b0cb6fc 5613 do_tablejump (index, TYPE_MODE (index_type),
37366632 5614 expand_expr (range, NULL_RTX, VOIDmode, 0),
28d81abb
RK
5615 table_label, default_label);
5616 win = 1;
5617 }
5618#endif
5619 if (! win)
5620 abort ();
5621
5622 /* Get table of labels to jump to, in order of case index. */
5623
5624 ncases = TREE_INT_CST_LOW (range) + 1;
5625 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
961192e1 5626 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
28d81abb
RK
5627
5628 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5629 {
37366632 5630 register HOST_WIDE_INT i
3474db0e 5631 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
28d81abb
RK
5632
5633 while (1)
5634 {
5635 labelvec[i]
38a448ca 5636 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3474db0e 5637 if (i + TREE_INT_CST_LOW (orig_minval)
28d81abb
RK
5638 == TREE_INT_CST_LOW (n->high))
5639 break;
5640 i++;
5641 }
5642 }
5643
5644 /* Fill in the gaps with the default. */
5645 for (i = 0; i < ncases; i++)
5646 if (labelvec[i] == 0)
38a448ca 5647 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
28d81abb
RK
5648
5649 /* Output the table */
5650 emit_label (table_label);
5651
18543a22 5652 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5653 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5654 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353 5655 gen_rtvec_v (ncases, labelvec),
4381f7c2 5656 const0_rtx, const0_rtx));
28d81abb 5657 else
38a448ca
RH
5658 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5659 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5660
5661 /* If the case insn drops through the table,
5662 after the table we must jump to the default-label.
5663 Otherwise record no drop-through after the table. */
5664#ifdef CASE_DROPS_THROUGH
5665 emit_jump (default_label);
5666#else
5667 emit_barrier ();
5668#endif
5669 }
5670
915f619f
JW
5671 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5672 reorder_insns (before_case, get_last_insn (),
28d81abb
RK
5673 thiscase->data.case_stmt.start);
5674 }
4c581243 5675 else
956d6950 5676 end_cleanup_deferral ();
1b0cb6fc 5677
28d81abb
RK
5678 if (thiscase->exit_label)
5679 emit_label (thiscase->exit_label);
5680
1f8f4a0b 5681 free_case_nodes (case_stack->data.case_stmt.case_list);
28d81abb
RK
5682 POPSTACK (case_stack);
5683
5684 free_temp_slots ();
5685}
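
/* Editor's sketch, not part of stmt.c: the table-versus-tree decision
   applied above with the default CASE_VALUES_THRESHOLD, ignoring the
   constant-index and PIC special cases.  A dispatch table is only
   worthwhile when there are enough case values and the spanned range
   is reasonably dense.  */

static int
demo_use_jump_table (count, range, have_casesi)
     unsigned int count;
     long range;
     int have_casesi;
{
  long threshold = have_casesi ? 4 : 5;

  return ((long) count >= threshold
	  /* Really large ranges show up as negative numbers.  */
	  && range >= 0
	  && range <= 10 * (long) count);
}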
5686
57641239
RK
5687/* Convert the tree NODE into a list linked by the right field, with the left
5688 field zeroed. RIGHT is used for recursion; it is a list to be placed
5689 rightmost in the resulting list. */
5690
5691static struct case_node *
5692case_tree2list (node, right)
5693 struct case_node *node, *right;
5694{
5695 struct case_node *left;
5696
5697 if (node->right)
5698 right = case_tree2list (node->right, right);
5699
5700 node->right = right;
51723711 5701 if ((left = node->left))
57641239
RK
5702 {
5703 node->left = 0;
5704 return case_tree2list (left, node);
5705 }
5706
5707 return node;
5708}
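
/* Editor's example, not part of stmt.c: flattening the balanced tree

	    2
	   / \
	  1   3

   case_tree2list (root, 0) first recurses into the right child, so
   RIGHT becomes the one-node list "3"; node 2 is prepended, giving
   "2 -> 3"; the call then re-roots on the left child 1 with that list
   as its tail, yielding the ascending list 1 -> 2 -> 3, all linked via
   the RIGHT fields with the LEFT fields zeroed.  */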
ca695ac9 5709
28d81abb
RK
5710/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5711
5712static void
5713do_jump_if_equal (op1, op2, label, unsignedp)
5714 rtx op1, op2, label;
5715 int unsignedp;
5716{
5717 if (GET_CODE (op1) == CONST_INT
5718 && GET_CODE (op2) == CONST_INT)
5719 {
5720 if (INTVAL (op1) == INTVAL (op2))
5721 emit_jump (label);
5722 }
5723 else
5724 {
5725 enum machine_mode mode = GET_MODE (op1);
5726 if (mode == VOIDmode)
5727 mode = GET_MODE (op2);
c5d5d461
JL
5728 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5729 0, label);
28d81abb
RK
5730 }
5731}
5732\f
5733/* Not all case values are encountered equally. This function
5734 uses a heuristic to weight case labels, in cases where that
5735 looks like a reasonable thing to do.
5736
5737 Right now, all we try to guess is text, and we establish the
5738 following weights:
5739
5740 chars above space: 16
5741 digits: 16
5742 default: 12
5743 space, punct: 8
5744 tab: 4
5745 newline: 2
5746 other "\" chars: 1
5747 remaining chars: 0
5748
5749 If we find any cases in the switch that are not either -1 or in the range
5750 of valid ASCII characters, or are control characters other than those
5751 commonly used with "\", don't treat this switch scanning text.
5752
5753 Return 1 if these nodes are suitable for cost estimation, otherwise
5754 return 0. */
5755
5756static int
5757estimate_case_costs (node)
5758 case_node_ptr node;
5759{
5760 tree min_ascii = build_int_2 (-1, -1);
5761 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5762 case_node_ptr n;
5763 int i;
5764
5765 /* If we haven't already made the cost table, make it now. Note that the
5766 lower bound of the table is -1, not zero. */
5767
5768 if (cost_table == NULL)
5769 {
e7749837 5770 cost_table = cost_table_ + 1;
28d81abb
RK
5771
5772 for (i = 0; i < 128; i++)
5773 {
e9a780ec 5774 if (ISALNUM (i))
28d81abb 5775 cost_table[i] = 16;
e9a780ec 5776 else if (ISPUNCT (i))
28d81abb 5777 cost_table[i] = 8;
e9a780ec 5778 else if (ISCNTRL (i))
28d81abb
RK
5779 cost_table[i] = -1;
5780 }
5781
5782 cost_table[' '] = 8;
5783 cost_table['\t'] = 4;
5784 cost_table['\0'] = 4;
5785 cost_table['\n'] = 2;
5786 cost_table['\f'] = 1;
5787 cost_table['\v'] = 1;
5788 cost_table['\b'] = 1;
5789 }
5790
5791 /* See if all the case expressions look like text. It is text if the
5792 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5793 as signed arithmetic since we don't want to ever access cost_table with a
5794 value less than -1. Also check that none of the constants in a range
5795 are strange control characters. */
5796
5797 for (n = node; n; n = n->right)
5798 {
5799 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5800 return 0;
5801
05bccae2
RK
5802 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5803 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
28d81abb
RK
5804 if (cost_table[i] < 0)
5805 return 0;
5806 }
5807
5808 /* All interesting values are within the range of interesting
5809 ASCII characters. */
5810 return 1;
5811}
5812
5813/* Scan an ordered list of case nodes
5814 combining those with consecutive values or ranges.
5815
5816 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5817
5818static void
5819group_case_nodes (head)
5820 case_node_ptr head;
5821{
5822 case_node_ptr node = head;
5823
5824 while (node)
5825 {
5826 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5827 rtx lb2;
28d81abb
RK
5828 case_node_ptr np = node;
5829
5830 /* Try to group the successors of NODE with NODE. */
5831 while (((np = np->right) != 0)
5832 /* Do they jump to the same place? */
ad7e369f
JL
5833 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5834 || (lb != 0 && lb2 != 0
5835 && simplejump_p (lb)
5836 && simplejump_p (lb2)
5837 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5838 SET_SRC (PATTERN (lb2)))))
28d81abb
RK
5839 /* Are their ranges consecutive? */
5840 && tree_int_cst_equal (np->low,
5841 fold (build (PLUS_EXPR,
5842 TREE_TYPE (node->high),
5843 node->high,
5844 integer_one_node)))
5845 /* An overflow is not consecutive. */
5846 && tree_int_cst_lt (node->high,
5847 fold (build (PLUS_EXPR,
5848 TREE_TYPE (node->high),
5849 node->high,
5850 integer_one_node))))
5851 {
5852 node->high = np->high;
5853 }
5854 /* NP is the first node after NODE which can't be grouped with it.
5855 Delete the nodes in between, and move on to that node. */
5856 node->right = np;
5857 node = np;
5858 }
5859}
5860
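/* Editor's sketch, not part of stmt.c: the same grouping on a plain
   list of int ranges.  The real pass above additionally requires both
   labels to jump to the same place; the INT_MAX check plays the role
   of its overflow guard.  */

struct demo_range
{
  int low, high;
  struct demo_range *next;
};

static void
demo_group_ranges (head)
     struct demo_range *head;
{
  struct demo_range *node, *np;

  for (node = head; node; node = node->next)
    {
      np = node;
      /* Absorb successors whose ranges are consecutive with NODE's.  */
      while ((np = np->next) != 0
	     && node->high < INT_MAX
	     && np->low == node->high + 1)
	node->high = np->high;

      /* Unlink the absorbed nodes and move on.  */
      node->next = np;
    }
}
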
5861/* Take an ordered list of case nodes
5862 and transform them into a near optimal binary tree,
6dc42e49 5863 on the assumption that any target code selection value is as
28d81abb
RK
5864 likely as any other.
5865
5866 The transformation is performed by splitting the ordered
5867 list into two equal sections plus a pivot. The parts are
5868 then attached to the pivot as left and right branches. Each
38e01259 5869 branch is then transformed recursively. */
28d81abb
RK
5870
5871static void
5872balance_case_nodes (head, parent)
5873 case_node_ptr *head;
5874 case_node_ptr parent;
5875{
5876 register case_node_ptr np;
5877
5878 np = *head;
5879 if (np)
5880 {
5881 int cost = 0;
5882 int i = 0;
5883 int ranges = 0;
5884 register case_node_ptr *npp;
5885 case_node_ptr left;
5886
5887 /* Count the number of entries on branch. Also count the ranges. */
5888
5889 while (np)
5890 {
5891 if (!tree_int_cst_equal (np->low, np->high))
5892 {
5893 ranges++;
5894 if (use_cost_table)
5895 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5896 }
5897
5898 if (use_cost_table)
5899 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5900
5901 i++;
5902 np = np->right;
5903 }
5904
5905 if (i > 2)
5906 {
5907 /* Split this list if it is long enough for that to help. */
5908 npp = head;
5909 left = *npp;
5910 if (use_cost_table)
5911 {
5912 /* Find the place in the list that bisects the list's total cost.
5913 Here I gets half the total cost. */
5914 int n_moved = 0;
5915 i = (cost + 1) / 2;
5916 while (1)
5917 {
5918 /* Skip nodes while their cost does not reach that amount. */
5919 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5920 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5921 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5922 if (i <= 0)
5923 break;
5924 npp = &(*npp)->right;
5925 n_moved += 1;
5926 }
5927 if (n_moved == 0)
5928 {
5929 /* Leave this branch lopsided, but optimize left-hand
5930 side and fill in `parent' fields for right-hand side. */
5931 np = *head;
5932 np->parent = parent;
5933 balance_case_nodes (&np->left, np);
5934 for (; np->right; np = np->right)
5935 np->right->parent = np;
5936 return;
5937 }
5938 }
5939 /* If there are just three nodes, split at the middle one. */
5940 else if (i == 3)
5941 npp = &(*npp)->right;
5942 else
5943 {
5944 /* Find the place in the list that bisects the list's total cost,
5945 where ranges count as 2.
5946 Here I gets half the total cost. */
5947 i = (i + ranges + 1) / 2;
5948 while (1)
5949 {
5950 /* Skip nodes while their cost does not reach that amount. */
5951 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5952 i--;
5953 i--;
5954 if (i <= 0)
5955 break;
5956 npp = &(*npp)->right;
5957 }
5958 }
5959 *head = np = *npp;
5960 *npp = 0;
5961 np->parent = parent;
5962 np->left = left;
5963
5964 /* Optimize each of the two split parts. */
5965 balance_case_nodes (&np->left, np);
5966 balance_case_nodes (&np->right, np);
5967 }
5968 else
5969 {
5970 /* Else leave this branch as one level,
5971 but fill in `parent' fields. */
5972 np = *head;
5973 np->parent = parent;
5974 for (; np->right; np = np->right)
5975 np->right->parent = np;
5976 }
5977 }
5978}
5979\f
5980/* Search the parent sections of the case node tree
5981 to see if a test for the lower bound of NODE would be redundant.
5982 INDEX_TYPE is the type of the index expression.
5983
5984 The instructions to generate the case decision tree are
5985 output in the same order as nodes are processed so it is
5986 known that if a parent node checks the range of the current
5987 node minus one that the current node is bounded at its lower
5988 span. Thus the test would be redundant. */
5989
5990static int
5991node_has_low_bound (node, index_type)
5992 case_node_ptr node;
5993 tree index_type;
5994{
5995 tree low_minus_one;
5996 case_node_ptr pnode;
5997
5998 /* If the lower bound of this node is the lowest value in the index type,
5999 we need not test it. */
6000
6001 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6002 return 1;
6003
6004 /* If this node has a left branch, the value at the left must be less
6005 than that at this node, so it cannot be bounded at the bottom and
6006 we need not bother testing any further. */
6007
6008 if (node->left)
6009 return 0;
6010
6011 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6012 node->low, integer_one_node));
6013
6014 /* If the subtraction above overflowed, we can't verify anything.
6015 Otherwise, look for a parent that tests our value - 1. */
6016
6017 if (! tree_int_cst_lt (low_minus_one, node->low))
6018 return 0;
6019
6020 for (pnode = node->parent; pnode; pnode = pnode->parent)
6021 if (tree_int_cst_equal (low_minus_one, pnode->high))
6022 return 1;
6023
6024 return 0;
6025}
6026
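/* Editor's example, not part of stmt.c: suppose the node covering
   10..20 is the right child of a parent covering 5..9.  Control only
   reaches the child after the parent's test has sent all values <= 9
   elsewhere, and since the parent's HIGH (9) equals the child's LOW
   minus one, the parent scan above proves the low-bound test
   redundant.  */
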
6027/* Search the parent sections of the case node tree
6028 to see if a test for the upper bound of NODE would be redundant.
6029 INDEX_TYPE is the type of the index expression.
6030
6031 The instructions to generate the case decision tree are
6032 output in the same order as nodes are processed so it is
6033 known that if a parent node checks the range of the current
6034 node plus one that the current node is bounded at its upper
6035 span. Thus the test would be redundant. */
6036
6037static int
6038node_has_high_bound (node, index_type)
6039 case_node_ptr node;
6040 tree index_type;
6041{
6042 tree high_plus_one;
6043 case_node_ptr pnode;
6044
e1ee5cdc
RH
6045 /* If there is no upper bound, obviously no test is needed. */
6046
6047 if (TYPE_MAX_VALUE (index_type) == NULL)
6048 return 1;
6049
28d81abb
RK
6050 /* If the upper bound of this node is the highest value in the type
6051 of the index expression, we need not test against it. */
6052
6053 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6054 return 1;
6055
6056 /* If this node has a right branch, the value at the right must be greater
6057 than that at this node, so it cannot be bounded at the top and
6058 we need not bother testing any further. */
6059
6060 if (node->right)
6061 return 0;
6062
6063 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6064 node->high, integer_one_node));
6065
6066 /* If the addition above overflowed, we can't verify anything.
6067 Otherwise, look for a parent that tests our value + 1. */
6068
6069 if (! tree_int_cst_lt (node->high, high_plus_one))
6070 return 0;
6071
6072 for (pnode = node->parent; pnode; pnode = pnode->parent)
6073 if (tree_int_cst_equal (high_plus_one, pnode->low))
6074 return 1;
6075
6076 return 0;
6077}
6078
6079/* Search the parent sections of the
6080 case node tree to see if both tests for the upper and lower
6081 bounds of NODE would be redundant. */
6082
6083static int
6084node_is_bounded (node, index_type)
6085 case_node_ptr node;
6086 tree index_type;
6087{
6088 return (node_has_low_bound (node, index_type)
6089 && node_has_high_bound (node, index_type));
6090}
6091
6092/* Emit an unconditional jump to LABEL unless it would be dead code. */
6093
6094static void
6095emit_jump_if_reachable (label)
6096 rtx label;
6097{
6098 if (GET_CODE (get_last_insn ()) != BARRIER)
6099 emit_jump (label);
6100}
6101\f
6102/* Emit step-by-step code to select a case for the value of INDEX.
6103 The decision tree thus generated follows the form of the
6104 case-node binary tree NODE, whose nodes represent test conditions.
6105 INDEX_TYPE is the type of the index of the switch.
6106
6107 Care is taken to prune redundant tests from the decision tree
6108 by detecting any boundary conditions already checked by
6109 emitted rtx. (See node_has_high_bound, node_has_low_bound
6110 and node_is_bounded, above.)
6111
6112 Where the test conditions can be shown to be redundant we emit
6113 an unconditional jump to the target code. As a further
6114 optimization, the subordinates of a tree node are examined to
6115 check for bounded nodes. In this case conditional and/or
6116 unconditional jumps as a result of the boundary check for the
6117 current node are arranged to target the subordinate's associated
38e01259 6118 code for out-of-bound conditions on the current node.
28d81abb 6119
f72aed24 6120 We can assume that when control reaches the code generated here,
28d81abb
RK
6121 the index value has already been compared with the parents
6122 of this node, and determined to be on the same side of each parent
6123 as this node is. Thus, if this node tests for the value 51,
6124 and a parent tested for 52, we don't need to consider
6125 the possibility of a value greater than 51. If another parent
6126 tests for the value 50, then this node need not test anything. */
6127
6128static void
6129emit_case_nodes (index, node, default_label, index_type)
6130 rtx index;
6131 case_node_ptr node;
6132 rtx default_label;
6133 tree index_type;
6134{
6135 /* If INDEX has an unsigned type, we must make unsigned branches. */
6136 int unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
6137 enum machine_mode mode = GET_MODE (index);
6138
6139 /* See if our parents have already tested everything for us.
6140 If they have, emit an unconditional jump for this node. */
6141 if (node_is_bounded (node, index_type))
6142 emit_jump (label_rtx (node->code_label));
6143
6144 else if (tree_int_cst_equal (node->low, node->high))
6145 {
6146 /* Node is single valued. First see if the index expression matches
0f41302f 6147 this node and then check our children, if any. */
28d81abb 6148
37366632 6149 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
28d81abb
RK
6150 label_rtx (node->code_label), unsignedp);
6151
6152 if (node->right != 0 && node->left != 0)
6153 {
6154 /* This node has children on both sides.
6155 Dispatch to one side or the other
6156 by comparing the index value with this node's value.
6157 If one subtree is bounded, check that one first,
6158 so we can avoid real branches in the tree. */
6159
6160 if (node_is_bounded (node->right, index_type))
6161 {
4381f7c2
KH
6162 emit_cmp_and_jump_insns (index,
6163 expand_expr (node->high, NULL_RTX,
6164 VOIDmode, 0),
6165 GT, NULL_RTX, mode, unsignedp, 0,
6166 label_rtx (node->right->code_label));
28d81abb
RK
6167 emit_case_nodes (index, node->left, default_label, index_type);
6168 }
6169
6170 else if (node_is_bounded (node->left, index_type))
6171 {
4381f7c2
KH
6172 emit_cmp_and_jump_insns (index,
6173 expand_expr (node->high, NULL_RTX,
6174 VOIDmode, 0),
c5d5d461
JL
6175 LT, NULL_RTX, mode, unsignedp, 0,
6176 label_rtx (node->left->code_label));
28d81abb
RK
6177 emit_case_nodes (index, node->right, default_label, index_type);
6178 }
6179
6180 else
6181 {
6182 /* Neither node is bounded. First distinguish the two sides;
6183 then emit the code for one side at a time. */
6184
4381f7c2 6185 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
28d81abb
RK
6186
6187 /* See if the value is on the right. */
4381f7c2
KH
6188 emit_cmp_and_jump_insns (index,
6189 expand_expr (node->high, NULL_RTX,
6190 VOIDmode, 0),
c5d5d461
JL
6191 GT, NULL_RTX, mode, unsignedp, 0,
6192 label_rtx (test_label));
28d81abb
RK
6193
6194 /* Value must be on the left.
6195 Handle the left-hand subtree. */
6196 emit_case_nodes (index, node->left, default_label, index_type);
6197 /* If left-hand subtree does nothing,
6198 go to default. */
6199 emit_jump_if_reachable (default_label);
6200
6201 /* Code branches here for the right-hand subtree. */
6202 expand_label (test_label);
6203 emit_case_nodes (index, node->right, default_label, index_type);
6204 }
6205 }
6206
6207 else if (node->right != 0 && node->left == 0)
6208 {
6209 /* Here we have a right child but no left so we issue conditional
6210 branch to default and process the right child.
6211
6212 Omit the conditional branch to default if it would avoid only one
6213 right child; it costs too much space to save so little time. */
6214
de14fd73 6215 if (node->right->right || node->right->left
28d81abb
RK
6216 || !tree_int_cst_equal (node->right->low, node->right->high))
6217 {
6218 if (!node_has_low_bound (node, index_type))
6219 {
4381f7c2
KH
6220 emit_cmp_and_jump_insns (index,
6221 expand_expr (node->high, NULL_RTX,
6222 VOIDmode, 0),
c5d5d461
JL
6223 LT, NULL_RTX, mode, unsignedp, 0,
6224 default_label);
28d81abb
RK
6225 }
6226
6227 emit_case_nodes (index, node->right, default_label, index_type);
6228 }
6229 else
6230 /* We cannot process node->right normally
6231 since we haven't ruled out the numbers less than
6232 this node's value. So handle node->right explicitly. */
6233 do_jump_if_equal (index,
37366632
RK
6234 expand_expr (node->right->low, NULL_RTX,
6235 VOIDmode, 0),
28d81abb
RK
6236 label_rtx (node->right->code_label), unsignedp);
6237 }
6238
6239 else if (node->right == 0 && node->left != 0)
6240 {
6241 /* Just one subtree, on the left. */
6242
de14fd73
RK
6243#if 0 /* The following code and comment were formerly part
6244 of the condition here, but they didn't work
6245 and I don't understand what the idea was. -- rms. */
6246 /* If our "most probable entry" is less probable
28d81abb
RK
6247 than the default label, emit a jump to
6248 the default label using condition codes
6249 already lying around. With no right branch,
6250 a branch-greater-than will get us to the default
6251 label correctly. */
de14fd73 6252 if (use_cost_table
4381f7c2 6253 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
de14fd73
RK
6254 ;
6255#endif /* 0 */
4381f7c2 6256 if (node->left->left || node->left->right
28d81abb
RK
6257 || !tree_int_cst_equal (node->left->low, node->left->high))
6258 {
6259 if (!node_has_high_bound (node, index_type))
6260 {
c5d5d461
JL
6261 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6262 NULL_RTX,
6263 VOIDmode, 0),
6264 GT, NULL_RTX, mode, unsignedp, 0,
6265 default_label);
28d81abb
RK
6266 }
6267
6268 emit_case_nodes (index, node->left, default_label, index_type);
6269 }
6270 else
6271 /* We cannot process node->left normally
6272 since we haven't ruled out the numbers less than
6273 this node's value. So handle node->left explicitly. */
6274 do_jump_if_equal (index,
37366632
RK
6275 expand_expr (node->left->low, NULL_RTX,
6276 VOIDmode, 0),
28d81abb
RK
6277 label_rtx (node->left->code_label), unsignedp);
6278 }
6279 }
6280 else
6281 {
6282 /* Node is a range. These cases are very similar to those for a single
6283 value, except that we do not start by testing whether this node
6284 is the one to branch to. */
6285
6286 if (node->right != 0 && node->left != 0)
6287 {
6288 /* Node has subtrees on both sides.
6289 If the right-hand subtree is bounded,
6290 test for it first, since we can go straight there.
6291 Otherwise, we need to make a branch in the control structure,
6292 then handle the two subtrees. */
6293 tree test_label = 0;
6294
28d81abb
RK
6295 if (node_is_bounded (node->right, index_type))
6296 /* Right hand node is fully bounded so we can eliminate any
6297 testing and branch directly to the target code. */
c5d5d461
JL
6298 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6299 VOIDmode, 0),
6300 GT, NULL_RTX, mode, unsignedp, 0,
6301 label_rtx (node->right->code_label));
28d81abb
RK
6302 else
6303 {
6304 /* Right hand node requires testing.
6305 Branch to a label where we will handle it later. */
6306
6307 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4381f7c2
KH
6308 emit_cmp_and_jump_insns (index,
6309 expand_expr (node->high, NULL_RTX,
6310 VOIDmode, 0),
c5d5d461
JL
6311 GT, NULL_RTX, mode, unsignedp, 0,
6312 label_rtx (test_label));
28d81abb
RK
6313 }
6314
6315 /* Value belongs to this node or to the left-hand subtree. */
6316
c5d5d461
JL
6317 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6318 VOIDmode, 0),
6319 GE, NULL_RTX, mode, unsignedp, 0,
6320 label_rtx (node->code_label));
28d81abb
RK
6321
6322 /* Handle the left-hand subtree. */
6323 emit_case_nodes (index, node->left, default_label, index_type);
6324
6325 /* If right node had to be handled later, do that now. */
6326
6327 if (test_label)
6328 {
6329 /* If the left-hand subtree fell through,
6330 don't let it fall into the right-hand subtree. */
6331 emit_jump_if_reachable (default_label);
6332
6333 expand_label (test_label);
6334 emit_case_nodes (index, node->right, default_label, index_type);
6335 }
6336 }
6337
6338 else if (node->right != 0 && node->left == 0)
6339 {
6340 /* Deal with values to the left of this node,
6341 if they are possible. */
6342 if (!node_has_low_bound (node, index_type))
6343 {
4381f7c2
KH
6344 emit_cmp_and_jump_insns (index,
6345 expand_expr (node->low, NULL_RTX,
6346 VOIDmode, 0),
c5d5d461
JL
6347 LT, NULL_RTX, mode, unsignedp, 0,
6348 default_label);
28d81abb
RK
6349 }
6350
6351 /* Value belongs to this node or to the right-hand subtree. */
6352
c5d5d461
JL
6353 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6354 VOIDmode, 0),
6355 LE, NULL_RTX, mode, unsignedp, 0,
6356 label_rtx (node->code_label));
28d81abb
RK
6357
6358 emit_case_nodes (index, node->right, default_label, index_type);
6359 }
6360
6361 else if (node->right == 0 && node->left != 0)
6362 {
6363 /* Deal with values to the right of this node,
6364 if they are possible. */
6365 if (!node_has_high_bound (node, index_type))
6366 {
4381f7c2
KH
6367 emit_cmp_and_jump_insns (index,
6368 expand_expr (node->high, NULL_RTX,
6369 VOIDmode, 0),
c5d5d461
JL
6370 GT, NULL_RTX, mode, unsignedp, 0,
6371 default_label);
28d81abb
RK
6372 }
6373
6374 /* Value belongs to this node or to the left-hand subtree. */
6375
4381f7c2
KH
6376 emit_cmp_and_jump_insns (index,
6377 expand_expr (node->low, NULL_RTX,
6378 VOIDmode, 0),
c5d5d461
JL
6379 GE, NULL_RTX, mode, unsignedp, 0,
6380 label_rtx (node->code_label));
28d81abb
RK
6381
6382 emit_case_nodes (index, node->left, default_label, index_type);
6383 }
6384
6385 else
6386 {
6387 /* Node has no children so we check low and high bounds to remove
6388 redundant tests. Only one of the bounds can exist,
6389 since otherwise this node is bounded--a case tested already. */
6390
6391 if (!node_has_high_bound (node, index_type))
6392 {
4381f7c2
KH
6393 emit_cmp_and_jump_insns (index,
6394 expand_expr (node->high, NULL_RTX,
6395 VOIDmode, 0),
c5d5d461
JL
6396 GT, NULL_RTX, mode, unsignedp, 0,
6397 default_label);
28d81abb
RK
6398 }
6399
6400 if (!node_has_low_bound (node, index_type))
6401 {
4381f7c2
KH
6402 emit_cmp_and_jump_insns (index,
6403 expand_expr (node->low, NULL_RTX,
6404 VOIDmode, 0),
c5d5d461
JL
6405 LT, NULL_RTX, mode, unsignedp, 0,
6406 default_label);
28d81abb
RK
6407 }
6408
6409 emit_jump (label_rtx (node->code_label));
6410 }
6411 }
6412}
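
/* Editor's example, not part of stmt.c: for

     switch (x) { case 1: ... case 3: ... case 5 ... 7: ... }

   balance_case_nodes roots the tree at the middle node [3,3] with
   [1,1] on the left and [5,7] on the right, and the code above emits,
   rendered in C and assuming no bound can be proved from the index
   type:

     if (x == 3) goto L3;
     if (x > 3) goto Ltest;
     if (x == 1) goto L1;
     goto Ldefault;
   Ltest:
     if (x > 7) goto Ldefault;
     if (x < 5) goto Ldefault;
     goto L5;
*/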