/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
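/* For illustration (a hedged sketch, not part of the original file):
   a front end expanding `if (cond) { then-body }' would make roughly
   this call sequence, assuming the interface described above:

	expand_start_cond (cond, 0);	-- emit test of COND; 0 = no exit label
	... expand the then-clause ...
	expand_end_cond ();		-- emit the join label
*/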

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
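
/* Illustration (hypothetical source, not part of this file): a dense
   switch such as

	switch (x) { case 0: ... case 1: ... case 2: ... case 3: ... }

   is normally emitted as a branch table indexed by X, while a sparse
   one such as

	switch (x) { case 1: ... case 100: ... case 100000: ... }

   becomes the compare-and-jump tree described above.  */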

struct case_node GTY(())
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
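
/* For example, COST_TABLE (-1) is cost_table_[0] and COST_TABLE (127)
   is cost_table_[128], so the whole index range -1 .. 127 fits in the
   129-element cost_table_ above.  */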
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK) \
do { struct nesting *target = STACK; \
     struct nesting *this; \
     do { this = nesting_stack; \
	  if (loop_stack == this) \
	    loop_stack = loop_stack->next; \
	  if (cond_stack == this) \
	    cond_stack = cond_stack->next; \
	  if (block_stack == this) \
	    block_stack = block_stack->next; \
	  if (stack_block_stack == this) \
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this) \
	    case_stack = case_stack->next; \
	  nesting_depth = nesting_stack->depth - 1; \
	  nesting_stack = this->all; } \
     while (this != target); } while (0)
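
/* For illustration (a hedged sketch): a construct's end-function pops
   its own entry, and anything nested more deeply, with e.g.

	POPSTACK (cond_stack);

   the loop above also unlinks each popped entry from whichever of the
   per-type stacks it appears on.  */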
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
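
/* Illustration (hypothetical user code, not from this file):

	{
	  int v[n];	-- variable-sized, so the block has a stack level
	  ...
	  goto done;	-- forward goto; DONE is not yet defined
	}
	done: ;

   At the goto we cannot yet emit the stack-pointer restore for DONE's
   contour, so a fixup is queued and resolved once DONE is defined.  */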

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences PARAMS ((int, const char *));
static bool parse_input_constraint PARAMS ((const char **, int, int, int,
					    int, const char * const *,
					    bool *, bool *));
static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver PARAMS ((void));
static void expand_nl_goto_receivers PARAMS ((struct nesting *));
static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
				 rtx, int));
static bool check_operand_nalternatives PARAMS ((tree, tree));
static bool check_unique_operand_names PARAMS ((tree, tree));
static tree resolve_operand_names PARAMS ((tree, tree, tree,
					   const char **));
static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
static void expand_null_return_1 PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return PARAMS ((rtx));
static int tail_recursion_args PARAMS ((tree, tree));
static void expand_cleanups PARAMS ((tree, tree, int, int));
static void check_seenlabel PARAMS ((void));
static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs PARAMS ((case_node_ptr));
static void group_case_nodes PARAMS ((case_node_ptr));
static void balance_case_nodes PARAMS ((case_node_ptr *,
					case_node_ptr));
static int node_has_low_bound PARAMS ((case_node_ptr, tree));
static int node_has_high_bound PARAMS ((case_node_ptr, tree));
static int node_is_bounded PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable PARAMS ((rtx));
static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *) ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}
\f
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
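
/* Illustration (hypothetical GNU C user code): this implements the
   computed-goto extension, e.g.

	void *tbl[] = { &&lab_a, &&lab_b };
	goto *tbl[i];

   where EXP is the tree for `tbl[i]'.  */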
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
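
/* Illustration (hypothetical GNU C user code): a nonlocal goto arises
   when a nested function jumps to a label of its containing function:

	void f ()
	{
	  void g () { goto fail; }	-- nonlocal jump out of G
	  ... g (); ...
	fail: ...
	}
*/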

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added by things like
	     put_var_into_stack and can have INSN_UIDs that are out of
	     the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  clear_last_expr ();
}
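
/* Illustration (hypothetical user code): this path handles a "simple"
   asm with no operands, e.g.

	asm ("nop");

   Extended asms with operands go through expand_asm_operands below.  */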

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
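
/* For example (a hedged sketch of the behavior described above): the
   read-write constraint "+r" is canonicalized to "=r" with *IS_INOUT
   set, and a trailing "r=" is rewritten to "=r" with a warning.  */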

bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
			 allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else if (EXTRA_ADDRESS_CONSTRAINT (*p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
			constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      break;
	    }
	  else
	    j = end - constraint;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j]))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j]))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (decl, clobbered_regs)
     tree decl;
     const HARD_REG_SET clobbered_regs;
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }
  return false;
}
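
/* Illustration (hypothetical user code; the register name is
   target-specific):

	register int x asm ("r0");
	asm ("..." : : : "r0");		-- error: X conflicts with clobber
*/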

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
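
/* Illustration (hypothetical GNU C user code, x86 mnemonics assumed):

	asm volatile ("addl %2,%0" : "=r" (sum) : "0" (a), "g" (b) : "cc");

   Here OUTPUTS has one element with constraint "=r", INPUTS has two
   ("0" and "g"), and CLOBBERS names the condition-code register.  */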

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
  /* The insn we have emitted.  */
  rtx insn;
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  if (! check_unique_operand_names (outputs, inputs))
    return;

  string = resolve_operand_names (string, outputs, inputs, constraints);

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	SET_HARD_REG_BIT (clobbered_regs, i);
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && GET_CODE (DECL_RTL (val)) == REG
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	(*lang_hooks.mark_addressable) (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	      || GET_CODE (output_rtx[i]) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
11579f33
AJ
1630 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1631 if (is_inout)
1632 emit_move_insn (output_rtx[i], real_output_rtx[i]);
947255ed 1633 }
d09a75ae 1634 }
b50a024d 1635 else
e619bb8d 1636 {
11579f33 1637 output_rtx[i] = assign_temp (type, 0, 0, 1);
b50a024d
RK
1638 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1639 }
235c5021 1640
1b3d8f8a
GK
1641 generating_concat_p = old_generating_concat_p;
1642
2a230e9d 1643 if (is_inout)
235c5021 1644 {
6be2e1f8 1645 inout_mode[ninout] = TYPE_MODE (type);
235c5021
RK
1646 inout_opnum[ninout++] = i;
1647 }
acb5d088
HPN
1648
1649 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1650 clobber_conflict_found = 1;
28d81abb
RK
1651 }
1652
84b72302
RH
1653 /* Make vectors for the expression-rtx, constraint strings,
1654 and named operands. */
28d81abb
RK
1655
1656 argvec = rtvec_alloc (ninputs);
84b72302 1657 constraintvec = rtvec_alloc (ninputs);
28d81abb 1658
6462bb43
AO
1659 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1660 : GET_MODE (output_rtx[0])),
786de7eb 1661 TREE_STRING_POINTER (string),
84b72302 1662 empty_string, 0, argvec, constraintvec,
21a427cc 1663 filename, line);
c85f7c16 1664
78418280 1665 MEM_VOLATILE_P (body) = vol;
28d81abb
RK
1666
1667 /* Eval the inputs and put them into ARGVEC.
1668 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1669
84b72302 1670 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
28d81abb 1671 {
6be2e1f8
RH
1672 bool allows_reg, allows_mem;
1673 const char *constraint;
1674 tree val, type;
1f06ee8d 1675 rtx op;
28d81abb 1676
6be2e1f8
RH
1677 constraint = constraints[i + noutputs];
1678 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1679 constraints, &allows_mem, &allows_reg))
1680 abort ();
2a230e9d 1681
6be2e1f8 1682 generating_concat_p = 0;
65fed0cb 1683
6be2e1f8
RH
1684 val = TREE_VALUE (tail);
1685 type = TREE_TYPE (val);
1686 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
65fed0cb 1687
1b3d8f8a 1688 /* Never pass a CONCAT to an ASM. */
1b3d8f8a
GK
1689 if (GET_CODE (op) == CONCAT)
1690 op = force_reg (GET_MODE (op), op);
1691
1afbe1c4 1692 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1693 {
11579f33 1694 if (allows_reg)
6be2e1f8 1695 op = force_reg (TYPE_MODE (type), op);
11579f33 1696 else if (!allows_mem)
84b72302
RH
1697 warning ("asm operand %d probably doesn't match constraints",
1698 i + noutputs);
1f06ee8d 1699 else if (CONSTANT_P (op))
6be2e1f8 1700 op = force_const_mem (TYPE_MODE (type), op);
1f06ee8d
RH
1701 else if (GET_CODE (op) == REG
1702 || GET_CODE (op) == SUBREG
d96ecaf4 1703 || GET_CODE (op) == ADDRESSOF
1f06ee8d
RH
1704 || GET_CODE (op) == CONCAT)
1705 {
1da68f56
RK
1706 tree qual_type = build_qualified_type (type,
1707 (TYPE_QUALS (type)
1708 | TYPE_QUAL_CONST));
1709 rtx memloc = assign_temp (qual_type, 1, 1, 1);
65fed0cb 1710
1f06ee8d
RH
1711 emit_move_insn (memloc, op);
1712 op = memloc;
1713 }
14a774a9 1714
1f06ee8d 1715 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
6be2e1f8
RH
1716 {
1717 /* We won't recognize volatile memory as available a
1718 memory_operand at this point. Ignore it. */
1719 }
1f06ee8d
RH
1720 else if (queued_subexp_p (op))
1721 ;
1722 else
1723 /* ??? Leave this only until we have experience with what
1724 happens in combine and elsewhere when constraints are
1725 not satisfied. */
84b72302
RH
1726 warning ("asm operand %d probably doesn't match constraints",
1727 i + noutputs);
65fed0cb 1728 }
6be2e1f8 1729
1b3d8f8a 1730 generating_concat_p = old_generating_concat_p;
6462bb43 1731 ASM_OPERANDS_INPUT (body, i) = op;
2a230e9d 1732
6462bb43 1733 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
6be2e1f8 1734 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
acb5d088
HPN
1735
1736 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1737 clobber_conflict_found = 1;
28d81abb
RK
1738 }
1739
14a774a9
RK
1740 /* Protect all the operands from the queue now that they have all been
1741 evaluated. */
28d81abb 1742
1b3d8f8a
GK
1743 generating_concat_p = 0;
1744
235c5021 1745 for (i = 0; i < ninputs - ninout; i++)
6462bb43
AO
1746 ASM_OPERANDS_INPUT (body, i)
1747 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
28d81abb
RK
1748
1749 for (i = 0; i < noutputs; i++)
1750 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1751
4381f7c2 1752 /* For in-out operands, copy output rtx to input rtx. */
235c5021
RK
1753 for (i = 0; i < ninout; i++)
1754 {
235c5021 1755 int j = inout_opnum[i];
84b72302 1756 char buffer[16];
235c5021 1757
6462bb43 1758 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
235c5021 1759 = output_rtx[j];
84b72302
RH
1760
1761 sprintf (buffer, "%d", j);
6462bb43 1762 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
84b72302 1763 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
235c5021
RK
1764 }
1765
1b3d8f8a
GK
1766 generating_concat_p = old_generating_concat_p;
1767
28d81abb 1768 /* Now, for each output, construct an rtx
84b72302
RH
1769 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1770 ARGVEC CONSTRAINTS OPNAMES))
28d81abb
RK
1771 If there is more than one, put them inside a PARALLEL. */
1772
1773 if (noutputs == 1 && nclobbers == 0)
1774 {
84b72302 1775 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
38a448ca 1776 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1777 }
14a774a9 1778
28d81abb
RK
1779 else if (noutputs == 0 && nclobbers == 0)
1780 {
1781 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1782 insn = emit_insn (body);
1783 }
14a774a9 1784
28d81abb
RK
1785 else
1786 {
1787 rtx obody = body;
1788 int num = noutputs;
14a774a9
RK
1789
1790 if (num == 0)
1791 num = 1;
1792
38a448ca 1793 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
28d81abb
RK
1794
1795 /* For each output operand, store a SET. */
28d81abb
RK
1796 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1797 {
1798 XVECEXP (body, 0, i)
38a448ca
RH
1799 = gen_rtx_SET (VOIDmode,
1800 output_rtx[i],
c5c76735 1801 gen_rtx_ASM_OPERANDS
6462bb43 1802 (GET_MODE (output_rtx[i]),
c5c76735 1803 TREE_STRING_POINTER (string),
84b72302 1804 constraints[i], i, argvec, constraintvec,
c5c76735
JL
1805 filename, line));
1806
28d81abb
RK
1807 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1808 }
1809
1810 /* If there are no outputs (but there are some clobbers)
1811 store the bare ASM_OPERANDS into the PARALLEL. */
1812
1813 if (i == 0)
1814 XVECEXP (body, 0, i++) = obody;
1815
1816 /* Store (clobber REG) for each clobbered register specified. */
1817
b4ccaa16 1818 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1819 {
47ee9bcb 1820 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1821 int j = decode_reg_name (regname);
acb5d088 1822 rtx clobbered_reg;
28d81abb 1823
b4ac57ab 1824 if (j < 0)
28d81abb 1825 {
c09e6498 1826 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1827 continue;
1828
c09e6498
RS
1829 if (j == -4) /* `memory', don't cache memory across asm */
1830 {
bffc6177 1831 XVECEXP (body, 0, i++)
38a448ca 1832 = gen_rtx_CLOBBER (VOIDmode,
c5c76735
JL
1833 gen_rtx_MEM
1834 (BLKmode,
1835 gen_rtx_SCRATCH (VOIDmode)));
c09e6498
RS
1836 continue;
1837 }
1838
956d6950 1839 /* Ignore unknown register, error already signaled. */
cc1f5387 1840 continue;
28d81abb
RK
1841 }
1842
1843 /* Use QImode since that's guaranteed to clobber just one reg. */
acb5d088
HPN
1844 clobbered_reg = gen_rtx_REG (QImode, j);
1845
1846 /* Do sanity check for overlap between clobbers and respectively
1847 input and outputs that hasn't been handled. Such overlap
1848 should have been detected and reported above. */
1849 if (!clobber_conflict_found)
1850 {
1851 int opno;
1852
1853 /* We test the old body (obody) contents to avoid tripping
1854 over the under-construction body. */
1855 for (opno = 0; opno < noutputs; opno++)
1856 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1857 internal_error ("asm clobber conflict with output operand");
1858
1859 for (opno = 0; opno < ninputs - ninout; opno++)
1860 if (reg_overlap_mentioned_p (clobbered_reg,
1861 ASM_OPERANDS_INPUT (obody, opno)))
1862 internal_error ("asm clobber conflict with input operand");
1863 }
1864
b4ccaa16 1865 XVECEXP (body, 0, i++)
acb5d088 1866 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
28d81abb
RK
1867 }
1868
1869 insn = emit_insn (body);
1870 }
1871
947255ed
RH
1872 /* For any outputs that needed reloading into registers, spill them
1873 back to where they belong. */
1874 for (i = 0; i < noutputs; ++i)
1875 if (real_output_rtx[i])
1876 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1877
28d81abb
RK
1878 free_temp_slots ();
1879}
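
/* A hedged illustration of the input this routine consumes: the GNU C
   statement below (assuming an x86 target; the register use and the
   "incl" template are illustrative only) reaches expand_asm_operands
   with one in-out operand and one clobber.  The "+r" constraint is what
   makes VALUE appear in OUTPUTS and also be counted in NINOUT above,
   and "cc" is handled by decode_reg_name in the clobber loop.  */
#if 0
void
example (void)
{
  int value = 41;

  __asm__ volatile ("incl %[val]"
		    : [val] "+r" (value)   /* output 0, duplicated as an input */
		    : /* no separate inputs */
		    : "cc");		   /* clobber: the flags */
}
#endif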

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (outputs, inputs)
     tree outputs, inputs;
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
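
/* For instance (a sketch, assuming an x86 target): both constraints
   below contain one comma, i.e. two alternatives ("r" or "m" for the
   output, "ri" or "r" for the input), so the comma counts agree and the
   check above passes.  Dropping the comma from just one of them would
   elicit the "differ in number of alternatives" error.  */
#if 0
int dst;
int src = 7;

void
example (void)
{
  __asm__ ("movl %1, %0" : "=r,m" (dst) : "ri,r" (src));
}
#endif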

/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  The names are STRING_CST nodes, so
   we compare them with simple_cst_equal.  */

static bool
check_unique_operand_names (outputs, inputs)
     tree outputs, inputs;
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name '%s'",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}
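
/* A sketch of the case this rejects: the operand name "x" is used
   twice, so the loops above report "duplicate asm operand name 'x'".  */
#if 0
void
example (int a, int b)
{
  __asm__ ("" : [x] "=r" (a), [x] "=r" (b));	/* error: duplicate name */
}
#endif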

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in OUTPUTS and INPUTS to numbers, and replace the name expansions in
   STRING and in the constraints with those numbers.  */

static tree
resolve_operand_names (string, outputs, inputs, pconstraints)
     tree string;
     tree outputs, inputs;
     const char **pconstraints;
{
  char *buffer = xstrdup (TREE_STRING_POINTER (string));
  char *p;
  tree t;

  /* Assume that we will not need extra space to perform the substitution.
     This is because we get to remove '[' and ']', which means we cannot have
     a problem until we have more than 999 operands.  */

  p = buffer;
  while ((p = strchr (p, '%')) != NULL)
    {
      if (p[1] == '[')
	p += 1;
      else if (ISALPHA (p[1]) && p[2] == '[')
	p += 2;
      else
	{
	  p += 1;
	  continue;
	}

      p = resolve_operand_name_1 (p, outputs, inputs);
    }

  string = build_string (strlen (buffer), buffer);
  free (buffer);

  /* Collect output constraints here because it's convenient.
     There should be no named operands here; this is verified
     in expand_asm_operands.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
    *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Substitute [<name>] in input constraint strings.  */
  for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
    {
      const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') == NULL)
	*pconstraints = c;
      else
	{
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);

	  *pconstraints = ggc_alloc_string (buffer, -1);
	  free (buffer);
	}
    }

  return string;
}
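
/* After resolution, the template below reads as if the user had written
   "addl %1, %0"; the bracketed names are gone and only operand numbers
   remain (a sketch, assuming an x86 target).  */
#if 0
void
example (void)
{
  int acc = 1;
  int inc = 2;

  __asm__ ("addl %[inc], %[acc]"	/* becomes "addl %1, %0" */
	   : [acc] "+r" (acc)
	   : [inc] "ri" (inc));
}
#endif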

/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (p, outputs, inputs)
     char *p;
     tree outputs, inputs;
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  *q = '\0';
  error ("undefined named operand '%s'", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  if (p > q)
    abort ();

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.
   Provided just for backward-compatibility.  expand_expr_stmt_value()
   should be used for new code.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  expand_expr_stmt_value (exp, -1, 1);
}

/* Generate RTL to evaluate the expression EXP.  WANT_VALUE tells
   whether to (1) save the value of the expression, (0) discard it or
   (-1) use expr_stmts_for_value to tell.  The use of -1 is
   deprecated, and retained only for backward compatibility.  */

void
expand_expr_stmt_value (exp, want_value, maybe_last)
     tree exp;
     int want_value, maybe_last;
{
  rtx value;
  tree type;

  if (want_value == -1)
    want_value = expr_stmts_for_value != 0;

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except for the last statement in ({...}) where they may be useful.  */
  if (! want_value
      && (expr_stmts_for_value == 0 || ! maybe_last)
      && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	{
	  if ((extra_warnings || warn_unused_value)
	      && !(TREE_CODE (exp) == CONVERT_EXPR
		   && VOID_TYPE_P (TREE_TYPE (exp))))
	    warning_with_file_and_line (emit_filename, emit_lineno,
					"statement with no effect");
	}
      else if (warn_unused_value)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  /* The call to `expand_expr' could cause last_expr_type and
     last_expr_value to get reset.  Therefore, we set last_expr_value
     and last_expr_type *after* calling expand_expr.  */
  value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
		       VOIDmode, 0);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_expr (TYPE_SIZE (type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  if (want_value)
    {
      last_expr_value = value;
      last_expr_type = type;
    }

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case SAVE_EXPR:
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto maybe_warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp)
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

    maybe_warn:
      /* If this is an expression with side effects, don't warn.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;

      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
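
/* A sketch of how the cases above play out under -Wunused-value
   (assuming the C front end routes full-statement expressions through
   expand_expr_stmt_value, which calls this function only for
   expressions that have side effects):  */
#if 0
int
example (int x)
{
  (x++, x + 1);		/* warns: "value computed is not used" */
  (x++, 0);		/* constant second operand: no warning */
  (void) (x + 1);	/* cast to void: no warning */
  return x;
}
#endif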

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = NULL_TREE;
  last_expr_value = NULL_RTX;
}

/* Begin a statement-expression, i.e., a series of statements which
   may return a value.  Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to
   expand_end_stmt_expr.  If HAS_SCOPE is nonzero, temporaries created
   in the statement-expression are deallocated at the end of the
   expression.  */

tree
expand_start_stmt_expr (has_scope)
     int has_scope;
{
  tree t;

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  t = make_node (RTL_EXPR);
  do_pending_stack_adjust ();
  if (has_scope)
    start_sequence_for_rtl_expr (t);
  else
    start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate an RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (! last_expr_value || ! last_expr_type)
    {
      last_expr_value = const0_rtx;
      last_expr_type = void_type_node;
    }
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  clear_last_expr ();
  expr_stmts_for_value--;

  return t;
}
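
/* The construct these two routines bracket is the GNU C statement
   expression; the value of its last expression statement is what ends
   up in last_expr_value/last_expr_type (a sketch):  */
#if 0
#define SQUARE(x) ({ int t_ = (x); t_ * t_; })

int
example (void)
{
  return SQUARE (5);	/* 25: "t_ * t_" supplies the RTL_EXPR's value */
}
#endif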

/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->desc = COND_NESTING;
  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}

/* Generate RTL between the then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}

/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}

/* After calling expand_start_else, turn this "else" into an "else if"
   by providing another condition.  */

void
expand_elseif (cond)
     tree cond;
{
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}

/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
  clear_last_expr ();
}
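
/* How a front end might drive the routines above for
   "if (cond) A; else B;" (a sketch; the real callers are the language
   front ends, and COND here stands for the condition tree):  */
#if 0
expand_start_cond (cond, /* exitflag = */ 0);  /* jump to next_label if false */
/* ... expand the statements of A ... */
expand_start_else ();			/* jump to endif_label; emit next_label */
/* ... expand the statements of B ... */
expand_end_cond ();			/* emit endif_label; pop cond_stack */
#endif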

/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->desc = LOOP_NESTING;
  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.alt_end_label = 0;
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}

/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}

/* Begin a null, aka do { } while (0) "loop".  But since the contents
   of said loop can still contain a break, we must frob the loop nest.  */

struct nesting *
expand_start_null_loop ()
{
  struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->desc = LOOP_NESTING;
  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.alt_end_label = NULL_RTX;
  thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
  thisloop->exit_label = thisloop->data.loop.end_label;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  return thisloop;
}

/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}

/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  rtx start_label = loop_stack->data.loop.start_label;
  rtx etc_note;
  int eh_regions, debug_blocks;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If the loop starts with a loop exit, roll that to the end where
     it will optimize together with the jump back.

     If the loop presently looks like this (in pseudo-C):

	LOOP_BEG
	start_label:
	  if (test) goto end_label;
	LOOP_END_TOP_COND
	  body;
	  goto start_label;
	end_label:

     transform it to look like:

	LOOP_BEG
	  goto start_label;
	top_label:
	  body;
	start_label:
	  if (test) goto end_label;
	  goto top_label;
	end_label:

     We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
     the end of the entry conditional.  Without this, our lexical scan
     can't tell the difference between an entry conditional and a
     body conditional that exits the loop.  Mistaking the two means
     that we can misplace the NOTE_INSN_LOOP_CONT note, which can
     screw up loop unrolling.

     Things will be oh so much better when loop optimization is done
     off of a proper control flow graph...  */

  /* Scan insns from the top of the loop looking for the END_TOP_COND note.  */

  eh_regions = debug_blocks = 0;
  for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
    if (GET_CODE (etc_note) == NOTE)
      {
	if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
	  break;

	/* We must not walk into a nested loop.  */
	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
	  {
	    etc_note = NULL_RTX;
	    break;
	  }

	/* At the same time, scan for EH region notes, as we don't want
	   to scrog region nesting.  This shouldn't happen, but...  */
	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
	  eh_regions++;
	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
	  {
	    if (--eh_regions < 0)
	      /* We've come to the end of an EH region, but never saw the
		 beginning of that region.  That means that an EH region
		 begins before the top of the loop, and ends in the middle
		 of it.  The existence of such a situation violates a basic
		 assumption in this code, since that would imply that even
		 when EH_REGIONS is zero, we might move code out of an
		 exception region.  */
	      abort ();
	  }

	/* Likewise for debug scopes.  In this case we'll either (1) move
	   all of the notes if they are properly nested or (2) leave the
	   notes alone and only rotate the loop at high optimization
	   levels when we expect to scrog debug info.  */
	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
	  debug_blocks++;
	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
	  debug_blocks--;
      }

  if (etc_note
      && optimize
      && eh_regions == 0
      && (debug_blocks == 0 || optimize >= 2)
      && NEXT_INSN (etc_note) != NULL_RTX
      && ! any_condjump_p (get_last_insn ()))
    {
      /* We found one.  Move everything from START to ETC to the end
	 of the loop, and add a jump from the top of the loop.  */
      rtx top_label = gen_label_rtx ();
      rtx start_move = start_label;

      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	 then we want to move this note also.  */
      if (GET_CODE (PREV_INSN (start_move)) == NOTE
	  && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
	start_move = PREV_INSN (start_move);

      emit_label_before (top_label, start_move);

      /* Actually move the insns.  If the debug scopes are nested, we
	 can move everything at once.  Otherwise we have to move them
	 one by one and squeeze out the block notes.  */
      if (debug_blocks == 0)
	reorder_insns (start_move, etc_note, get_last_insn ());
      else
	{
	  rtx insn, next_insn;
	  for (insn = start_move; insn; insn = next_insn)
	    {
	      /* Figure out which insn comes after this one.  We have
		 to do this before we move INSN.  */
	      next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));

	      if (GET_CODE (insn) == NOTE
		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
		continue;

	      reorder_insns (insn, insn, get_last_insn ());
	    }
	}

      /* Add the jump from the top of the loop.  */
      emit_jump_insn_before (gen_jump (start_label), top_label);
      emit_barrier_before (top_label);
      start_label = top_label;
    }

  emit_jump (start_label);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);

  clear_last_expr ();
}

/* Finish a null loop, aka do { } while (0).  */

void
expand_end_null_loop ()
{
  do_pending_stack_adjust ();
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);

  clear_last_expr ();
}

/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  /* Emit information for branch prediction.  */
  rtx note;

  note = emit_note (NULL, NOTE_INSN_PREDICTION);
  NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
  clear_last_expr ();
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}

/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop (whichloop)
     struct nesting *whichloop;
{
  clear_last_expr ();
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}

/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  rtx label = gen_label_rtx ();
  rtx last_insn;
  clear_last_expr ();

  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  /* In order to handle fixups, we actually create a conditional jump
     around an unconditional branch to exit the loop.  If fixups are
     necessary, they go before the unconditional branch.  */

  do_jump (cond, NULL_RTX, label);
  last_insn = get_last_insn ();
  if (GET_CODE (last_insn) == CODE_LABEL)
    whichloop->data.loop.alt_end_label = last_insn;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
			NULL_RTX);
  emit_label (label);

  return 1;
}

/* Like expand_exit_loop_if_false except also emit a note marking
   the end of the conditional.  Should only be used immediately
   after expand_start_loop.  */

int
expand_exit_loop_top_cond (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  if (! expand_exit_loop_if_false (whichloop, cond))
    return 0;

  emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
  return 1;
}
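
/* Taken together, a front end might expand "while (cond) BODY;" like
   so (a sketch; COND stands for the condition tree):  */
#if 0
struct nesting *loop = expand_start_loop (/* exit_flag = */ 1);
expand_exit_loop_top_cond (loop, cond);	/* if (!cond) goto end_label; plus
					   the LOOP_END_TOP_COND marker */
/* ... expand BODY ... */
expand_end_loop ();			/* goto start_label; emit end_label */
#endif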

/* Return nonzero if the loop nest is empty.  Else return zero.  */

int
stmt_loop_nest_empty ()
{
  /* cfun->stmt can be NULL if we are building a call to get the
     EH context for a setjmp/longjmp EH target and the current
     function was a deferred inline function.  */
  return (cfun->stmt == NULL || loop_stack == NULL);
}

/* Return nonzero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

int
preserve_subexpressions_p ()
{
  rtx insn;

  if (flag_expensive_optimizations)
    return 1;

  if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
}

/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something ()
{
  struct nesting *n;
  clear_last_expr ();
  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
	return 1;
      }

  return 0;
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  rtx last_insn;

  last_insn = get_last_insn ();

  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 (last_insn);
}

/* Try to guess whether the returned value means an error code.  */

static enum br_predictor
return_prediction (val)
     rtx val;
{
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    {
      /* NULL is usually not returned.  */
      if (val == const0_rtx)
	return PRED_NULL_RETURN;
    }
  else
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (GET_CODE (val) == CONST_INT
	  && INTVAL (val) < 0)
	return PRED_NEGATIVE_RETURN;
      /* Constant return values are also usually errors;
	 zero/one often mean booleans, so exclude them from the
	 heuristics.  */
      if (CONSTANT_P (val)
	  && (val != const0_rtx && val != const1_rtx))
	return PRED_CONST_RETURN;
    }
  return PRED_NO_PREDICTION;
}
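
/* The returns below illustrate what the heuristic classifies (a
   sketch; the prediction is attached when each return is expanded):  */
#if 0
int
example (int fd)
{
  if (fd < 0)
    return -1;		/* negative constant: PRED_NEGATIVE_RETURN */
  if (fd == 0)
    return 42;		/* other non-0/1 constant: PRED_CONST_RETURN */
  return fd;		/* non-constant: PRED_NO_PREDICTION */
}
#endif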

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (val)
     rtx val;
{
  rtx last_insn;
  rtx return_reg;
  enum br_predictor pred;

  if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
    {
      /* Emit information for branch prediction.  */
      rtx note;

      note = emit_note (NULL, NOTE_INSN_PREDICTION);

      NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
    }

  last_insn = get_last_insn ();
  return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef PROMOTE_FUNCTION_RETURN
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode old_mode
	= DECL_MODE (DECL_RESULT (current_function_decl));
      enum machine_mode mode
	= promote_mode (type, old_mode, &unsignedp, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);
#endif
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 (last_insn);
}

/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.  */

static void
expand_null_return_1 (last_insn)
     rtx last_insn;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  clear_last_expr ();

  if (end_label == 0)
    end_label = return_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (retval)
     tree retval;
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (VOID_TYPE_P (TREE_TYPE (retval)))
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      tree expr;

      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      start_cleanup_deferral ();
      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 1));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      emit_label (label);

      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 2));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      end_cleanup_deferral ();
      return;
    }

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  The value must go into a pseudo in case there
     are cleanups that will clobber the real return register.  */

  if (retval_rhs != 0
      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
      && GET_CODE (result_rtl) == REG)
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT big_endian_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* Structures whose size is not a multiple of a word are aligned
	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
	 machine, this means we must skip the empty high order bytes when
	 calculating the bit offset.  */
      if (BYTES_BIG_ENDIAN
	  && !FUNCTION_ARG_REG_LITTLE_ENDIAN
	  && bytes % UNITS_PER_WORD)
	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
						  * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = big_endian_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == big_endian_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == big_endian_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode,
					      BITS_PER_WORD),
			   BITS_PER_WORD);
	}

      /* Find the smallest integer mode large enough to hold the
	 entire structure and use that mode instead of BLKmode
	 on the USE insn for the return register.  */
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	/* Have we found a large enough mode?  */
	if (GET_MODE_SIZE (tmpmode) >= bytes)
	  break;

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
	abort ();

      PUT_MODE (result_rtl, tmpmode);

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      emit_queue ();
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (GET_CODE (result_rtl) == REG
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      emit_queue ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      emit_queue ();
      expand_value_return (result_rtl);
    }
}
3207
3208/* Return 1 if the end of the generated RTX is not a barrier.
3209 This means code already compiled can drop through. */
3210
3211int
3212drop_through_at_end_p ()
3213{
3214 rtx insn = get_last_insn ();
3215 while (insn && GET_CODE (insn) == NOTE)
3216 insn = PREV_INSN (insn);
3217 return insn && GET_CODE (insn) != BARRIER;
3218}
3219\f
b06775f9
RH
3220/* Attempt to optimize a potential tail recursion call into a goto.
3221 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
4381f7c2
KH
3222 where to place the jump to the tail recursion label.
3223
b06775f9 3224 Return TRUE if the call was optimized into a goto. */
642cac7b 3225
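/* Source-level sketch of the transformation (illustrative only):

     int fact (int n, int acc)
     {
       if (n == 0) return acc;
       return fact (n - 1, n * acc);   /- tail-recursive call -/
     }

   is turned, roughly, into

     fact: if (n == 0) return acc;
           <store the new actuals into the formals, computing
            them into temporaries first so they do not clobber
            one another>
           goto fact;

   tail_recursion_args below performs the store step. */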
564ea051 3226int
b06775f9
RH
3227optimize_tail_recursion (arguments, last_insn)
3228 tree arguments;
642cac7b
JL
3229 rtx last_insn;
3230{
b06775f9
RH
3231 /* Finish checking validity, and if valid emit code to set the
3232 argument variables for the new call. */
3233 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
642cac7b
JL
3234 {
3235 if (tail_recursion_label == 0)
3236 {
3237 tail_recursion_label = gen_label_rtx ();
3238 emit_label_after (tail_recursion_label,
3239 tail_recursion_reentry);
3240 }
3241 emit_queue ();
3242 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3243 emit_barrier ();
564ea051 3244 return 1;
642cac7b 3245 }
564ea051 3246 return 0;
642cac7b
JL
3247}
3248
28d81abb
RK
3249/* Emit code to alter this function's formal parms for a tail-recursive call.
3250 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3251 FORMALS is the chain of decls of formals.
3252 Return 1 if this can be done;
3253 otherwise return 0 and do not emit any code. */
3254
3255static int
3256tail_recursion_args (actuals, formals)
3257 tree actuals, formals;
3258{
b3694847
SS
3259 tree a = actuals, f = formals;
3260 int i;
3261 rtx *argvec;
28d81abb
RK
3262
3263 /* Check that number and types of actuals are compatible
3264 with the formals. This is not always true in valid C code.
3265 Also check that no formal needs to be addressable
3266 and that all formals are scalars. */
3267
3268 /* Also count the args. */
3269
3270 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3271 {
5c7fe359
RK
3272 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3273 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
28d81abb
RK
3274 return 0;
3275 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3276 return 0;
3277 }
3278 if (a != 0 || f != 0)
3279 return 0;
3280
3281 /* Compute all the actuals. */
3282
3283 argvec = (rtx *) alloca (i * sizeof (rtx));
3284
3285 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 3286 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3287
3288 /* Find which actual values refer to current values of previous formals.
3289 Copy each of them now, before any formal is changed. */
3290
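 /* Illustration: for a tail call f (y, x) inside f (x, y), the
 expansion of the second actual mentions the register of the first
 formal x; storing the new value into x first would corrupt it, so
 such an actual is copied to a fresh register before the stores. */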
3291 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3292 {
3293 int copy = 0;
b3694847 3294 int j;
28d81abb
RK
3295 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3296 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
4381f7c2
KH
3297 {
3298 copy = 1;
3299 break;
3300 }
28d81abb
RK
3301 if (copy)
3302 argvec[i] = copy_to_reg (argvec[i]);
3303 }
3304
3305 /* Store the values of the actuals into the formals. */
3306
3307 for (f = formals, a = actuals, i = 0; f;
3308 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3309 {
98f3b471 3310 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
28d81abb
RK
3311 emit_move_insn (DECL_RTL (f), argvec[i]);
3312 else
3313 convert_move (DECL_RTL (f), argvec[i],
3314 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3315 }
3316
3317 free_temp_slots ();
3318 return 1;
3319}
3320\f
3321/* Generate the RTL code for entering a binding contour.
3322 The variables are declared one by one, by calls to `expand_decl'.
3323
8e91754e
MM
3324 FLAGS is a bitwise or of the following flags:
3325
3326 1 - Nonzero if this construct should be visible to
3327 `exit_something'.
3328
3329 2 - Nonzero if this contour does not require a
3330 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3331 language-independent code should set this flag because they
3332 will not create corresponding BLOCK nodes. (There should be
3333 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3334 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
4381f7c2 3335 when expand_end_bindings is called.
a97901e6
MM
3336
3337 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3338 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3339 note. */
28d81abb
RK
3340
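/* Usage sketch (illustrative): language-independent code usually
   calls expand_start_bindings (2), a wrapper that passes a null
   BLOCK, so no NOTE_INSN_BLOCK_BEG note is created; a frontend
   emitting a real scope calls
   expand_start_bindings_and_block (0, block) with its BLOCK node and
   later calls expand_end_bindings with MARK_ENDS nonzero. */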
3341void
a97901e6 3342expand_start_bindings_and_block (flags, block)
8e91754e 3343 int flags;
a97901e6 3344 tree block;
28d81abb
RK
3345{
3346 struct nesting *thisblock = ALLOC_NESTING ();
8e91754e
MM
3347 rtx note;
3348 int exit_flag = ((flags & 1) != 0);
3349 int block_flag = ((flags & 2) == 0);
4381f7c2 3350
a97901e6
MM
3351 /* If a BLOCK is supplied, then the caller should be requesting a
3352 NOTE_INSN_BLOCK_BEG note. */
3353 if (!block_flag && block)
3354 abort ();
8e91754e 3355
a97901e6
MM
3356 /* Create a note to mark the beginning of the block. */
3357 if (block_flag)
3358 {
6496a589 3359 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
a97901e6
MM
3360 NOTE_BLOCK (note) = block;
3361 }
3362 else
6496a589 3363 note = emit_note (NULL, NOTE_INSN_DELETED);
4381f7c2 3364
28d81abb
RK
3365 /* Make an entry on block_stack for the block we are entering. */
3366
e2500fed 3367 thisblock->desc = BLOCK_NESTING;
28d81abb
RK
3368 thisblock->next = block_stack;
3369 thisblock->all = nesting_stack;
3370 thisblock->depth = ++nesting_depth;
3371 thisblock->data.block.stack_level = 0;
3372 thisblock->data.block.cleanups = 0;
3f1d071b 3373 thisblock->data.block.n_function_calls = 0;
e976b8b2 3374 thisblock->data.block.exception_region = 0;
3f1d071b 3375 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
e976b8b2
MS
3376
3377 thisblock->data.block.conditional_code = 0;
3378 thisblock->data.block.last_unconditional_cleanup = note;
a571f7a0
MM
3379 /* When we insert instructions after the last unconditional cleanup,
3380 we don't adjust last_insn. That means that a later add_insn will
3381 clobber the instructions we've just added. The easiest way to
3382 fix this is to just insert another instruction here, so that the
3383 instructions inserted after the last unconditional cleanup are
3384 never the last instruction. */
6496a589 3385 emit_note (NULL, NOTE_INSN_DELETED);
e976b8b2 3386
28d81abb
RK
3387 if (block_stack
3388 && !(block_stack->data.block.cleanups == NULL_TREE
3389 && block_stack->data.block.outer_cleanups == NULL_TREE))
3390 thisblock->data.block.outer_cleanups
3391 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3392 block_stack->data.block.outer_cleanups);
3393 else
3394 thisblock->data.block.outer_cleanups = 0;
28d81abb
RK
3395 thisblock->data.block.label_chain = 0;
3396 thisblock->data.block.innermost_stack_block = stack_block_stack;
3397 thisblock->data.block.first_insn = note;
3f1d071b 3398 thisblock->data.block.block_start_count = ++current_block_start_count;
28d81abb
RK
3399 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3400 block_stack = thisblock;
3401 nesting_stack = thisblock;
3402
b93a436e
JL
3403 /* Make a new level for allocating stack slots. */
3404 push_temp_slots ();
28d81abb
RK
3405}
3406
e976b8b2
MS
3407/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3408 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3409 expand_expr are made. After we end the region, we know that all
3410 space for all temporaries that were created by TARGET_EXPRs will be
3411 destroyed and their space freed for reuse. */
3412
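/* Usage sketch (illustrative): calls come in matched pairs,

     expand_start_target_temps ();
     ... expand_expr calls that may create TARGET_EXPR temporaries ...
     expand_end_target_temps ();

   after which all such temporaries are known dead and their stack
   slots reusable. */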
3413void
3414expand_start_target_temps ()
3415{
3416 /* This is so that even if the result is preserved, the space
3417 allocated will be freed, as we know that it is no longer in use. */
3418 push_temp_slots ();
3419
3420 /* Start a new binding layer that will keep track of all cleanup
3421 actions to be performed. */
8e91754e 3422 expand_start_bindings (2);
e976b8b2
MS
3423
3424 target_temp_slot_level = temp_slot_level;
3425}
3426
3427void
3428expand_end_target_temps ()
3429{
3430 expand_end_bindings (NULL_TREE, 0, 0);
4381f7c2 3431
e976b8b2
MS
3432 /* This is so that even if the result is preserved, the space
3433 allocated will be freed, as we know that it is no longer in use. */
3434 pop_temp_slots ();
3435}
3436
deb5e280
JM
3437/* Given a pointer to a BLOCK node, return nonzero if (and only if) the node
3438 in question represents the outermost pair of curly braces (i.e. the "body
3439 block") of a function or method.
3440
3441 For any BLOCK node representing a "body block" of a function or method, the
3442 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3443 represents the outermost (function) scope for the function or method (i.e.
3444 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 3445 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
3446
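/* Illustration of the tree shape tested below:

     FUNCTION_DECL
       BLOCK              <- outermost scope, includes the formals
         BLOCK            <- the "body block"; STMT points here

   is_body_block returns 1 only for the inner BLOCK. */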
3447int
3448is_body_block (stmt)
b3694847 3449 tree stmt;
deb5e280
JM
3450{
3451 if (TREE_CODE (stmt) == BLOCK)
3452 {
3453 tree parent = BLOCK_SUPERCONTEXT (stmt);
3454
3455 if (parent && TREE_CODE (parent) == BLOCK)
3456 {
3457 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3458
3459 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3460 return 1;
3461 }
3462 }
3463
3464 return 0;
3465}
3466
e976b8b2
MS
3467/* True if we are currently emitting insns in an area of output code
3468 that is controlled by a conditional expression. This is used by
3469 the cleanup handling code to generate conditional cleanup actions. */
3470
3471int
3472conditional_context ()
3473{
3474 return block_stack && block_stack->data.block.conditional_code;
3475}
3476
91088ddb
JM
3477/* Return an opaque pointer to the current nesting level, so frontend code
3478 can check its own sanity. */
3479
3480struct nesting *
3481current_nesting_level ()
3482{
3483 return cfun ? block_stack : 0;
3484}
3485
ba716ac9
BS
3486/* Emit a handler label for a nonlocal goto handler.
3487 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3488
e881bb1b 3489static rtx
ba716ac9
BS
3490expand_nl_handler_label (slot, before_insn)
3491 rtx slot, before_insn;
3492{
3493 rtx insns;
3494 rtx handler_label = gen_label_rtx ();
3495
0045d504 3496 /* Don't let cleanup_cfg delete the handler. */
ba716ac9
BS
3497 LABEL_PRESERVE_P (handler_label) = 1;
3498
3499 start_sequence ();
3500 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3501 insns = get_insns ();
3502 end_sequence ();
2f937369 3503 emit_insn_before (insns, before_insn);
ba716ac9
BS
3504
3505 emit_label (handler_label);
e881bb1b
RH
3506
3507 return handler_label;
ba716ac9
BS
3508}
3509
3510/* Emit code to restore vital registers at the beginning of a nonlocal goto
3511 handler. */
3512static void
3513expand_nl_goto_receiver ()
3514{
3515#ifdef HAVE_nonlocal_goto
3516 if (! HAVE_nonlocal_goto)
3517#endif
3518 /* First adjust our frame pointer to its actual value. It was
3519 previously set to the start of the virtual area corresponding to
3520 the stacked variables when we branched here and now needs to be
3521 adjusted to the actual hardware fp value.
3522
3523 Assignments to virtual registers are converted by
3524 instantiate_virtual_regs into the corresponding assignment
3525 to the underlying register (fp in this case) that makes
3526 the original assignment true.
3527 So the following insn will actually be
3528 decrementing fp by STARTING_FRAME_OFFSET. */
3529 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3530
3531#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3532 if (fixed_regs[ARG_POINTER_REGNUM])
3533 {
3534#ifdef ELIMINABLE_REGS
3535 /* If the argument pointer can be eliminated in favor of the
3536 frame pointer, we don't need to restore it. We assume here
3537 that if such an elimination is present, it can always be used.
3538 This is the case on all known machines; if we don't make this
3539 assumption, we do unnecessary saving on many machines. */
8b60264b 3540 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
ba716ac9
BS
3541 size_t i;
3542
b6a1cbae 3543 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
ba716ac9
BS
3544 if (elim_regs[i].from == ARG_POINTER_REGNUM
3545 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3546 break;
3547
b6a1cbae 3548 if (i == ARRAY_SIZE (elim_regs))
ba716ac9
BS
3549#endif
3550 {
3551 /* Now restore our arg pointer from the address at which it
278ed218 3552 was saved in our stack frame. */
ba716ac9 3553 emit_move_insn (virtual_incoming_args_rtx,
278ed218 3554 copy_to_reg (get_arg_pointer_save_area (cfun)));
ba716ac9
BS
3555 }
3556 }
3557#endif
3558
3559#ifdef HAVE_nonlocal_goto_receiver
3560 if (HAVE_nonlocal_goto_receiver)
3561 emit_insn (gen_nonlocal_goto_receiver ());
3562#endif
3563}
3564
3565/* Make handlers for nonlocal gotos taking place in the function calls in
3566 block THISBLOCK. */
3567
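/* Illustration: nonlocal gotos arise from the GNU C nested-function
   extension, e.g.

     void outer (void)
     {
       __label__ l;
       void inner (void) { goto l; }
       f (inner);
     l: ;
     }

   The receivers emitted below are where control re-enters outer when
   inner executes the goto. */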
3568static void
3569expand_nl_goto_receivers (thisblock)
3570 struct nesting *thisblock;
3571{
3572 tree link;
3573 rtx afterward = gen_label_rtx ();
3574 rtx insns, slot;
e881bb1b 3575 rtx label_list;
ba716ac9
BS
3576 int any_invalid;
3577
3578 /* Record the handler address in the stack slot for that purpose,
3579 during this block, saving and restoring the outer value. */
3580 if (thisblock->next != 0)
3581 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3582 {
3583 rtx save_receiver = gen_reg_rtx (Pmode);
3584 emit_move_insn (XEXP (slot, 0), save_receiver);
3585
3586 start_sequence ();
3587 emit_move_insn (save_receiver, XEXP (slot, 0));
3588 insns = get_insns ();
3589 end_sequence ();
2f937369 3590 emit_insn_before (insns, thisblock->data.block.first_insn);
ba716ac9
BS
3591 }
3592
3593 /* Jump around the handlers; they run only when specially invoked. */
3594 emit_jump (afterward);
3595
3596 /* Make a separate handler for each label. */
3597 link = nonlocal_labels;
3598 slot = nonlocal_goto_handler_slots;
e881bb1b 3599 label_list = NULL_RTX;
ba716ac9
BS
3600 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3601 /* Skip any labels we shouldn't be able to jump to from here;
3602 we generate one special handler for all of them below, which just
3603 calls abort. */
3604 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3605 {
e881bb1b
RH
3606 rtx lab;
3607 lab = expand_nl_handler_label (XEXP (slot, 0),
3608 thisblock->data.block.first_insn);
3609 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3610
ba716ac9
BS
3611 expand_nl_goto_receiver ();
3612
3613 /* Jump to the "real" nonlocal label. */
3614 expand_goto (TREE_VALUE (link));
3615 }
3616
3617 /* A second pass over all nonlocal labels; this time we handle those
3618 we should not be able to jump to at this point. */
3619 link = nonlocal_labels;
3620 slot = nonlocal_goto_handler_slots;
3621 any_invalid = 0;
3622 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3623 if (DECL_TOO_LATE (TREE_VALUE (link)))
3624 {
e881bb1b
RH
3625 rtx lab;
3626 lab = expand_nl_handler_label (XEXP (slot, 0),
3627 thisblock->data.block.first_insn);
3628 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
ba716ac9
BS
3629 any_invalid = 1;
3630 }
3631
3632 if (any_invalid)
3633 {
3634 expand_nl_goto_receiver ();
9602f5a0 3635 expand_builtin_trap ();
ba716ac9
BS
3636 }
3637
e881bb1b 3638 nonlocal_goto_handler_labels = label_list;
ba716ac9
BS
3639 emit_label (afterward);
3640}
3641
ba716677
MM
3642/* Warn about any unused VARS (which may contain nodes other than
3643 VAR_DECLs, but such nodes are ignored). The nodes are connected
3644 via the TREE_CHAIN field. */
3645
3646void
3647warn_about_unused_variables (vars)
3648 tree vars;
3649{
3650 tree decl;
3651
078721e1 3652 if (warn_unused_variable)
ba716677 3653 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 3654 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
3655 && ! TREE_USED (decl)
3656 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 3657 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ba716677
MM
3658 warning_with_decl (decl, "unused variable `%s'");
3659}
3660
28d81abb 3661/* Generate RTL code to terminate a binding contour.
e97b5c12
MM
3662
3663 VARS is the chain of VAR_DECL nodes for the variables bound in this
3664 contour. There may actually be other nodes in this chain, but any
3665 nodes other than VAR_DECLS are ignored.
3666
28d81abb
RK
3667 MARK_ENDS is nonzero if we should put a note at the beginning
3668 and end of this binding contour.
3669
3670 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3671 (That is true automatically if the contour has a saved stack level.) */
3672
3673void
3674expand_end_bindings (vars, mark_ends, dont_jump_in)
3675 tree vars;
3676 int mark_ends;
3677 int dont_jump_in;
3678{
b3694847 3679 struct nesting *thisblock = block_stack;
e976b8b2 3680
ba716677
MM
3681 /* If any of the variables in this scope were not used, warn the
3682 user. */
3683 warn_about_unused_variables (vars);
28d81abb 3684
28d81abb
RK
3685 if (thisblock->exit_label)
3686 {
3687 do_pending_stack_adjust ();
3688 emit_label (thisblock->exit_label);
3689 }
3690
ba716ac9 3691 /* If necessary, make handlers for nonlocal gotos taking
28d81abb 3692 place in the function calls in this block. */
3f1d071b 3693 if (function_call_count != thisblock->data.block.n_function_calls
28d81abb
RK
3694 && nonlocal_labels
3695 /* Make handler for outermost block
3696 if there were any nonlocal gotos to this function. */
3697 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3698 /* Make handler for inner block if it has something
3699 special to do when you jump out of it. */
3700 : (thisblock->data.block.cleanups != 0
3701 || thisblock->data.block.stack_level != 0)))
ba716ac9 3702 expand_nl_goto_receivers (thisblock);
28d81abb 3703
72eb1038
BH
3704 /* Don't allow jumping into a block that has a stack level.
3705 Cleanups are allowed, though. */
28d81abb 3706 if (dont_jump_in
72eb1038 3707 || thisblock->data.block.stack_level != 0)
28d81abb
RK
3708 {
3709 struct label_chain *chain;
3710
3711 /* Any labels in this block are no longer valid to go to.
3712 Mark them to cause an error message. */
3713 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3714 {
3715 DECL_TOO_LATE (chain->label) = 1;
3716 /* If any goto without a fixup came to this label,
3717 that must be an error, because gotos without fixups
72eb1038 3718 come from outside all saved stack-levels. */
28d81abb
RK
3719 if (TREE_ADDRESSABLE (chain->label))
3720 error_with_decl (chain->label,
3721 "label `%s' used before containing binding contour");
3722 }
3723 }
3724
3725 /* Restore stack level in effect before the block
3726 (only if variable-size objects allocated). */
3727 /* Perform any cleanups associated with the block. */
3728
3729 if (thisblock->data.block.stack_level != 0
3730 || thisblock->data.block.cleanups != 0)
3731 {
04da69d3
JM
3732 int reachable;
3733 rtx insn;
28d81abb 3734
50d1b7a1
MS
3735 /* Don't let cleanups affect ({...}) constructs. */
3736 int old_expr_stmts_for_value = expr_stmts_for_value;
3737 rtx old_last_expr_value = last_expr_value;
3738 tree old_last_expr_type = last_expr_type;
3739 expr_stmts_for_value = 0;
28d81abb 3740
04da69d3
JM
3741 /* Only clean up here if this point can actually be reached. */
3742 insn = get_last_insn ();
3743 if (GET_CODE (insn) == NOTE)
3744 insn = prev_nonnote_insn (insn);
d1ee23e5 3745 reachable = (! insn || GET_CODE (insn) != BARRIER);
4381f7c2 3746
50d1b7a1
MS
3747 /* Do the cleanups. */
3748 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3749 if (reachable)
3750 do_pending_stack_adjust ();
28d81abb 3751
50d1b7a1
MS
3752 expr_stmts_for_value = old_expr_stmts_for_value;
3753 last_expr_value = old_last_expr_value;
3754 last_expr_type = old_last_expr_type;
3755
3756 /* Restore the stack level. */
3757
3758 if (reachable && thisblock->data.block.stack_level != 0)
3759 {
3760 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3761 thisblock->data.block.stack_level, NULL_RTX);
ba716ac9 3762 if (nonlocal_goto_handler_slots != 0)
50d1b7a1
MS
3763 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3764 NULL_RTX);
28d81abb
RK
3765 }
3766
3767 /* Any gotos out of this block must also do these things.
59257ff7
RK
3768 Also report any gotos with fixups that came to labels in this
3769 level. */
28d81abb
RK
3770 fixup_gotos (thisblock,
3771 thisblock->data.block.stack_level,
3772 thisblock->data.block.cleanups,
3773 thisblock->data.block.first_insn,
3774 dont_jump_in);
3775 }
3776
c7d2d61d
RS
3777 /* Mark the beginning and end of the scope if requested.
3778 We do this now, after running cleanups on the variables
3779 just going out of scope, so they are in scope for their cleanups. */
3780
3781 if (mark_ends)
a97901e6 3782 {
6496a589 3783 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
a97901e6
MM
3784 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3785 }
c7d2d61d
RS
3786 else
3787 /* Get rid of the beginning-mark if we don't make an end-mark. */
3788 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3789
e976b8b2 3790 /* Restore the temporary level of TARGET_EXPRs. */
3f1d071b 3791 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
e976b8b2 3792
28d81abb
RK
3793 /* Restore block_stack level for containing block. */
3794
3795 stack_block_stack = thisblock->data.block.innermost_stack_block;
3796 POPSTACK (block_stack);
3797
3798 /* Pop the stack slot nesting and free any slots at this level. */
3799 pop_temp_slots ();
3800}
3801\f
7393c642
RK
3802/* Generate code to save the stack pointer at the start of the current block
3803 and set up to restore it on exit. */
3804
3805void
3806save_stack_pointer ()
3807{
3808 struct nesting *thisblock = block_stack;
3809
3810 if (thisblock->data.block.stack_level == 0)
3811 {
3812 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3813 &thisblock->data.block.stack_level,
3814 thisblock->data.block.first_insn);
3815 stack_block_stack = thisblock;
3816 }
3817}
3818\f
28d81abb 3819/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3820 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3821
3822void
3823expand_decl (decl)
b3694847 3824 tree decl;
28d81abb 3825{
3f1d071b 3826 struct nesting *thisblock;
ca695ac9
JB
3827 tree type;
3828
ca695ac9 3829 type = TREE_TYPE (decl);
28d81abb 3830
eabb9ed0
RK
3831 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3832 type in case this node is used in a reference. */
3833 if (TREE_CODE (decl) == CONST_DECL)
3834 {
3835 DECL_MODE (decl) = TYPE_MODE (type);
3836 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3837 DECL_SIZE (decl) = TYPE_SIZE (type);
3838 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3839 return;
3840 }
28d81abb 3841
eabb9ed0
RK
3842 /* Otherwise, only automatic variables need any expansion done. Static and
3843 external variables, and external functions, will be handled by
3844 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3845 nothing. PARM_DECLs are handled in `assign_parms'. */
28d81abb
RK
3846 if (TREE_CODE (decl) != VAR_DECL)
3847 return;
eabb9ed0 3848
44fe2e80 3849 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3850 return;
3851
3f1d071b
BS
3852 thisblock = block_stack;
3853
28d81abb
RK
3854 /* Create the RTL representation for the variable. */
3855
3856 if (type == error_mark_node)
19e7881c 3857 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1da68f56 3858
28d81abb
RK
3859 else if (DECL_SIZE (decl) == 0)
3860 /* Variable with incomplete type. */
3861 {
abde42f7 3862 rtx x;
28d81abb
RK
3863 if (DECL_INITIAL (decl) == 0)
3864 /* Error message was already done; now avoid a crash. */
abde42f7 3865 x = gen_rtx_MEM (BLKmode, const0_rtx);
28d81abb
RK
3866 else
3867 /* An initializer is going to decide the size of this array.
3868 Until we know the size, represent its address with a reg. */
abde42f7 3869 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3bdf5ad1 3870
abde42f7
JH
3871 set_mem_attributes (x, decl, 1);
3872 SET_DECL_RTL (decl, x);
28d81abb
RK
3873 }
3874 else if (DECL_MODE (decl) != BLKmode
3875 /* If -ffloat-store, don't put explicit float vars
3876 into regs. */
3877 && !(flag_float_store
3878 && TREE_CODE (type) == REAL_TYPE)
3879 && ! TREE_THIS_VOLATILE (decl)
37a08a29 3880 && (DECL_REGISTER (decl) || optimize))
28d81abb
RK
3881 {
3882 /* Automatic variable that can go in a register. */
98f3b471 3883 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
3884 enum machine_mode reg_mode
3885 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3886
19e7881c 3887 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
0d4903b8
RK
3888
3889 if (GET_CODE (DECL_RTL (decl)) == REG)
3890 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3891 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3892 {
3893 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3894 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3895 }
3896
7f070d5e
RK
3897 mark_user_reg (DECL_RTL (decl));
3898
e5e809f4 3899 if (POINTER_TYPE_P (type))
7f070d5e 3900 mark_reg_pointer (DECL_RTL (decl),
bdb429a5 3901 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
258a120b
JM
3902
3903 maybe_set_unchanging (DECL_RTL (decl), decl);
d96a6d1a
JM
3904
3905 /* If something wants our address, try to use ADDRESSOF. */
3906 if (TREE_ADDRESSABLE (decl))
3907 put_var_into_stack (decl);
28d81abb 3908 }
0df15c2c 3909
4559fd9e 3910 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
5e4ef18a 3911 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
3912 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3913 STACK_CHECK_MAX_VAR_SIZE)))
28d81abb
RK
3914 {
3915 /* Variable of fixed size that goes on the stack. */
3916 rtx oldaddr = 0;
3917 rtx addr;
0d4903b8 3918 rtx x;
28d81abb
RK
3919
3920 /* If we previously made RTL for this decl, it must be an array
3921 whose size was determined by the initializer.
3922 The old address was a register; set that register now
3923 to the proper address. */
19e7881c 3924 if (DECL_RTL_SET_P (decl))
28d81abb
RK
3925 {
3926 if (GET_CODE (DECL_RTL (decl)) != MEM
3927 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3928 abort ();
3929 oldaddr = XEXP (DECL_RTL (decl), 0);
3930 }
3931
28d81abb
RK
3932 /* Set alignment we actually gave this decl. */
3933 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3934 : GET_MODE_BITSIZE (DECL_MODE (decl)));
11cf4d18 3935 DECL_USER_ALIGN (decl) = 0;
28d81abb 3936
9432c136 3937 x = assign_temp (decl, 1, 1, 1);
0d4903b8
RK
3938 set_mem_attributes (x, decl, 1);
3939 SET_DECL_RTL (decl, x);
3940
28d81abb
RK
3941 if (oldaddr)
3942 {
3943 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3944 if (addr != oldaddr)
3945 emit_move_insn (oldaddr, addr);
3946 }
28d81abb
RK
3947 }
3948 else
3949 /* Dynamic-size object: must push space on the stack. */
3950 {
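 /* A GNU C / C99 variable-length array such as `char buf[n]', whose
 DECL_SIZE_UNIT is not an INTEGER_CST, lands here; so do large
 fixed-size objects when stack checking is enabled. */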
abde42f7 3951 rtx address, size, x;
28d81abb
RK
3952
3953 /* Record the stack pointer on entry to block, if have
3954 not already done so. */
7393c642
RK
3955 do_pending_stack_adjust ();
3956 save_stack_pointer ();
28d81abb 3957
4d9af632
JM
3958 /* In function-at-a-time mode, variable_size doesn't expand this,
3959 so do it now. */
3960 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3961 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3962 const0_rtx, VOIDmode, 0);
3963
28d81abb 3964 /* Compute the variable's size, in bytes. */
4559fd9e 3965 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3966 free_temp_slots ();
3967
ff91ad08 3968 /* Allocate space on the stack for the variable. Note that
4381f7c2 3969 DECL_ALIGN says how the variable is to be aligned and we
ff91ad08
RK
3970 cannot use it to conclude anything about the alignment of
3971 the size. */
37366632 3972 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3973 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3974
28d81abb 3975 /* Reference the variable indirect through that rtx. */
abde42f7
JH
3976 x = gen_rtx_MEM (DECL_MODE (decl), address);
3977 set_mem_attributes (x, decl, 1);
3978 SET_DECL_RTL (decl, x);
28d81abb 3979
2207e295 3980
28d81abb
RK
3981 /* Indicate the alignment we actually gave this variable. */
3982#ifdef STACK_BOUNDARY
3983 DECL_ALIGN (decl) = STACK_BOUNDARY;
3984#else
3985 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3986#endif
11cf4d18 3987 DECL_USER_ALIGN (decl) = 0;
28d81abb 3988 }
28d81abb
RK
3989}
3990\f
3991/* Emit code to perform the initialization of a declaration DECL. */
3992
3993void
3994expand_decl_init (decl)
3995 tree decl;
3996{
b4ac57ab
RS
3997 int was_used = TREE_USED (decl);
3998
ac79cd5a
RK
3999 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4000 for static decls. */
4001 if (TREE_CODE (decl) == CONST_DECL
4002 || TREE_STATIC (decl))
28d81abb
RK
4003 return;
4004
4005 /* Compute and store the initial value now. */
4006
4007 if (DECL_INITIAL (decl) == error_mark_node)
4008 {
4009 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
e5e809f4 4010
28d81abb 4011 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
e5e809f4 4012 || code == POINTER_TYPE || code == REFERENCE_TYPE)
28d81abb
RK
4013 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4014 0, 0);
4015 emit_queue ();
4016 }
4017 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4018 {
4019 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4020 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4021 emit_queue ();
4022 }
4023
b4ac57ab
RS
4024 /* Don't let the initialization count as "using" the variable. */
4025 TREE_USED (decl) = was_used;
4026
28d81abb 4027 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 4028 preserve_temp_slots (NULL_RTX);
28d81abb
RK
4029 free_temp_slots ();
4030}
4031
4032/* CLEANUP is an expression to be executed at exit from this binding contour;
4033 for example, in C++, it might call the destructor for this variable.
4034
4847c938
MS
4035 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4036 CLEANUP multiple times, and have the correct semantics. This
e976b8b2
MS
4037 happens in exception handling, and for gotos, returns, and breaks
4038 that leave the current scope.
28d81abb
RK
4039
4040 If CLEANUP is nonzero and DECL is zero, we record a cleanup
6d2f8887 4041 that is not associated with any particular variable. */
28d81abb
RK
4042
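/* Usage sketch (illustrative): for a C++ declaration `T x;' with a
   destructor, a frontend does roughly

     expand_decl (x_decl);
     expand_decl_cleanup (x_decl, destructor_call);

   where destructor_call stands for whatever tree the frontend builds
   for `x.~T ()'. */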
4043int
4044expand_decl_cleanup (decl, cleanup)
4045 tree decl, cleanup;
4046{
3f1d071b 4047 struct nesting *thisblock;
28d81abb
RK
4048
4049 /* Error if we are not in any block. */
01d939e8 4050 if (cfun == 0 || block_stack == 0)
28d81abb
RK
4051 return 0;
4052
3f1d071b
BS
4053 thisblock = block_stack;
4054
28d81abb
RK
4055 /* Record the cleanup if there is one. */
4056
4057 if (cleanup != 0)
4058 {
e976b8b2
MS
4059 tree t;
4060 rtx seq;
4061 tree *cleanups = &thisblock->data.block.cleanups;
4062 int cond_context = conditional_context ();
4063
4064 if (cond_context)
4065 {
4066 rtx flag = gen_reg_rtx (word_mode);
4067 rtx set_flag_0;
4068 tree cond;
4069
4070 start_sequence ();
4071 emit_move_insn (flag, const0_rtx);
4072 set_flag_0 = get_insns ();
4073 end_sequence ();
4074
4075 thisblock->data.block.last_unconditional_cleanup
2f937369 4076 = emit_insn_after (set_flag_0,
e976b8b2
MS
4077 thisblock->data.block.last_unconditional_cleanup);
4078
4079 emit_move_insn (flag, const1_rtx);
4080
b0c48229
NB
4081 cond = build_decl (VAR_DECL, NULL_TREE,
4082 (*lang_hooks.types.type_for_mode) (word_mode, 1));
19e7881c 4083 SET_DECL_RTL (cond, flag);
e976b8b2
MS
4084
4085 /* Conditionalize the cleanup. */
4086 cleanup = build (COND_EXPR, void_type_node,
78ef5b89 4087 (*lang_hooks.truthvalue_conversion) (cond),
e976b8b2
MS
4088 cleanup, integer_zero_node);
4089 cleanup = fold (cleanup);
4090
e2500fed 4091 cleanups = &thisblock->data.block.cleanups;
e976b8b2
MS
4092 }
4093
4847c938 4094 cleanup = unsave_expr (cleanup);
e976b8b2 4095
1f8f4a0b 4096 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
e976b8b2
MS
4097
4098 if (! cond_context)
4099 /* If this block has a cleanup, it belongs in stack_block_stack. */
4100 stack_block_stack = thisblock;
4101
4102 if (cond_context)
4103 {
4104 start_sequence ();
4105 }
4847c938 4106
52a11cbf 4107 if (! using_eh_for_cleanups_p)
e976b8b2 4108 TREE_ADDRESSABLE (t) = 1;
52a11cbf
RH
4109 else
4110 expand_eh_region_start ();
4111
e976b8b2
MS
4112 if (cond_context)
4113 {
4114 seq = get_insns ();
4115 end_sequence ();
7e82801f
MS
4116 if (seq)
4117 thisblock->data.block.last_unconditional_cleanup
2f937369
DM
4118 = emit_insn_after (seq,
4119 thisblock->data.block.last_unconditional_cleanup);
e976b8b2
MS
4120 }
4121 else
4122 {
4123 thisblock->data.block.last_unconditional_cleanup
4124 = get_last_insn ();
ef97beff
JJ
4125 /* When we insert instructions after the last unconditional cleanup,
4126 we don't adjust last_insn. That means that a later add_insn will
4127 clobber the instructions we've just added. The easiest way to
4128 fix this is to just insert another instruction here, so that the
4129 instructions inserted after the last unconditional cleanup are
4130 never the last instruction. */
6496a589 4131 emit_note (NULL, NOTE_INSN_DELETED);
e976b8b2 4132 }
28d81abb
RK
4133 }
4134 return 1;
4135}
659e5a7a
JM
4136
4137/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4138 is thrown. */
4139
4140int
4141expand_decl_cleanup_eh (decl, cleanup, eh_only)
4142 tree decl, cleanup;
4143 int eh_only;
4144{
4145 int ret = expand_decl_cleanup (decl, cleanup);
4146 if (cleanup && ret)
4147 {
4148 tree node = block_stack->data.block.cleanups;
4149 CLEANUP_EH_ONLY (node) = eh_only;
4150 }
4151 return ret;
4152}
28d81abb
RK
4153\f
4154/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4155 DECL_ELTS is the list of elements that belong to DECL's type.
4156 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4157
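/* Illustration: for a C++ anonymous union

     union { int i; float f; };

   DECL is the compiler-generated variable for the union itself and
   DECL_ELTS lists the member VAR_DECLs i and f, which all share
   DECL's storage (see the RTL assignment loop below). */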
4158void
4159expand_anon_union_decl (decl, cleanup, decl_elts)
4160 tree decl, cleanup, decl_elts;
4161{
01d939e8 4162 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
28d81abb 4163 rtx x;
8a693bd0 4164 tree t;
28d81abb 4165
8a693bd0
MM
4166 /* If any of the elements are addressable, so is the entire union. */
4167 for (t = decl_elts; t; t = TREE_CHAIN (t))
4168 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4169 {
4170 TREE_ADDRESSABLE (decl) = 1;
4171 break;
4172 }
4381f7c2 4173
ec5cd386
RK
4174 expand_decl (decl);
4175 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
4176 x = DECL_RTL (decl);
4177
8a693bd0
MM
4178 /* Go through the elements, assigning RTL to each. */
4179 for (t = decl_elts; t; t = TREE_CHAIN (t))
28d81abb 4180 {
8a693bd0
MM
4181 tree decl_elt = TREE_VALUE (t);
4182 tree cleanup_elt = TREE_PURPOSE (t);
28d81abb
RK
4183 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4184
3256b817
JJ
4185 /* If any of the elements are used, so is the entire
4186 union. */
4187 if (TREE_USED (decl_elt))
4188 TREE_USED (decl) = 1;
4189
7b9032dd
JM
4190 /* Propagate the union's alignment to the elements. */
4191 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
11cf4d18 4192 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
7b9032dd
JM
4193
4194 /* If the element has BLKmode and the union doesn't, the union is
4195 aligned such that the element doesn't need to have BLKmode, so
4196 change the element's mode to the appropriate one for its size. */
4197 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4198 DECL_MODE (decl_elt) = mode
05bccae2 4199 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
7b9032dd 4200
28d81abb
RK
4201 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4202 instead create a new MEM rtx with the proper mode. */
4203 if (GET_CODE (x) == MEM)
4204 {
4205 if (mode == GET_MODE (x))
19e7881c 4206 SET_DECL_RTL (decl_elt, x);
28d81abb 4207 else
f1ec5147 4208 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
28d81abb
RK
4209 }
4210 else if (GET_CODE (x) == REG)
4211 {
4212 if (mode == GET_MODE (x))
19e7881c 4213 SET_DECL_RTL (decl_elt, x);
28d81abb 4214 else
ddef6bc7 4215 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
28d81abb
RK
4216 }
4217 else
4218 abort ();
4219
4220 /* Record the cleanup if there is one. */
4221
4222 if (cleanup != 0)
4223 thisblock->data.block.cleanups
1f8f4a0b
MM
4224 = tree_cons (decl_elt, cleanup_elt,
4225 thisblock->data.block.cleanups);
28d81abb
RK
4226 }
4227}
4228\f
4229/* Expand a list of cleanups LIST.
4230 Elements may be expressions or may be nested lists.
4231
4232 If DONT_DO is nonnull, then any list-element
4233 whose TREE_PURPOSE matches DONT_DO is omitted.
4234 This is sometimes used to avoid a cleanup associated with
4e44807b
MS
4235 a value that is being returned out of the scope.
4236
4237 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
50d1b7a1
MS
4238 goto and handle protection regions specially in that case.
4239
4240 If REACHABLE, we emit code, otherwise just inform the exception handling
4241 code about this finalization. */
28d81abb
RK
4242
4243static void
50d1b7a1 4244expand_cleanups (list, dont_do, in_fixup, reachable)
28d81abb
RK
4245 tree list;
4246 tree dont_do;
4e44807b 4247 int in_fixup;
50d1b7a1 4248 int reachable;
28d81abb
RK
4249{
4250 tree tail;
4251 for (tail = list; tail; tail = TREE_CHAIN (tail))
4252 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4253 {
4254 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
50d1b7a1 4255 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
28d81abb
RK
4256 else
4257 {
52a11cbf
RH
4258 if (! in_fixup && using_eh_for_cleanups_p)
4259 expand_eh_region_end_cleanup (TREE_VALUE (tail));
61d6b1cc 4260
659e5a7a 4261 if (reachable && !CLEANUP_EH_ONLY (tail))
50d1b7a1
MS
4262 {
4263 /* Cleanups may be run multiple times. For example,
4264 when exiting a binding contour, we expand the
4265 cleanups associated with that contour. When a goto
4266 within that binding contour has a target outside that
4267 contour, it will expand all cleanups from its scope to
4268 the target. Though the cleanups are expanded multiple
4269 times, the control paths are non-overlapping so the
4270 cleanups will not be executed twice. */
9762d48d 4271
52a11cbf
RH
4272 /* We may need to protect from outer cleanups. */
4273 if (in_fixup && using_eh_for_cleanups_p)
4274 {
4275 expand_eh_region_start ();
e5e809f4 4276
52a11cbf
RH
4277 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4278
4279 expand_eh_region_end_fixup (TREE_VALUE (tail));
4280 }
4281 else
4282 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
e5e809f4 4283
50d1b7a1
MS
4284 free_temp_slots ();
4285 }
28d81abb
RK
4286 }
4287 }
4288}
4289
e976b8b2
MS
4290/* Mark the context we are emitting RTL for as a conditional
4291 context, so that any cleanup actions we register with
4292 expand_decl_cleanup will be properly conditionalized when those
4293 cleanup actions are later performed. Must be called before any
956d6950 4294 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
4295
4296void
956d6950 4297start_cleanup_deferral ()
e976b8b2 4298{
e3eef942
JW
4299 /* block_stack can be NULL if we are inside the parameter list. It is
4300 OK to do nothing, because cleanups aren't possible here. */
4301 if (block_stack)
4302 ++block_stack->data.block.conditional_code;
e976b8b2
MS
4303}
4304
4305/* Mark the end of a conditional region of code. Because cleanup
956d6950 4306 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
4307 after we end the currently deferred cleanups; only after we end all
4308 deferred cleanups are we back in unconditional code. */
4309
4310void
956d6950 4311end_cleanup_deferral ()
e976b8b2 4312{
e3eef942
JW
4313 /* block_stack can be NULL if we are inside the parameter list. It is
4314 OK to do nothing, because cleanups aren't possible here. */
4315 if (block_stack)
4316 --block_stack->data.block.conditional_code;
e976b8b2
MS
4317}
4318
28d81abb
RK
4319/* Move all cleanups from the current block_stack
4320 to the containing block_stack, where they are assumed to
4321 have been created. If anything can cause a temporary to
4322 be created, but not expanded for more than one level of
4323 block_stacks, then this code will have to change. */
4324
4325void
4326move_cleanups_up ()
4327{
4328 struct nesting *block = block_stack;
4329 struct nesting *outer = block->next;
4330
4331 outer->data.block.cleanups
4332 = chainon (block->data.block.cleanups,
4333 outer->data.block.cleanups);
4334 block->data.block.cleanups = 0;
4335}
4336
4337tree
4338last_cleanup_this_contour ()
4339{
4340 if (block_stack == 0)
4341 return 0;
4342
4343 return block_stack->data.block.cleanups;
4344}
4345
4346/* Return 1 if there are any pending cleanups at this point.
4347 If THIS_CONTOUR is nonzero, check the current contour as well.
4348 Otherwise, look only at the contours that enclose this one. */
4349
4350int
4351any_pending_cleanups (this_contour)
4352 int this_contour;
4353{
4354 struct nesting *block;
4355
01d939e8 4356 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
28d81abb
RK
4357 return 0;
4358
4359 if (this_contour && block_stack->data.block.cleanups != NULL)
4360 return 1;
4361 if (block_stack->data.block.cleanups == 0
e976b8b2 4362 && block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
4363 return 0;
4364
4365 for (block = block_stack->next; block; block = block->next)
4366 if (block->data.block.cleanups != 0)
4367 return 1;
4368
4369 return 0;
4370}
4371\f
4372/* Enter a case (Pascal) or switch (C) statement.
4373 Push a block onto case_stack and nesting_stack
4374 to accumulate the case-labels that are seen
4375 and to record the labels generated for the statement.
4376
4377 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4378 Otherwise, this construct is transparent for `exit_something'.
4379
4380 EXPR is the index-expression to be dispatched on.
4381 TYPE is its nominal type. We could simply convert EXPR to this type,
4382 but instead we take short cuts. */
4383
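/* Usage sketch (illustrative): a frontend expands

     switch (e) { case 1: ...; default: ...; }

   roughly as

     expand_start_case (1, e, type, "switch statement");
     ... pushcase / pushcase_range for each label seen ...
     ... and the matching end-of-case call when the body is done ...

   with EXIT_FLAG nonzero so `exit_something' can leave the
   construct. */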
4384void
4385expand_start_case (exit_flag, expr, type, printname)
4386 int exit_flag;
4387 tree expr;
4388 tree type;
dff01034 4389 const char *printname;
28d81abb 4390{
b3694847 4391 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
4392
4393 /* Make an entry on case_stack for the case we are entering. */
4394
e2500fed 4395 thiscase->desc = CASE_NESTING;
28d81abb
RK
4396 thiscase->next = case_stack;
4397 thiscase->all = nesting_stack;
4398 thiscase->depth = ++nesting_depth;
4399 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4400 thiscase->data.case_stmt.case_list = 0;
4401 thiscase->data.case_stmt.index_expr = expr;
4402 thiscase->data.case_stmt.nominal_type = type;
4403 thiscase->data.case_stmt.default_label = 0;
28d81abb 4404 thiscase->data.case_stmt.printname = printname;
a11759a3 4405 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
4406 case_stack = thiscase;
4407 nesting_stack = thiscase;
4408
4409 do_pending_stack_adjust ();
4410
4411 /* Make sure case_stmt.start points to something that won't
4412 need any transformation before expand_end_case. */
4413 if (GET_CODE (get_last_insn ()) != NOTE)
6496a589 4414 emit_note (NULL, NOTE_INSN_DELETED);
28d81abb
RK
4415
4416 thiscase->data.case_stmt.start = get_last_insn ();
4c581243 4417
956d6950 4418 start_cleanup_deferral ();
28d81abb
RK
4419}
4420
4421/* Start a "dummy case statement" within which case labels are invalid
4422 and are not connected to any larger real case statement.
4423 This can be used if you don't want to let a case statement jump
4424 into the middle of certain kinds of constructs. */
4425
4426void
4427expand_start_case_dummy ()
4428{
b3694847 4429 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
4430
4431 /* Make an entry on case_stack for the dummy. */
4432
e2500fed 4433 thiscase->desc = CASE_NESTING;
28d81abb
RK
4434 thiscase->next = case_stack;
4435 thiscase->all = nesting_stack;
4436 thiscase->depth = ++nesting_depth;
4437 thiscase->exit_label = 0;
4438 thiscase->data.case_stmt.case_list = 0;
4439 thiscase->data.case_stmt.start = 0;
4440 thiscase->data.case_stmt.nominal_type = 0;
4441 thiscase->data.case_stmt.default_label = 0;
28d81abb
RK
4442 case_stack = thiscase;
4443 nesting_stack = thiscase;
956d6950 4444 start_cleanup_deferral ();
28d81abb
RK
4445}
4446
4447/* End a dummy case statement. */
4448
4449void
4450expand_end_case_dummy ()
4451{
956d6950 4452 end_cleanup_deferral ();
28d81abb
RK
4453 POPSTACK (case_stack);
4454}
4455
4456/* Return the data type of the index-expression
4457 of the innermost case statement, or null if none. */
4458
4459tree
4460case_index_expr_type ()
4461{
4462 if (case_stack)
4463 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4464 return 0;
4465}
4466\f
a11759a3
JR
4467static void
4468check_seenlabel ()
4469{
4470 /* If this is the first label, warn if any insns have been emitted. */
4471 if (case_stack->data.case_stmt.line_number_status >= 0)
4472 {
4473 rtx insn;
4474
4475 restore_line_number_status
4476 (case_stack->data.case_stmt.line_number_status);
4477 case_stack->data.case_stmt.line_number_status = -1;
4478
4479 for (insn = case_stack->data.case_stmt.start;
4480 insn;
4481 insn = NEXT_INSN (insn))
4482 {
4483 if (GET_CODE (insn) == CODE_LABEL)
4484 break;
4485 if (GET_CODE (insn) != NOTE
4486 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4487 {
4488 do
4489 insn = PREV_INSN (insn);
0dacbd0e
JW
4490 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4491
4492 /* If insn is zero, then there must have been a syntax error. */
4493 if (insn)
4381f7c2
KH
4494 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4495 NOTE_LINE_NUMBER (insn),
0dacbd0e
JW
4496 "unreachable code at beginning of %s",
4497 case_stack->data.case_stmt.printname);
a11759a3
JR
4498 break;
4499 }
4500 }
4501 }
4502}
4503
28d81abb
RK
4504/* Accumulate one case or default label inside a case or switch statement.
4505 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4506 The function CONVERTER, when applied to arguments T and V,
4507 converts the value V to the type T.
28d81abb
RK
4508
4509 If not currently inside a case or switch statement, return 1 and do
4510 nothing. The caller will print a language-specific error message.
4511 If VALUE is a duplicate or overlaps, return 2 and do nothing
4512 except store the (first) duplicate node in *DUPLICATE.
4513 If VALUE is out of range, return 3 and do nothing.
e976b8b2 4514 If we are jumping into the scope of a cleanup or var-sized array, return 5.
28d81abb
RK
4515 Return 0 on success.
4516
4517 Extended to handle range statements. */
4518
4519int
f52fba84 4520pushcase (value, converter, label, duplicate)
b3694847 4521 tree value;
cdadb1dd 4522 tree (*converter) PARAMS ((tree, tree));
b3694847 4523 tree label;
28d81abb
RK
4524 tree *duplicate;
4525{
28d81abb
RK
4526 tree index_type;
4527 tree nominal_type;
4528
4529 /* Fail if not inside a real case statement. */
4530 if (! (case_stack && case_stack->data.case_stmt.start))
4531 return 1;
4532
4533 if (stack_block_stack
4534 && stack_block_stack->depth > case_stack->depth)
4535 return 5;
4536
4537 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4538 nominal_type = case_stack->data.case_stmt.nominal_type;
4539
4540 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4541 if (index_type == error_mark_node)
4542 return 0;
4543
2f985ca6
JW
4544 /* Convert VALUE to the type in which the comparisons are nominally done. */
4545 if (value != 0)
4546 value = (*converter) (nominal_type, value);
4547
feb60352
R
4548 check_seenlabel ();
4549
28d81abb
RK
4550 /* Fail if this value is out of range for the actual type of the index
4551 (which may be narrower than NOMINAL_TYPE). */
14a774a9
RK
4552 if (value != 0
4553 && (TREE_CONSTANT_OVERFLOW (value)
4554 || ! int_fits_type_p (value, index_type)))
28d81abb
RK
4555 return 3;
4556
56cb9733 4557 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4558}
4559
956d6950
JL
4560/* Like pushcase but this case applies to all values between VALUE1 and
4561 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4562 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4563 starts at VALUE1 and ends at the highest value of the index type.
4564 If both are NULL, this case applies to all values.
4565
4566 The return value is the same as that of pushcase but there is one
4567 additional error code: 4 means the specified range was empty. */
28d81abb
RK
4568
4569int
f52fba84 4570pushcase_range (value1, value2, converter, label, duplicate)
b3694847 4571 tree value1, value2;
cdadb1dd 4572 tree (*converter) PARAMS ((tree, tree));
b3694847 4573 tree label;
28d81abb
RK
4574 tree *duplicate;
4575{
28d81abb
RK
4576 tree index_type;
4577 tree nominal_type;
4578
4579 /* Fail if not inside a real case statement. */
4580 if (! (case_stack && case_stack->data.case_stmt.start))
4581 return 1;
4582
4583 if (stack_block_stack
4584 && stack_block_stack->depth > case_stack->depth)
4585 return 5;
4586
4587 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4588 nominal_type = case_stack->data.case_stmt.nominal_type;
4589
4590 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4591 if (index_type == error_mark_node)
4592 return 0;
4593
a11759a3 4594 check_seenlabel ();
28d81abb 4595
956d6950
JL
4596 /* Convert VALUEs to the type in which the comparisons are nominally done
4597 and replace any unspecified value with the corresponding bound. */
4598 if (value1 == 0)
1974bfb1 4599 value1 = TYPE_MIN_VALUE (index_type);
956d6950 4600 if (value2 == 0)
1974bfb1 4601 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
4602
4603 /* Fail if the range is empty. Do this before any conversion since
4604 we want to allow out-of-range empty ranges. */
14a774a9 4605 if (value2 != 0 && tree_int_cst_lt (value2, value1))
956d6950
JL
4606 return 4;
4607
4381f7c2 4608 /* If the max was unbounded, use the max of the nominal_type we are
e1ee5cdc
RH
4609 converting to. Do this after the < check above to suppress false
4610 positives. */
14a774a9 4611 if (value2 == 0)
e1ee5cdc 4612 value2 = TYPE_MAX_VALUE (nominal_type);
28d81abb 4613
2f985ca6
JW
4614 value1 = (*converter) (nominal_type, value1);
4615 value2 = (*converter) (nominal_type, value2);
4616
28d81abb 4617 /* Fail if these values are out of range. */
956d6950
JL
4618 if (TREE_CONSTANT_OVERFLOW (value1)
4619 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
4620 return 3;
4621
956d6950
JL
4622 if (TREE_CONSTANT_OVERFLOW (value2)
4623 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
4624 return 3;
4625
57641239
RK
4626 return add_case_node (value1, value2, label, duplicate);
4627}
4628
4629/* Do the actual insertion of a case label for pushcase and pushcase_range
4630 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4631 slowdown for large switch statements. */
4632
56cb9733 4633int
57641239
RK
4634add_case_node (low, high, label, duplicate)
4635 tree low, high;
4636 tree label;
4637 tree *duplicate;
4638{
4639 struct case_node *p, **q, *r;
4640
56cb9733
MM
4641 /* If there's no HIGH value, then this is not a case range; it's
4642 just a simple case label. But that's just a degenerate case
4643 range. */
4644 if (!high)
4645 high = low;
4646
4647 /* Handle default labels specially. */
4648 if (!high && !low)
4649 {
4650 if (case_stack->data.case_stmt.default_label != 0)
4651 {
4652 *duplicate = case_stack->data.case_stmt.default_label;
4653 return 2;
4654 }
4655 case_stack->data.case_stmt.default_label = label;
4656 expand_label (label);
4657 return 0;
4658 }
4659
57641239
RK
4660 q = &case_stack->data.case_stmt.case_list;
4661 p = *q;
4662
69d4ca36 4663 while ((r = *q))
28d81abb 4664 {
57641239
RK
4665 p = r;
4666
4667 /* Keep going past elements distinctly greater than HIGH. */
4668 if (tree_int_cst_lt (high, p->low))
4669 q = &p->left;
4670
4671 /* or distinctly less than LOW. */
4672 else if (tree_int_cst_lt (p->high, low))
4673 q = &p->right;
4674
4675 else
28d81abb 4676 {
57641239
RK
4677 /* We have an overlap; this is an error. */
4678 *duplicate = p->code_label;
28d81abb
RK
4679 return 2;
4680 }
4681 }
4682
1da68f56 4683 /* Add this label to the chain, and succeed. */
28d81abb 4684
e2500fed 4685 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
1da68f56 4686 r->low = low;
28d81abb 4687
57641239 4688 /* If the bounds are equal, turn this into the one-value case. */
57641239
RK
4689 if (tree_int_cst_equal (low, high))
4690 r->high = r->low;
4691 else
1da68f56 4692 r->high = high;
57641239
RK
4693
4694 r->code_label = label;
28d81abb
RK
4695 expand_label (label);
4696
57641239
RK
4697 *q = r;
4698 r->parent = p;
4699 r->left = 0;
4700 r->right = 0;
4701 r->balance = 0;
4702
4703 while (p)
4704 {
4705 struct case_node *s;
4706
4707 if (r == p->left)
4708 {
4709 int b;
4710
4711 if (! (b = p->balance))
4712 /* Growth propagation from left side. */
4713 p->balance = -1;
4714 else if (b < 0)
4715 {
4716 if (r->balance < 0)
4717 {
4718 /* R-Rotation */
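 /* Before and after (A = r->left, s = r->right, C = p->right):

        p                r
       / \              / \
      r   C    ==>     A   p
     / \                  / \
    A   s                s   C                                    */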
51723711 4719 if ((p->left = s = r->right))
57641239
RK
4720 s->parent = p;
4721
4722 r->right = p;
4723 p->balance = 0;
4724 r->balance = 0;
4725 s = p->parent;
4726 p->parent = r;
4727
51723711 4728 if ((r->parent = s))
57641239
RK
4729 {
4730 if (s->left == p)
4731 s->left = r;
4732 else
4733 s->right = r;
4734 }
4735 else
4736 case_stack->data.case_stmt.case_list = r;
4737 }
4738 else
4739 /* r->balance == +1 */
4740 {
5720c7e7
RK
4741 /* LR-Rotation */
4742
57641239
RK
4743 int b2;
4744 struct case_node *t = r->right;
4745
51723711 4746 if ((p->left = s = t->right))
57641239
RK
4747 s->parent = p;
4748
4749 t->right = p;
51723711 4750 if ((r->right = s = t->left))
57641239
RK
4751 s->parent = r;
4752
4753 t->left = r;
4754 b = t->balance;
4755 b2 = b < 0;
4756 p->balance = b2;
4757 b2 = -b2 - b;
4758 r->balance = b2;
4759 t->balance = 0;
4760 s = p->parent;
4761 p->parent = t;
4762 r->parent = t;
4763
51723711 4764 if ((t->parent = s))
57641239
RK
4765 {
4766 if (s->left == p)
4767 s->left = t;
4768 else
4769 s->right = t;
4770 }
4771 else
4772 case_stack->data.case_stmt.case_list = t;
4773 }
4774 break;
4775 }
4776
4777 else
4778 {
4779 /* p->balance == +1; growth of left side balances the node. */
4780 p->balance = 0;
4781 break;
4782 }
4783 }
4784 else
4785 /* r == p->right */
4786 {
4787 int b;
4788
4789 if (! (b = p->balance))
4790 /* Growth propagation from right side. */
4791 p->balance++;
4792 else if (b > 0)
4793 {
4794 if (r->balance > 0)
4795 {
4796 /* L-Rotation */
4797
51723711 4798 if ((p->right = s = r->left))
57641239
RK
4799 s->parent = p;
4800
4801 r->left = p;
4802 p->balance = 0;
4803 r->balance = 0;
4804 s = p->parent;
4805 p->parent = r;
51723711 4806 if ((r->parent = s))
57641239
RK
4807 {
4808 if (s->left == p)
4809 s->left = r;
4810 else
4811 s->right = r;
4812 }
4813
4814 else
4815 case_stack->data.case_stmt.case_list = r;
4816 }
4817
4818 else
4819 /* r->balance == -1 */
4820 {
4821 /* RL-Rotation */
4822 int b2;
4823 struct case_node *t = r->left;
4824
51723711 4825 if ((p->right = s = t->left))
57641239
RK
4826 s->parent = p;
4827
4828 t->left = p;
4829
51723711 4830 if ((r->left = s = t->right))
57641239
RK
4831 s->parent = r;
4832
4833 t->right = r;
4834 b = t->balance;
4835 b2 = b < 0;
4836 r->balance = b2;
4837 b2 = -b2 - b;
4838 p->balance = b2;
4839 t->balance = 0;
4840 s = p->parent;
4841 p->parent = t;
4842 r->parent = t;
4843
51723711 4844 if ((t->parent = s))
57641239
RK
4845 {
4846 if (s->left == p)
4847 s->left = t;
4848 else
4849 s->right = t;
4850 }
4851
4852 else
4853 case_stack->data.case_stmt.case_list = t;
4854 }
4855 break;
4856 }
4857 else
4858 {
4859 /* p->balance == -1; growth of right side balances the node. */
4860 p->balance = 0;
4861 break;
4862 }
4863 }
4864
4865 r = p;
4866 p = p->parent;
4867 }
28d81abb
RK
4868
4869 return 0;
4870}
4871\f
94d6511c 4872/* Returns the number of possible values of TYPE.
770ae6cc
RK
4873 Returns -1 if the number is unknown, variable, or if the number does not
4874 fit in a HOST_WIDE_INT.
6f9fdf4d 4875 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
94d6511c
PB
4876 do not increase monotonically (there may be duplicates);
4877 to 1 if the values increase monotonically, but not always by 1;
4878 otherwise sets it to 0. */
4879
4880HOST_WIDE_INT
6f9fdf4d 4881all_cases_count (type, sparseness)
94d6511c 4882 tree type;
6f9fdf4d 4883 int *sparseness;
94d6511c 4884{
770ae6cc
RK
4885 tree t;
4886 HOST_WIDE_INT count, minval, lastval;
4887
6f9fdf4d 4888 *sparseness = 0;
94d6511c
PB
4889
4890 switch (TREE_CODE (type))
4891 {
94d6511c
PB
4892 case BOOLEAN_TYPE:
4893 count = 2;
4894 break;
770ae6cc 4895
94d6511c
PB
4896 case CHAR_TYPE:
4897 count = 1 << BITS_PER_UNIT;
4898 break;
770ae6cc 4899
94d6511c
PB
4900 default:
4901 case INTEGER_TYPE:
770ae6cc
RK
4902 if (TYPE_MAX_VALUE (type) != 0
4903 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4904 TYPE_MIN_VALUE (type))))
4905 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4906 convert (type, integer_zero_node))))
4907 && host_integerp (t, 1))
4908 count = tree_low_cst (t, 1);
94d6511c 4909 else
770ae6cc 4910 return -1;
94d6511c 4911 break;
770ae6cc 4912
94d6511c 4913 case ENUMERAL_TYPE:
770ae6cc
RK
4914 /* Don't waste time with enumeral types with huge values. */
4915 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4916 || TYPE_MAX_VALUE (type) == 0
4917 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4918 return -1;
4919
4920 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
94d6511c 4921 count = 0;
770ae6cc 4922
94d6511c
PB
4923 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4924 {
770ae6cc
RK
4925 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4926
6f9fdf4d
JJ
4927 if (*sparseness == 2 || thisval <= lastval)
4928 *sparseness = 2;
770ae6cc 4929 else if (thisval != minval + count)
6f9fdf4d 4930 *sparseness = 1;
770ae6cc 4931
6f9fdf4d 4932 lastval = thisval;
94d6511c
PB
4933 count++;
4934 }
94d6511c 4935 }
770ae6cc 4936
94d6511c
PB
4937 return count;
4938}
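/* For illustration (a hypothetical enum): `enum e { A = 0, B = 1, C = 5 }'
   yields a count of 3 with *SPARSENESS set to 1, since the values increase
   monotonically but not always by 1, while an enum containing a duplicate
   value such as `enum f { X = 2, Y = 2 }' gets *SPARSENESS set to 2.  */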
4939
94d6511c 4940#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4941 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4942 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4943#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4944 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4945 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
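/* For example, assuming HOST_BITS_PER_CHAR is 8, BITARRAY_SET (ARRAY, 10)
   sets bit 10 % 8 == 2 of ARRAY[10 / 8] == ARRAY[1], and
   BITARRAY_TEST (ARRAY, 10) reads that same bit back.  */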
94d6511c
PB
4946
4947/* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4948 according to the case values we have seen, assuming the case expression
4949 has the given TYPE.
4950 SPARSENESS is as determined by all_cases_count.
4951
9faa82d8 4952 The time needed is proportional to COUNT, unless
94d6511c
PB
4953 SPARSENESS is 2, in which case quadratic time is needed. */
4954
df03cab5 4955void
94d6511c
PB
4956mark_seen_cases (type, cases_seen, count, sparseness)
4957 tree type;
4958 unsigned char *cases_seen;
770ae6cc 4959 HOST_WIDE_INT count;
94d6511c
PB
4960 int sparseness;
4961{
94d6511c 4962 tree next_node_to_try = NULL_TREE;
770ae6cc 4963 HOST_WIDE_INT next_node_offset = 0;
94d6511c 4964
b3694847 4965 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c 4966 tree val = make_node (INTEGER_CST);
770ae6cc 4967
94d6511c 4968 TREE_TYPE (val) = type;
5720c7e7 4969 if (! root)
4381f7c2
KH
4970 /* Do nothing. */
4971 ;
5720c7e7 4972 else if (sparseness == 2)
94d6511c 4973 {
5720c7e7 4974 tree t;
770ae6cc 4975 unsigned HOST_WIDE_INT xlo;
5720c7e7
RK
4976
4977 /* This less efficient loop is only needed to handle
4978 duplicate case values (multiple enum constants
4979 with the same value). */
4980 TREE_TYPE (val) = TREE_TYPE (root->low);
4381f7c2 4981 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5720c7e7 4982 t = TREE_CHAIN (t), xlo++)
94d6511c 4983 {
5720c7e7
RK
4984 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4985 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4986 n = root;
4987 do
94d6511c 4988 {
5720c7e7
RK
4989 /* Keep going past elements distinctly greater than VAL. */
4990 if (tree_int_cst_lt (val, n->low))
4991 n = n->left;
4381f7c2 4992
5720c7e7
RK
4993 /* or distinctly less than VAL. */
4994 else if (tree_int_cst_lt (n->high, val))
4995 n = n->right;
4381f7c2 4996
5720c7e7 4997 else
94d6511c 4998 {
5720c7e7
RK
4999 /* We have found a matching range. */
5000 BITARRAY_SET (cases_seen, xlo);
5001 break;
94d6511c
PB
5002 }
5003 }
5720c7e7
RK
5004 while (n);
5005 }
5006 }
5007 else
5008 {
5009 if (root->left)
5010 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
770ae6cc 5011
5720c7e7
RK
5012 for (n = root; n; n = n->right)
5013 {
5014 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5015 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4381f7c2 5016 while (! tree_int_cst_lt (n->high, val))
94d6511c 5017 {
5720c7e7
RK
5018 /* Calculate (into xlo) the "offset" of the integer (val).
5019 The element with lowest value has offset 0, the next smallest
5020 element has offset 1, etc. */
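		  /* In the enumeral case the offset is VAL's position in
		     TYPE_VALUES; otherwise it is VAL - TYPE_MIN_VALUE,
		     computed below with neg_double/add_double.  E.g.
		     (values assumed) with a minimum of -2, VAL == -2 has
		     offset 0 and VAL == 0 has offset 2.  */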
5021
770ae6cc
RK
5022 unsigned HOST_WIDE_INT xlo;
5023 HOST_WIDE_INT xhi;
5720c7e7 5024 tree t;
770ae6cc 5025
94d6511c
PB
5026 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5027 {
5028 /* The TYPE_VALUES will be in increasing order, so
 5029 start searching where we last ended. */
5030 t = next_node_to_try;
5031 xlo = next_node_offset;
5032 xhi = 0;
5033 for (;;)
5034 {
5035 if (t == NULL_TREE)
5036 {
5037 t = TYPE_VALUES (type);
5038 xlo = 0;
5039 }
5040 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5041 {
5042 next_node_to_try = TREE_CHAIN (t);
5043 next_node_offset = xlo + 1;
5044 break;
5045 }
5046 xlo++;
5047 t = TREE_CHAIN (t);
5048 if (t == next_node_to_try)
5720c7e7
RK
5049 {
5050 xlo = -1;
5051 break;
5052 }
94d6511c
PB
5053 }
5054 }
5055 else
5056 {
5057 t = TYPE_MIN_VALUE (type);
5058 if (t)
5059 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5060 &xlo, &xhi);
5061 else
5062 xlo = xhi = 0;
5063 add_double (xlo, xhi,
5064 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5065 &xlo, &xhi);
5066 }
4381f7c2 5067
770ae6cc 5068 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
94d6511c 5069 BITARRAY_SET (cases_seen, xlo);
770ae6cc 5070
5720c7e7
RK
5071 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5072 1, 0,
5073 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 5074 }
94d6511c
PB
5075 }
5076 }
5077}
5078
740b77b6
AC
5079/* Given a switch statement with an expression that is an enumeration
5080 type, warn if any of the enumeration type's literals are not
5081 covered by the case expressions of the switch. Also, warn if there
5082 are any extra switch cases that are *not* elements of the
5083 enumerated type.
28d81abb 5084
740b77b6 5085 Historical note:
28d81abb 5086
740b77b6
AC
5087 At one stage this function would: ``If all enumeration literals
5088 were covered by the case expressions, turn one of the expressions
5089 into the default expression since it should not be possible to fall
5090 through such a switch.''
5091
5092 That code has since been removed as: ``This optimization is
5093 disabled because it causes valid programs to fail. ANSI C does not
5094 guarantee that an expression with enum type will have a value that
5095 is the same as one of the enumeration literals.'' */
28d81abb
RK
5096
5097void
5098check_for_full_enumeration_handling (type)
5099 tree type;
5100{
b3694847
SS
5101 struct case_node *n;
5102 tree chain;
28d81abb 5103
0f41302f 5104 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
5105 int sparseness = 0;
5106
0f41302f 5107 /* The number of possible selector values. */
94d6511c
PB
5108 HOST_WIDE_INT size;
5109
 5110 /* For each possible selector value, a one iff it has been matched
0f41302f 5111 by a case value alternative. */
94d6511c
PB
5112 unsigned char *cases_seen;
5113
0f41302f 5114 /* The allocated size of cases_seen, in chars. */
770ae6cc 5115 HOST_WIDE_INT bytes_needed;
94d6511c 5116
94d6511c
PB
5117 size = all_cases_count (type, &sparseness);
5118 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 5119
94d6511c 5120 if (size > 0 && size < 600000
c5c76735 5121 /* We deliberately use calloc here, not xcalloc, so that we can suppress
4381f7c2 5122 this optimization if we don't have enough memory rather than
c5c76735 5123 aborting, as xmalloc would do. */
f4ce9d90
KG
5124 && (cases_seen =
5125 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
28d81abb 5126 {
770ae6cc 5127 HOST_WIDE_INT i;
94d6511c 5128 tree v = TYPE_VALUES (type);
28d81abb 5129
94d6511c
PB
5130 /* The time complexity of this code is normally O(N), where
 5131 N is the number of members in the enumerated type.
e0a2f705 5132 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 5133 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
5134
5135 mark_seen_cases (type, cases_seen, size, sparseness);
5136
770ae6cc 5137 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4381f7c2 5138 if (BITARRAY_TEST (cases_seen, i) == 0)
770ae6cc
RK
5139 warning ("enumeration value `%s' not handled in switch",
5140 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
94d6511c
PB
5141
5142 free (cases_seen);
28d81abb
RK
5143 }
5144
5145 /* Now we go the other way around; we warn if there are case
ac2a9454 5146 expressions that don't correspond to enumerators. This can
28d81abb 5147 occur since C and C++ don't enforce type-checking of
0f41302f 5148 assignments to enumeration variables. */
28d81abb 5149
5720c7e7
RK
5150 if (case_stack->data.case_stmt.case_list
5151 && case_stack->data.case_stmt.case_list->left)
5152 case_stack->data.case_stmt.case_list
5153 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
740b77b6
AC
5154 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5155 {
5156 for (chain = TYPE_VALUES (type);
5157 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5158 chain = TREE_CHAIN (chain))
5159 ;
786de7eb 5160
740b77b6
AC
5161 if (!chain)
5162 {
5163 if (TYPE_NAME (type) == 0)
5164 warning ("case value `%ld' not in enumerated type",
5165 (long) TREE_INT_CST_LOW (n->low));
5166 else
5167 warning ("case value `%ld' not in enumerated type `%s'",
5168 (long) TREE_INT_CST_LOW (n->low),
5169 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5170 == IDENTIFIER_NODE)
5171 ? TYPE_NAME (type)
5172 : DECL_NAME (TYPE_NAME (type))));
5173 }
5174 if (!tree_int_cst_equal (n->low, n->high))
5175 {
5176 for (chain = TYPE_VALUES (type);
5177 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5178 chain = TREE_CHAIN (chain))
5179 ;
786de7eb 5180
740b77b6
AC
5181 if (!chain)
5182 {
5183 if (TYPE_NAME (type) == 0)
5184 warning ("case value `%ld' not in enumerated type",
5185 (long) TREE_INT_CST_LOW (n->high));
5186 else
5187 warning ("case value `%ld' not in enumerated type `%s'",
5188 (long) TREE_INT_CST_LOW (n->high),
5189 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5190 == IDENTIFIER_NODE)
5191 ? TYPE_NAME (type)
5192 : DECL_NAME (TYPE_NAME (type))));
5193 }
5194 }
5195 }
28d81abb 5196}
ca695ac9 5197
28d81abb 5198\f
ad82abb8 5199
28d81abb 5200/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 5201 in which ORIG_INDEX is the expression to be tested.
6f9fdf4d
JJ
 5202 If ORIG_TYPE is not NULL, it is the original type of ORIG_INDEX
 5203 as given in the source before any compiler conversions.
28d81abb
RK
5204 Generate the code to test it and jump to the right place. */
5205
5206void
6f9fdf4d
JJ
5207expand_end_case_type (orig_index, orig_type)
5208 tree orig_index, orig_type;
28d81abb 5209{
9fb60a0d 5210 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
28d81abb 5211 rtx default_label = 0;
b3694847 5212 struct case_node *n;
85066503 5213 unsigned int count;
28d81abb 5214 rtx index;
ca695ac9 5215 rtx table_label;
28d81abb
RK
5216 int ncases;
5217 rtx *labelvec;
b3694847 5218 int i;
2270623a 5219 rtx before_case, end;
b3694847 5220 struct nesting *thiscase = case_stack;
1b0cb6fc 5221 tree index_expr, index_type;
ca695ac9
JB
5222 int unsignedp;
5223
03c03770
AS
5224 /* Don't crash due to previous errors. */
5225 if (thiscase == NULL)
5226 return;
5227
ca695ac9
JB
5228 table_label = gen_label_rtx ();
5229 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
5230 index_type = TREE_TYPE (index_expr);
5231 unsignedp = TREE_UNSIGNED (index_type);
6f9fdf4d
JJ
5232 if (orig_type == NULL)
5233 orig_type = TREE_TYPE (orig_index);
28d81abb
RK
5234
5235 do_pending_stack_adjust ();
5236
feb60352
R
 5237 /* This might get a spurious warning in the presence of a syntax error;
 5238 it could be fixed by moving the call to check_seenlabel after the
 5239 check for error_mark_node, and copying the code of check_seenlabel that
 5240 deals with case_stack->data.case_stmt.line_number_status /
 5241 restore_line_number_status in front of the call to end_cleanup_deferral.
 5242 However, this might miss some useful warnings in the presence of
5243 non-syntax errors. */
a11759a3
JR
5244 check_seenlabel ();
5245
28d81abb 5246 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 5247 if (index_type != error_mark_node)
28d81abb 5248 {
740b77b6
AC
5249 /* If the switch expression was an enumerated type, check that
5250 exactly all enumeration literals are covered by the cases.
173028e5
AC
5251 The check is made when -Wswitch was specified and there is no
5252 default case, or when -Wswitch-enum was specified. */
5253 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5254 || warn_switch_enum)
6f9fdf4d 5255 && TREE_CODE (orig_type) == ENUMERAL_TYPE
28d81abb 5256 && TREE_CODE (index_expr) != INTEGER_CST)
6f9fdf4d 5257 check_for_full_enumeration_handling (orig_type);
28d81abb 5258
d6961341
AC
5259 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5260 warning ("switch missing default case");
5261
28d81abb
RK
5262 /* If we don't have a default-label, create one here,
5263 after the body of the switch. */
5264 if (thiscase->data.case_stmt.default_label == 0)
5265 {
5266 thiscase->data.case_stmt.default_label
5267 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5268 expand_label (thiscase->data.case_stmt.default_label);
5269 }
5270 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5271
5272 before_case = get_last_insn ();
5273
5720c7e7
RK
5274 if (thiscase->data.case_stmt.case_list
5275 && thiscase->data.case_stmt.case_list->left)
b059139c 5276 thiscase->data.case_stmt.case_list
4381f7c2 5277 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
b059139c 5278
28d81abb
RK
5279 /* Simplify the case-list before we count it. */
5280 group_case_nodes (thiscase->data.case_stmt.case_list);
5281
5282 /* Get upper and lower bounds of case values.
5283 Also convert all the case values to the index expr's data type. */
5284
5285 count = 0;
5286 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5287 {
5288 /* Check low and high label values are integers. */
5289 if (TREE_CODE (n->low) != INTEGER_CST)
5290 abort ();
5291 if (TREE_CODE (n->high) != INTEGER_CST)
5292 abort ();
5293
1b0cb6fc
RK
5294 n->low = convert (index_type, n->low);
5295 n->high = convert (index_type, n->high);
28d81abb
RK
5296
5297 /* Count the elements and track the largest and smallest
5298 of them (treating them as signed even if they are not). */
5299 if (count++ == 0)
5300 {
5301 minval = n->low;
5302 maxval = n->high;
5303 }
5304 else
5305 {
5306 if (INT_CST_LT (n->low, minval))
5307 minval = n->low;
5308 if (INT_CST_LT (maxval, n->high))
5309 maxval = n->high;
5310 }
5311 /* A range counts double, since it requires two compares. */
5312 if (! tree_int_cst_equal (n->low, n->high))
5313 count++;
5314 }
5315
5316 /* Compute span of values. */
5317 if (count != 0)
1b0cb6fc 5318 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5319
956d6950 5320 end_cleanup_deferral ();
4c581243 5321
1b0cb6fc 5322 if (count == 0)
28d81abb
RK
5323 {
5324 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5325 emit_queue ();
5326 emit_jump (default_label);
5327 }
3474db0e 5328
28d81abb
RK
5329 /* If range of values is much bigger than number of values,
5330 make a sequence of conditional branches instead of a dispatch.
5331 If the switch-index is a constant, do it this way
5332 because we can optimize it. */
4f73c5dd 5333
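      /* As an illustration (numbers assumed): 4 labels spread over the
	 range 0..1000 fail the density test below (1000 > 10 * 4) and are
	 emitted as a comparison tree, while 50 labels packed into 0..120
	 pass it and, provided 50 reaches case_values_threshold (), are
	 emitted as a dispatch table.  */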
ad82abb8 5334 else if (count < case_values_threshold ()
05bccae2 5335 || compare_tree_int (range, 10 * count) > 0
f0c988c8
BS
5336 /* RANGE may be signed, and really large ranges will show up
5337 as negative numbers. */
5338 || compare_tree_int (range, 0) < 0
3f6fe18e
RK
5339#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5340 || flag_pic
5341#endif
28d81abb 5342 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab
RS
5343 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5344 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 5345 {
37366632 5346 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5347
 5348 /* If the index is a short or char for which we do not have
5349 an insn to handle comparisons directly, convert it to
5350 a full integer now, rather than letting each comparison
5351 generate the conversion. */
5352
5353 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
ef89d648 5354 && ! have_insn_for (COMPARE, GET_MODE (index)))
28d81abb
RK
5355 {
5356 enum machine_mode wider_mode;
5357 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5358 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
ef89d648 5359 if (have_insn_for (COMPARE, wider_mode))
28d81abb
RK
5360 {
5361 index = convert_to_mode (wider_mode, index, unsignedp);
5362 break;
5363 }
5364 }
5365
5366 emit_queue ();
5367 do_pending_stack_adjust ();
5368
5369 index = protect_from_queue (index, 0);
5370 if (GET_CODE (index) == MEM)
5371 index = copy_to_reg (index);
5372 if (GET_CODE (index) == CONST_INT
5373 || TREE_CODE (index_expr) == INTEGER_CST)
5374 {
5375 /* Make a tree node with the proper constant value
5376 if we don't already have one. */
5377 if (TREE_CODE (index_expr) != INTEGER_CST)
5378 {
5379 index_expr
5380 = build_int_2 (INTVAL (index),
e9a042b6 5381 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5382 index_expr = convert (index_type, index_expr);
28d81abb
RK
5383 }
5384
5385 /* For constant index expressions we need only
4fe9b91c 5386 issue an unconditional branch to the appropriate
28d81abb
RK
5387 target code. The job of removing any unreachable
 5388 code is left to the optimization phase if the
5389 "-O" option is specified. */
1b0cb6fc
RK
5390 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5391 if (! tree_int_cst_lt (index_expr, n->low)
5392 && ! tree_int_cst_lt (n->high, index_expr))
5393 break;
5394
28d81abb
RK
5395 if (n)
5396 emit_jump (label_rtx (n->code_label));
5397 else
5398 emit_jump (default_label);
5399 }
5400 else
5401 {
5402 /* If the index expression is not constant we generate
5403 a binary decision tree to select the appropriate
5404 target code. This is done as follows:
5405
5406 The list of cases is rearranged into a binary tree,
5407 nearly optimal assuming equal probability for each case.
5408
5409 The tree is transformed into RTL, eliminating
5410 redundant test conditions at the same time.
5411
5412 If program flow could reach the end of the
5413 decision tree an unconditional jump to the
5414 default code is emitted. */
5415
5416 use_cost_table
6f9fdf4d 5417 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
28d81abb 5418 && estimate_case_costs (thiscase->data.case_stmt.case_list));
9714cf43 5419 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
28d81abb 5420 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5421 default_label, index_type);
28d81abb
RK
5422 emit_jump_if_reachable (default_label);
5423 }
5424 }
5425 else
5426 {
ad82abb8
ZW
5427 if (! try_casesi (index_type, index_expr, minval, range,
5428 table_label, default_label))
28d81abb 5429 {
ecc9dd93 5430 index_type = thiscase->data.case_stmt.nominal_type;
1ff37128 5431
786de7eb 5432 /* Index jumptables from zero for suitable values of
1ff37128 5433 minval to avoid a subtraction. */
786de7eb
KH
5434 if (! optimize_size
5435 && compare_tree_int (minval, 0) > 0
5436 && compare_tree_int (minval, 3) < 0)
5437 {
5438 minval = integer_zero_node;
5439 range = maxval;
5440 }
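	  /* E.g. (values assumed) with case values 1, 2 and 3, MINVAL 1 is
	     rebased to 0 and RANGE widened to 3, so the table gains an
	     extra slot for index 0 (which jumps to the default label) but
	     the runtime subtraction of MINVAL disappears.  */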
1ff37128 5441
ad82abb8
ZW
5442 if (! try_tablejump (index_type, index_expr, minval, range,
5443 table_label, default_label))
5444 abort ();
28d81abb 5445 }
786de7eb 5446
28d81abb
RK
5447 /* Get table of labels to jump to, in order of case index. */
5448
1ff37128 5449 ncases = tree_low_cst (range, 0) + 1;
28d81abb 5450 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
961192e1 5451 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
28d81abb
RK
5452
5453 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5454 {
2d9d49e4
OH
5455 /* Compute the low and high bounds relative to the minimum
5456 value since that should fit in a HOST_WIDE_INT while the
5457 actual values may not. */
5458 HOST_WIDE_INT i_low
786de7eb
KH
5459 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5460 n->low, minval)), 1);
2d9d49e4 5461 HOST_WIDE_INT i_high
786de7eb
KH
5462 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5463 n->high, minval)), 1);
2d9d49e4
OH
5464 HOST_WIDE_INT i;
5465
5466 for (i = i_low; i <= i_high; i ++)
5467 labelvec[i]
5468 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
28d81abb
RK
5469 }
5470
5471 /* Fill in the gaps with the default. */
5472 for (i = 0; i < ncases; i++)
5473 if (labelvec[i] == 0)
38a448ca 5474 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
28d81abb
RK
5475
 5476 /* Output the table. */
5477 emit_label (table_label);
5478
18543a22 5479 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5480 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5481 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353 5482 gen_rtvec_v (ncases, labelvec),
4381f7c2 5483 const0_rtx, const0_rtx));
28d81abb 5484 else
38a448ca
RH
5485 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5486 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5487
5488 /* If the case insn drops through the table,
5489 after the table we must jump to the default-label.
5490 Otherwise record no drop-through after the table. */
5491#ifdef CASE_DROPS_THROUGH
5492 emit_jump (default_label);
5493#else
5494 emit_barrier ();
5495#endif
5496 }
5497
2270623a
JM
5498 before_case = NEXT_INSN (before_case);
5499 end = get_last_insn ();
2b7d71b2
JJ
5500 if (squeeze_notes (&before_case, &end))
5501 abort ();
2270623a 5502 reorder_insns (before_case, end,
28d81abb
RK
5503 thiscase->data.case_stmt.start);
5504 }
4c581243 5505 else
956d6950 5506 end_cleanup_deferral ();
1b0cb6fc 5507
28d81abb
RK
5508 if (thiscase->exit_label)
5509 emit_label (thiscase->exit_label);
5510
5511 POPSTACK (case_stack);
5512
5513 free_temp_slots ();
5514}
5515
57641239
RK
5516/* Convert the tree NODE into a list linked by the right field, with the left
5517 field zeroed. RIGHT is used for recursion; it is a list to be placed
5518 rightmost in the resulting list. */
5519
5520static struct case_node *
5521case_tree2list (node, right)
5522 struct case_node *node, *right;
5523{
5524 struct case_node *left;
5525
5526 if (node->right)
5527 right = case_tree2list (node->right, right);
5528
5529 node->right = right;
51723711 5530 if ((left = node->left))
57641239
RK
5531 {
5532 node->left = 0;
5533 return case_tree2list (left, node);
5534 }
5535
5536 return node;
5537}
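/* For illustration (a hypothetical tree): case_tree2list applied to

	2
       / \
      1   3

   yields the list 1 -> 2 -> 3, i.e. an in-order flattening linked
   through the `right' fields with every `left' field zeroed.  */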
ca695ac9 5538
28d81abb
RK
5539/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5540
5541static void
5542do_jump_if_equal (op1, op2, label, unsignedp)
5543 rtx op1, op2, label;
5544 int unsignedp;
5545{
d43e0b7d 5546 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
28d81abb
RK
5547 {
5548 if (INTVAL (op1) == INTVAL (op2))
5549 emit_jump (label);
5550 }
5551 else
d43e0b7d
RK
5552 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5553 (GET_MODE (op1) == VOIDmode
5554 ? GET_MODE (op2) : GET_MODE (op1)),
5555 unsignedp, label);
28d81abb
RK
5556}
5557\f
5558/* Not all case values are encountered equally. This function
5559 uses a heuristic to weight case labels, in cases where that
5560 looks like a reasonable thing to do.
5561
5562 Right now, all we try to guess is text, and we establish the
5563 following weights:
5564
5565 chars above space: 16
5566 digits: 16
5567 default: 12
5568 space, punct: 8
5569 tab: 4
5570 newline: 2
5571 other "\" chars: 1
5572 remaining chars: 0
5573
5574 If we find any cases in the switch that are not either -1 or in the range
5575 of valid ASCII characters, or are control characters other than those
 5576 commonly used with "\", don't treat this switch as scanning text.
5577
5578 Return 1 if these nodes are suitable for cost estimation, otherwise
5579 return 0. */
5580
5581static int
5582estimate_case_costs (node)
5583 case_node_ptr node;
5584{
f2d1f0ba 5585 tree min_ascii = integer_minus_one_node;
28d81abb
RK
5586 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5587 case_node_ptr n;
5588 int i;
5589
5590 /* If we haven't already made the cost table, make it now. Note that the
5591 lower bound of the table is -1, not zero. */
5592
2a2137c4 5593 if (! cost_table_initialized)
28d81abb 5594 {
2a2137c4 5595 cost_table_initialized = 1;
28d81abb
RK
5596
5597 for (i = 0; i < 128; i++)
5598 {
e9a780ec 5599 if (ISALNUM (i))
2a2137c4 5600 COST_TABLE (i) = 16;
e9a780ec 5601 else if (ISPUNCT (i))
2a2137c4 5602 COST_TABLE (i) = 8;
e9a780ec 5603 else if (ISCNTRL (i))
2a2137c4 5604 COST_TABLE (i) = -1;
28d81abb
RK
5605 }
5606
2a2137c4
RH
5607 COST_TABLE (' ') = 8;
5608 COST_TABLE ('\t') = 4;
5609 COST_TABLE ('\0') = 4;
5610 COST_TABLE ('\n') = 2;
5611 COST_TABLE ('\f') = 1;
5612 COST_TABLE ('\v') = 1;
5613 COST_TABLE ('\b') = 1;
28d81abb
RK
5614 }
5615
5616 /* See if all the case expressions look like text. It is text if the
5617 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5618 as signed arithmetic since we don't want to ever access cost_table with a
5619 value less than -1. Also check that none of the constants in a range
5620 are strange control characters. */
5621
5622 for (n = node; n; n = n->right)
5623 {
5624 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5625 return 0;
5626
05bccae2
RK
5627 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5628 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2a2137c4 5629 if (COST_TABLE (i) < 0)
28d81abb
RK
5630 return 0;
5631 }
5632
5633 /* All interesting values are within the range of interesting
5634 ASCII characters. */
5635 return 1;
5636}
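/* So, for illustration, a switch over the cases 'a', '0' and ' ' is treated
   as scanning text (weights 16, 16 and 8), while one containing the value
   200 or a control character such as '\001' makes this function return 0
   and the cost table go unused.  */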
5637
5638/* Scan an ordered list of case nodes
5639 combining those with consecutive values or ranges.
5640
 5641 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5642
5643static void
5644group_case_nodes (head)
5645 case_node_ptr head;
5646{
5647 case_node_ptr node = head;
5648
5649 while (node)
5650 {
5651 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5652 rtx lb2;
28d81abb
RK
5653 case_node_ptr np = node;
5654
5655 /* Try to group the successors of NODE with NODE. */
5656 while (((np = np->right) != 0)
5657 /* Do they jump to the same place? */
ad7e369f
JL
5658 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5659 || (lb != 0 && lb2 != 0
5660 && simplejump_p (lb)
5661 && simplejump_p (lb2)
5662 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5663 SET_SRC (PATTERN (lb2)))))
28d81abb
RK
5664 /* Are their ranges consecutive? */
5665 && tree_int_cst_equal (np->low,
5666 fold (build (PLUS_EXPR,
5667 TREE_TYPE (node->high),
5668 node->high,
5669 integer_one_node)))
5670 /* An overflow is not consecutive. */
5671 && tree_int_cst_lt (node->high,
5672 fold (build (PLUS_EXPR,
5673 TREE_TYPE (node->high),
5674 node->high,
5675 integer_one_node))))
5676 {
5677 node->high = np->high;
5678 }
5679 /* NP is the first node after NODE which can't be grouped with it.
5680 Delete the nodes in between, and move on to that node. */
5681 node->right = np;
5682 node = np;
5683 }
5684}
5685
5686/* Take an ordered list of case nodes
5687 and transform them into a near optimal binary tree,
6dc42e49 5688 on the assumption that any target code selection value is as
28d81abb
RK
5689 likely as any other.
5690
5691 The transformation is performed by splitting the ordered
5692 list into two equal sections plus a pivot. The parts are
5693 then attached to the pivot as left and right branches. Each
38e01259 5694 branch is then transformed recursively. */
28d81abb
RK
5695
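/* For example (equal weights assumed), the ordered list 1 2 3 4 5 becomes

	3
       / \
      1   4
       \   \
	2   5

   so any target label is reached after at most O(log N) comparisons.  */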
5696static void
5697balance_case_nodes (head, parent)
5698 case_node_ptr *head;
5699 case_node_ptr parent;
5700{
b3694847 5701 case_node_ptr np;
28d81abb
RK
5702
5703 np = *head;
5704 if (np)
5705 {
5706 int cost = 0;
5707 int i = 0;
5708 int ranges = 0;
b3694847 5709 case_node_ptr *npp;
28d81abb
RK
5710 case_node_ptr left;
5711
5712 /* Count the number of entries on branch. Also count the ranges. */
5713
5714 while (np)
5715 {
5716 if (!tree_int_cst_equal (np->low, np->high))
5717 {
5718 ranges++;
5719 if (use_cost_table)
2a2137c4 5720 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
28d81abb
RK
5721 }
5722
5723 if (use_cost_table)
2a2137c4 5724 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
28d81abb
RK
5725
5726 i++;
5727 np = np->right;
5728 }
5729
5730 if (i > 2)
5731 {
5732 /* Split this list if it is long enough for that to help. */
5733 npp = head;
5734 left = *npp;
5735 if (use_cost_table)
5736 {
 5737 /* Find the place in the list that bisects the list's total cost.
5738 Here I gets half the total cost. */
5739 int n_moved = 0;
5740 i = (cost + 1) / 2;
5741 while (1)
5742 {
5743 /* Skip nodes while their cost does not reach that amount. */
5744 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2a2137c4
RH
5745 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5746 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
28d81abb
RK
5747 if (i <= 0)
5748 break;
5749 npp = &(*npp)->right;
5750 n_moved += 1;
5751 }
5752 if (n_moved == 0)
5753 {
5754 /* Leave this branch lopsided, but optimize left-hand
5755 side and fill in `parent' fields for right-hand side. */
5756 np = *head;
5757 np->parent = parent;
5758 balance_case_nodes (&np->left, np);
5759 for (; np->right; np = np->right)
5760 np->right->parent = np;
5761 return;
5762 }
5763 }
5764 /* If there are just three nodes, split at the middle one. */
5765 else if (i == 3)
5766 npp = &(*npp)->right;
5767 else
5768 {
5769 /* Find the place in the list that bisects the list's total cost,
5770 where ranges count as 2.
5771 Here I gets half the total cost. */
5772 i = (i + ranges + 1) / 2;
5773 while (1)
5774 {
5775 /* Skip nodes while their cost does not reach that amount. */
5776 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5777 i--;
5778 i--;
5779 if (i <= 0)
5780 break;
5781 npp = &(*npp)->right;
5782 }
5783 }
5784 *head = np = *npp;
5785 *npp = 0;
5786 np->parent = parent;
5787 np->left = left;
5788
5789 /* Optimize each of the two split parts. */
5790 balance_case_nodes (&np->left, np);
5791 balance_case_nodes (&np->right, np);
5792 }
5793 else
5794 {
5795 /* Else leave this branch as one level,
5796 but fill in `parent' fields. */
5797 np = *head;
5798 np->parent = parent;
5799 for (; np->right; np = np->right)
5800 np->right->parent = np;
5801 }
5802 }
5803}
5804\f
5805/* Search the parent sections of the case node tree
5806 to see if a test for the lower bound of NODE would be redundant.
5807 INDEX_TYPE is the type of the index expression.
5808
5809 The instructions to generate the case decision tree are
5810 output in the same order as nodes are processed so it is
5811 known that if a parent node checks the range of the current
5812 node minus one that the current node is bounded at its lower
5813 span. Thus the test would be redundant. */
5814
5815static int
5816node_has_low_bound (node, index_type)
5817 case_node_ptr node;
5818 tree index_type;
5819{
5820 tree low_minus_one;
5821 case_node_ptr pnode;
5822
5823 /* If the lower bound of this node is the lowest value in the index type,
5824 we need not test it. */
5825
5826 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5827 return 1;
5828
5829 /* If this node has a left branch, the value at the left must be less
5830 than that at this node, so it cannot be bounded at the bottom and
5831 we need not bother testing any further. */
5832
5833 if (node->left)
5834 return 0;
5835
5836 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5837 node->low, integer_one_node));
5838
5839 /* If the subtraction above overflowed, we can't verify anything.
5840 Otherwise, look for a parent that tests our value - 1. */
5841
5842 if (! tree_int_cst_lt (low_minus_one, node->low))
5843 return 0;
5844
5845 for (pnode = node->parent; pnode; pnode = pnode->parent)
5846 if (tree_int_cst_equal (low_minus_one, pnode->high))
5847 return 1;
5848
5849 return 0;
5850}
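/* For instance, when emitting the node for `case 52' after a parent that
   already tested `case 51', the parent's high bound of 51 equals 52 - 1,
   so the lower-bound check on the child is redundant and skipped.  */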
5851
5852/* Search the parent sections of the case node tree
5853 to see if a test for the upper bound of NODE would be redundant.
5854 INDEX_TYPE is the type of the index expression.
5855
5856 The instructions to generate the case decision tree are
5857 output in the same order as nodes are processed so it is
5858 known that if a parent node checks the range of the current
5859 node plus one that the current node is bounded at its upper
5860 span. Thus the test would be redundant. */
5861
5862static int
5863node_has_high_bound (node, index_type)
5864 case_node_ptr node;
5865 tree index_type;
5866{
5867 tree high_plus_one;
5868 case_node_ptr pnode;
5869
e1ee5cdc
RH
5870 /* If there is no upper bound, obviously no test is needed. */
5871
5872 if (TYPE_MAX_VALUE (index_type) == NULL)
5873 return 1;
5874
28d81abb
RK
5875 /* If the upper bound of this node is the highest value in the type
5876 of the index expression, we need not test against it. */
5877
5878 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5879 return 1;
5880
5881 /* If this node has a right branch, the value at the right must be greater
5882 than that at this node, so it cannot be bounded at the top and
5883 we need not bother testing any further. */
5884
5885 if (node->right)
5886 return 0;
5887
5888 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5889 node->high, integer_one_node));
5890
5891 /* If the addition above overflowed, we can't verify anything.
5892 Otherwise, look for a parent that tests our value + 1. */
5893
5894 if (! tree_int_cst_lt (node->high, high_plus_one))
5895 return 0;
5896
5897 for (pnode = node->parent; pnode; pnode = pnode->parent)
5898 if (tree_int_cst_equal (high_plus_one, pnode->low))
5899 return 1;
5900
5901 return 0;
5902}
5903
5904/* Search the parent sections of the
5905 case node tree to see if both tests for the upper and lower
5906 bounds of NODE would be redundant. */
5907
5908static int
5909node_is_bounded (node, index_type)
5910 case_node_ptr node;
5911 tree index_type;
5912{
5913 return (node_has_low_bound (node, index_type)
5914 && node_has_high_bound (node, index_type));
5915}
5916
5917/* Emit an unconditional jump to LABEL unless it would be dead code. */
5918
5919static void
5920emit_jump_if_reachable (label)
5921 rtx label;
5922{
5923 if (GET_CODE (get_last_insn ()) != BARRIER)
5924 emit_jump (label);
5925}
5926\f
5927/* Emit step-by-step code to select a case for the value of INDEX.
5928 The thus generated decision tree follows the form of the
5929 case-node binary tree NODE, whose nodes represent test conditions.
5930 INDEX_TYPE is the type of the index of the switch.
5931
5932 Care is taken to prune redundant tests from the decision tree
5933 by detecting any boundary conditions already checked by
5934 emitted rtx. (See node_has_high_bound, node_has_low_bound
5935 and node_is_bounded, above.)
5936
5937 Where the test conditions can be shown to be redundant we emit
5938 an unconditional jump to the target code. As a further
5939 optimization, the subordinates of a tree node are examined to
5940 check for bounded nodes. In this case conditional and/or
5941 unconditional jumps as a result of the boundary check for the
 5942 current node are arranged to target the subordinates' associated
38e01259 5943 code for out-of-bound conditions on the current node.
28d81abb 5944
f72aed24 5945 We can assume that when control reaches the code generated here,
28d81abb
RK
5946 the index value has already been compared with the parents
5947 of this node, and determined to be on the same side of each parent
5948 as this node is. Thus, if this node tests for the value 51,
5949 and a parent tested for 52, we don't need to consider
5950 the possibility of a value greater than 51. If another parent
5951 tests for the value 50, then this node need not test anything. */
5952
5953static void
5954emit_case_nodes (index, node, default_label, index_type)
5955 rtx index;
5956 case_node_ptr node;
5957 rtx default_label;
5958 tree index_type;
5959{
5960 /* If INDEX has an unsigned type, we must make unsigned branches. */
5961 int unsignedp = TREE_UNSIGNED (index_type);
28d81abb 5962 enum machine_mode mode = GET_MODE (index);
69107307 5963 enum machine_mode imode = TYPE_MODE (index_type);
28d81abb
RK
5964
5965 /* See if our parents have already tested everything for us.
5966 If they have, emit an unconditional jump for this node. */
5967 if (node_is_bounded (node, index_type))
5968 emit_jump (label_rtx (node->code_label));
5969
5970 else if (tree_int_cst_equal (node->low, node->high))
5971 {
5972 /* Node is single valued. First see if the index expression matches
0f41302f 5973 this node and then check our children, if any. */
28d81abb 5974
69107307
AO
5975 do_jump_if_equal (index,
5976 convert_modes (mode, imode,
5977 expand_expr (node->low, NULL_RTX,
5978 VOIDmode, 0),
5979 unsignedp),
28d81abb
RK
5980 label_rtx (node->code_label), unsignedp);
5981
5982 if (node->right != 0 && node->left != 0)
5983 {
5984 /* This node has children on both sides.
5985 Dispatch to one side or the other
5986 by comparing the index value with this node's value.
5987 If one subtree is bounded, check that one first,
5988 so we can avoid real branches in the tree. */
5989
5990 if (node_is_bounded (node->right, index_type))
5991 {
4381f7c2 5992 emit_cmp_and_jump_insns (index,
69107307
AO
5993 convert_modes
5994 (mode, imode,
5995 expand_expr (node->high, NULL_RTX,
5996 VOIDmode, 0),
5997 unsignedp),
d43e0b7d 5998 GT, NULL_RTX, mode, unsignedp,
4381f7c2 5999 label_rtx (node->right->code_label));
28d81abb
RK
6000 emit_case_nodes (index, node->left, default_label, index_type);
6001 }
6002
6003 else if (node_is_bounded (node->left, index_type))
6004 {
4381f7c2 6005 emit_cmp_and_jump_insns (index,
69107307
AO
6006 convert_modes
6007 (mode, imode,
6008 expand_expr (node->high, NULL_RTX,
6009 VOIDmode, 0),
6010 unsignedp),
d43e0b7d 6011 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6012 label_rtx (node->left->code_label));
28d81abb
RK
6013 emit_case_nodes (index, node->right, default_label, index_type);
6014 }
6015
6016 else
6017 {
6018 /* Neither node is bounded. First distinguish the two sides;
6019 then emit the code for one side at a time. */
6020
4381f7c2 6021 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
28d81abb
RK
6022
6023 /* See if the value is on the right. */
4381f7c2 6024 emit_cmp_and_jump_insns (index,
69107307
AO
6025 convert_modes
6026 (mode, imode,
6027 expand_expr (node->high, NULL_RTX,
6028 VOIDmode, 0),
6029 unsignedp),
d43e0b7d 6030 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6031 label_rtx (test_label));
28d81abb
RK
6032
6033 /* Value must be on the left.
6034 Handle the left-hand subtree. */
6035 emit_case_nodes (index, node->left, default_label, index_type);
6036 /* If left-hand subtree does nothing,
6037 go to default. */
6038 emit_jump_if_reachable (default_label);
6039
6040 /* Code branches here for the right-hand subtree. */
6041 expand_label (test_label);
6042 emit_case_nodes (index, node->right, default_label, index_type);
6043 }
6044 }
6045
6046 else if (node->right != 0 && node->left == 0)
6047 {
 6048 /* Here we have a right child but no left so we issue a conditional
 6049 branch to default and process the right child.
 6050
 6051 Omit the conditional branch to default if it avoids only one
6052 right child; it costs too much space to save so little time. */
6053
de14fd73 6054 if (node->right->right || node->right->left
28d81abb
RK
6055 || !tree_int_cst_equal (node->right->low, node->right->high))
6056 {
6057 if (!node_has_low_bound (node, index_type))
6058 {
4381f7c2 6059 emit_cmp_and_jump_insns (index,
69107307
AO
6060 convert_modes
6061 (mode, imode,
6062 expand_expr (node->high, NULL_RTX,
6063 VOIDmode, 0),
6064 unsignedp),
d43e0b7d 6065 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6066 default_label);
28d81abb
RK
6067 }
6068
6069 emit_case_nodes (index, node->right, default_label, index_type);
6070 }
6071 else
6072 /* We cannot process node->right normally
6073 since we haven't ruled out the numbers less than
6074 this node's value. So handle node->right explicitly. */
6075 do_jump_if_equal (index,
69107307
AO
6076 convert_modes
6077 (mode, imode,
6078 expand_expr (node->right->low, NULL_RTX,
6079 VOIDmode, 0),
6080 unsignedp),
28d81abb
RK
6081 label_rtx (node->right->code_label), unsignedp);
6082 }
6083
6084 else if (node->right == 0 && node->left != 0)
6085 {
6086 /* Just one subtree, on the left. */
4381f7c2 6087 if (node->left->left || node->left->right
28d81abb
RK
6088 || !tree_int_cst_equal (node->left->low, node->left->high))
6089 {
6090 if (!node_has_high_bound (node, index_type))
6091 {
69107307
AO
6092 emit_cmp_and_jump_insns (index,
6093 convert_modes
6094 (mode, imode,
6095 expand_expr (node->high, NULL_RTX,
6096 VOIDmode, 0),
6097 unsignedp),
d43e0b7d 6098 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6099 default_label);
28d81abb
RK
6100 }
6101
6102 emit_case_nodes (index, node->left, default_label, index_type);
6103 }
6104 else
6105 /* We cannot process node->left normally
 6106 since we haven't ruled out the numbers greater than
6107 this node's value. So handle node->left explicitly. */
6108 do_jump_if_equal (index,
69107307
AO
6109 convert_modes
6110 (mode, imode,
6111 expand_expr (node->left->low, NULL_RTX,
6112 VOIDmode, 0),
6113 unsignedp),
28d81abb
RK
6114 label_rtx (node->left->code_label), unsignedp);
6115 }
6116 }
6117 else
6118 {
6119 /* Node is a range. These cases are very similar to those for a single
6120 value, except that we do not start by testing whether this node
6121 is the one to branch to. */
6122
6123 if (node->right != 0 && node->left != 0)
6124 {
6125 /* Node has subtrees on both sides.
6126 If the right-hand subtree is bounded,
6127 test for it first, since we can go straight there.
6128 Otherwise, we need to make a branch in the control structure,
6129 then handle the two subtrees. */
6130 tree test_label = 0;
6131
28d81abb
RK
6132 if (node_is_bounded (node->right, index_type))
6133 /* Right hand node is fully bounded so we can eliminate any
6134 testing and branch directly to the target code. */
69107307
AO
6135 emit_cmp_and_jump_insns (index,
6136 convert_modes
6137 (mode, imode,
6138 expand_expr (node->high, NULL_RTX,
6139 VOIDmode, 0),
6140 unsignedp),
d43e0b7d 6141 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6142 label_rtx (node->right->code_label));
28d81abb
RK
6143 else
6144 {
6145 /* Right hand node requires testing.
6146 Branch to a label where we will handle it later. */
6147
6148 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4381f7c2 6149 emit_cmp_and_jump_insns (index,
69107307
AO
6150 convert_modes
6151 (mode, imode,
6152 expand_expr (node->high, NULL_RTX,
6153 VOIDmode, 0),
6154 unsignedp),
d43e0b7d 6155 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6156 label_rtx (test_label));
28d81abb
RK
6157 }
6158
6159 /* Value belongs to this node or to the left-hand subtree. */
6160
69107307
AO
6161 emit_cmp_and_jump_insns (index,
6162 convert_modes
6163 (mode, imode,
6164 expand_expr (node->low, NULL_RTX,
6165 VOIDmode, 0),
6166 unsignedp),
d43e0b7d 6167 GE, NULL_RTX, mode, unsignedp,
c5d5d461 6168 label_rtx (node->code_label));
28d81abb
RK
6169
6170 /* Handle the left-hand subtree. */
6171 emit_case_nodes (index, node->left, default_label, index_type);
6172
6173 /* If right node had to be handled later, do that now. */
6174
6175 if (test_label)
6176 {
6177 /* If the left-hand subtree fell through,
6178 don't let it fall into the right-hand subtree. */
6179 emit_jump_if_reachable (default_label);
6180
6181 expand_label (test_label);
6182 emit_case_nodes (index, node->right, default_label, index_type);
6183 }
6184 }
6185
6186 else if (node->right != 0 && node->left == 0)
6187 {
6188 /* Deal with values to the left of this node,
6189 if they are possible. */
6190 if (!node_has_low_bound (node, index_type))
6191 {
4381f7c2 6192 emit_cmp_and_jump_insns (index,
69107307
AO
6193 convert_modes
6194 (mode, imode,
6195 expand_expr (node->low, NULL_RTX,
6196 VOIDmode, 0),
6197 unsignedp),
d43e0b7d 6198 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6199 default_label);
28d81abb
RK
6200 }
6201
6202 /* Value belongs to this node or to the right-hand subtree. */
6203
69107307
AO
6204 emit_cmp_and_jump_insns (index,
6205 convert_modes
6206 (mode, imode,
6207 expand_expr (node->high, NULL_RTX,
6208 VOIDmode, 0),
6209 unsignedp),
d43e0b7d 6210 LE, NULL_RTX, mode, unsignedp,
c5d5d461 6211 label_rtx (node->code_label));
28d81abb
RK
6212
6213 emit_case_nodes (index, node->right, default_label, index_type);
6214 }
6215
6216 else if (node->right == 0 && node->left != 0)
6217 {
6218 /* Deal with values to the right of this node,
6219 if they are possible. */
6220 if (!node_has_high_bound (node, index_type))
6221 {
4381f7c2 6222 emit_cmp_and_jump_insns (index,
69107307
AO
6223 convert_modes
6224 (mode, imode,
6225 expand_expr (node->high, NULL_RTX,
6226 VOIDmode, 0),
6227 unsignedp),
d43e0b7d 6228 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6229 default_label);
28d81abb
RK
6230 }
6231
6232 /* Value belongs to this node or to the left-hand subtree. */
6233
4381f7c2 6234 emit_cmp_and_jump_insns (index,
69107307
AO
6235 convert_modes
6236 (mode, imode,
6237 expand_expr (node->low, NULL_RTX,
6238 VOIDmode, 0),
6239 unsignedp),
d43e0b7d 6240 GE, NULL_RTX, mode, unsignedp,
c5d5d461 6241 label_rtx (node->code_label));
28d81abb
RK
6242
6243 emit_case_nodes (index, node->left, default_label, index_type);
6244 }
6245
6246 else
6247 {
6248 /* Node has no children so we check low and high bounds to remove
6249 redundant tests. Only one of the bounds can exist,
6250 since otherwise this node is bounded--a case tested already. */
923cbdc3
JH
6251 int high_bound = node_has_high_bound (node, index_type);
6252 int low_bound = node_has_low_bound (node, index_type);
28d81abb 6253
923cbdc3 6254 if (!high_bound && low_bound)
28d81abb 6255 {
4381f7c2 6256 emit_cmp_and_jump_insns (index,
69107307
AO
6257 convert_modes
6258 (mode, imode,
6259 expand_expr (node->high, NULL_RTX,
6260 VOIDmode, 0),
6261 unsignedp),
d43e0b7d 6262 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6263 default_label);
28d81abb
RK
6264 }
6265
923cbdc3 6266 else if (!low_bound && high_bound)
28d81abb 6267 {
4381f7c2 6268 emit_cmp_and_jump_insns (index,
69107307
AO
6269 convert_modes
6270 (mode, imode,
6271 expand_expr (node->low, NULL_RTX,
6272 VOIDmode, 0),
6273 unsignedp),
d43e0b7d 6274 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6275 default_label);
28d81abb 6276 }
923cbdc3
JH
6277 else if (!low_bound && !high_bound)
6278 {
9312aecc 6279 /* Widen LOW and HIGH to the same width as INDEX. */
b0c48229 6280 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9312aecc
JDA
6281 tree low = build1 (CONVERT_EXPR, type, node->low);
6282 tree high = build1 (CONVERT_EXPR, type, node->high);
ef89d648 6283 rtx low_rtx, new_index, new_bound;
9312aecc
JDA
6284
6285 /* Instead of doing two branches, emit one unsigned branch for
6286 (index-low) > (high-low). */
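	      /* E.g. (values assumed) for `case 3 ... 7': an index of 2
		 yields (unsigned) (2 - 3), a huge value exceeding
		 7 - 3 == 4, and so branches to the default label, while
		 any index in 3..7 maps into 0..4 and passes.  */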
ef89d648
ZW
6287 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6288 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6289 NULL_RTX, unsignedp,
6290 OPTAB_WIDEN);
9312aecc
JDA
6291 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6292 high, low)),
6293 NULL_RTX, mode, 0);
786de7eb 6294
9312aecc 6295 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
d43e0b7d 6296 mode, 1, default_label);
923cbdc3 6297 }
28d81abb
RK
6298
6299 emit_jump (label_rtx (node->code_label));
6300 }
6301 }
6302}
e2500fed
GK
6303
6304#include "gt-stmt.h"