]> gcc.gnu.org Git - gcc.git/blame - gcc/stmt.c
emit-rtl.c (reg_attrs_htab): New static variable.
[gcc.git] / gcc / stmt.c
CommitLineData
28d81abb 1/* Expands front end tree to back end RTL for GNU C-Compiler
4559fd9e 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
58b23af8 3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
28d81abb 4
1322177d 5This file is part of GCC.
28d81abb 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
28d81abb 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
28d81abb
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
28d81abb 21
28d81abb
RK
22/* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
ccd043a9 40
28d81abb
RK
41#include "rtl.h"
42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
28d81abb 44#include "flags.h"
6adb4e3a 45#include "except.h"
28d81abb 46#include "function.h"
28d81abb 47#include "insn-config.h"
28d81abb 48#include "expr.h"
e78d8e51 49#include "libfuncs.h"
28d81abb 50#include "hard-reg-set.h"
28d81abb
RK
51#include "loop.h"
52#include "recog.h"
ca695ac9 53#include "machmode.h"
10f0ad3d 54#include "toplev.h"
d6f4ec51 55#include "output.h"
87ff9c8e 56#include "ggc.h"
43577e6b 57#include "langhooks.h"
969d70ca 58#include "predict.h"
ca695ac9 59
18543a22
ILT
60/* Assume that case vectors are not pc-relative. */
61#ifndef CASE_VECTOR_PC_RELATIVE
62#define CASE_VECTOR_PC_RELATIVE 0
63#endif
28d81abb
RK
64\f
65/* Functions and data structures for expanding case statements. */
66
67/* Case label structure, used to hold info on labels within case
68 statements. We handle "range" labels; for a single-value label
69 as in C, the high and low limits are the same.
70
5720c7e7
RK
71 An AVL tree of case nodes is initially created, and later transformed
72 to a list linked via the RIGHT fields in the nodes. Nodes with
73 higher case values are later in the list.
28d81abb
RK
74
75 Switch statements can be output in one of two forms. A branch table
76 is used if there are more than a few labels and the labels are dense
77 within the range between the smallest and largest case value. If a
78 branch table is used, no further manipulations are done with the case
79 node chain.
80
81 The alternative to the use of a branch table is to generate a series
82 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
83 and PARENT fields to hold a binary tree. Initially the tree is
de14fd73
RK
84 totally unbalanced, with everything on the right. We balance the tree
85 with nodes on the left having lower case values than the parent
28d81abb
RK
86 and nodes on the right having higher values. We then output the tree
87 in order. */
88
e2500fed 89struct case_node GTY(())
28d81abb
RK
90{
91 struct case_node *left; /* Left son in binary tree */
92 struct case_node *right; /* Right son in binary tree; also node chain */
93 struct case_node *parent; /* Parent of node in binary tree */
94 tree low; /* Lowest index value for this label */
95 tree high; /* Highest index value for this label */
96 tree code_label; /* Label to jump to when node matches */
57641239 97 int balance;
28d81abb
RK
98};
99
100typedef struct case_node case_node;
101typedef struct case_node *case_node_ptr;
102
103/* These are used by estimate_case_costs and balance_case_nodes. */
104
105/* This must be a signed type, and non-ANSI compilers lack signed char. */
e7749837 106static short cost_table_[129];
28d81abb 107static int use_cost_table;
2a2137c4
RH
108static int cost_table_initialized;
109
110/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
111 is unsigned. */
cf403648 112#define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
28d81abb
RK
113\f
114/* Stack of control and binding constructs we are currently inside.
115
116 These constructs begin when you call `expand_start_WHATEVER'
117 and end when you call `expand_end_WHATEVER'. This stack records
118 info about how the construct began that tells the end-function
119 what to do. It also may provide information about the construct
120 to alter the behavior of other constructs within the body.
121 For example, they may affect the behavior of C `break' and `continue'.
122
123 Each construct gets one `struct nesting' object.
124 All of these objects are chained through the `all' field.
125 `nesting_stack' points to the first object (innermost construct).
126 The position of an entry on `nesting_stack' is in its `depth' field.
127
128 Each type of construct has its own individual stack.
129 For example, loops have `loop_stack'. Each object points to the
130 next object of the same type through the `next' field.
131
132 Some constructs are visible to `break' exit-statements and others
133 are not. Which constructs are visible depends on the language.
134 Therefore, the data structure allows each construct to be visible
135 or not, according to the args given when the construct is started.
136 The construct is visible if the `exit_label' field is non-null.
137 In that case, the value should be a CODE_LABEL rtx. */
138
e2500fed 139struct nesting GTY(())
28d81abb
RK
140{
141 struct nesting *all;
142 struct nesting *next;
143 int depth;
144 rtx exit_label;
e2500fed
GK
145 enum nesting_desc {
146 COND_NESTING,
147 LOOP_NESTING,
148 BLOCK_NESTING,
149 CASE_NESTING
150 } desc;
151 union nesting_u
28d81abb
RK
152 {
153 /* For conds (if-then and if-then-else statements). */
e2500fed 154 struct nesting_cond
28d81abb
RK
155 {
156 /* Label for the end of the if construct.
157 There is none if EXITFLAG was not set
158 and no `else' has been seen yet. */
159 rtx endif_label;
160 /* Label for the end of this alternative.
0f41302f 161 This may be the end of the if or the next else/elseif. */
28d81abb 162 rtx next_label;
e2500fed 163 } GTY ((tag ("COND_NESTING"))) cond;
28d81abb 164 /* For loops. */
e2500fed 165 struct nesting_loop
28d81abb
RK
166 {
167 /* Label at the top of the loop; place to loop back to. */
168 rtx start_label;
169 /* Label at the end of the whole construct. */
170 rtx end_label;
171 /* Label for `continue' statement to jump to;
172 this is in front of the stepper of the loop. */
173 rtx continue_label;
e2500fed 174 } GTY ((tag ("LOOP_NESTING"))) loop;
28d81abb 175 /* For variable binding contours. */
e2500fed 176 struct nesting_block
28d81abb
RK
177 {
178 /* Sequence number of this binding contour within the function,
179 in order of entry. */
180 int block_start_count;
b93a436e 181 /* Nonzero => value to restore stack to on exit. */
28d81abb
RK
182 rtx stack_level;
183 /* The NOTE that starts this contour.
184 Used by expand_goto to check whether the destination
185 is within each contour or not. */
186 rtx first_insn;
187 /* Innermost containing binding contour that has a stack level. */
188 struct nesting *innermost_stack_block;
189 /* List of cleanups to be run on exit from this contour.
190 This is a list of expressions to be evaluated.
191 The TREE_PURPOSE of each link is the ..._DECL node
192 which the cleanup pertains to. */
193 tree cleanups;
194 /* List of cleanup-lists of blocks containing this block,
195 as they were at the locus where this block appears.
196 There is an element for each containing block,
197 ordered innermost containing block first.
e976b8b2 198 The tail of this list can be 0,
28d81abb
RK
199 if all remaining elements would be empty lists.
200 The element's TREE_VALUE is the cleanup-list of that block,
201 which may be null. */
202 tree outer_cleanups;
203 /* Chain of labels defined inside this binding contour.
204 For contours that have stack levels or cleanups. */
205 struct label_chain *label_chain;
206 /* Number of function calls seen, as of start of this block. */
3f1d071b 207 int n_function_calls;
e0a2f705 208 /* Nonzero if this is associated with an EH region. */
e976b8b2
MS
209 int exception_region;
210 /* The saved target_temp_slot_level from our outer block.
211 We may reset target_temp_slot_level to be the level of
212 this block, if that is done, target_temp_slot_level
213 reverts to the saved target_temp_slot_level at the very
214 end of the block. */
3f1d071b 215 int block_target_temp_slot_level;
e976b8b2
MS
216 /* True if we are currently emitting insns in an area of
217 output code that is controlled by a conditional
218 expression. This is used by the cleanup handling code to
219 generate conditional cleanup actions. */
220 int conditional_code;
221 /* A place to move the start of the exception region for any
222 of the conditional cleanups, must be at the end or after
223 the start of the last unconditional cleanup, and before any
224 conditional branch points. */
225 rtx last_unconditional_cleanup;
e2500fed 226 } GTY ((tag ("BLOCK_NESTING"))) block;
28d81abb
RK
227 /* For switch (C) or case (Pascal) statements,
228 and also for dummies (see `expand_start_case_dummy'). */
e2500fed 229 struct nesting_case
28d81abb
RK
230 {
231 /* The insn after which the case dispatch should finally
232 be emitted. Zero for a dummy. */
233 rtx start;
57641239
RK
234 /* A list of case labels; it is first built as an AVL tree.
235 During expand_end_case, this is converted to a list, and may be
236 rearranged into a nearly balanced binary tree. */
28d81abb
RK
237 struct case_node *case_list;
238 /* Label to jump to if no case matches. */
239 tree default_label;
240 /* The expression to be dispatched on. */
241 tree index_expr;
242 /* Type that INDEX_EXPR should be converted to. */
243 tree nominal_type;
28d81abb 244 /* Name of this kind of statement, for warnings. */
dff01034 245 const char *printname;
a11759a3
JR
246 /* Used to save no_line_numbers till we see the first case label.
247 We set this to -1 when we see the first case label in this
248 case statement. */
249 int line_number_status;
e2500fed
GK
250 } GTY ((tag ("CASE_NESTING"))) case_stmt;
251 } GTY ((desc ("%1.desc"))) data;
28d81abb
RK
252};
253
28d81abb
RK
254/* Allocate and return a new `struct nesting'. */
255
256#define ALLOC_NESTING() \
e2500fed 257 (struct nesting *) ggc_alloc (sizeof (struct nesting))
28d81abb 258
6ed1d6c5
RS
259/* Pop the nesting stack element by element until we pop off
260 the element which is at the top of STACK.
261 Update all the other stacks, popping off elements from them
262 as we pop them from nesting_stack. */
28d81abb
RK
263
264#define POPSTACK(STACK) \
6ed1d6c5
RS
265do { struct nesting *target = STACK; \
266 struct nesting *this; \
267 do { this = nesting_stack; \
268 if (loop_stack == this) \
269 loop_stack = loop_stack->next; \
270 if (cond_stack == this) \
271 cond_stack = cond_stack->next; \
272 if (block_stack == this) \
273 block_stack = block_stack->next; \
274 if (stack_block_stack == this) \
275 stack_block_stack = stack_block_stack->next; \
276 if (case_stack == this) \
277 case_stack = case_stack->next; \
6ed1d6c5 278 nesting_depth = nesting_stack->depth - 1; \
e2500fed 279 nesting_stack = this->all; } \
6ed1d6c5 280 while (this != target); } while (0)
28d81abb
RK
281\f
282/* In some cases it is impossible to generate code for a forward goto
283 until the label definition is seen. This happens when it may be necessary
284 for the goto to reset the stack pointer: we don't yet know how to do that.
285 So expand_goto puts an entry on this fixup list.
286 Each time a binding contour that resets the stack is exited,
287 we check each fixup.
288 If the target label has now been defined, we can insert the proper code. */
289
e2500fed 290struct goto_fixup GTY(())
28d81abb
RK
291{
292 /* Points to following fixup. */
293 struct goto_fixup *next;
294 /* Points to the insn before the jump insn.
295 If more code must be inserted, it goes after this insn. */
296 rtx before_jump;
297 /* The LABEL_DECL that this jump is jumping to, or 0
298 for break, continue or return. */
299 tree target;
7629c936
RS
300 /* The BLOCK for the place where this goto was found. */
301 tree context;
28d81abb
RK
302 /* The CODE_LABEL rtx that this is jumping to. */
303 rtx target_rtl;
304 /* Number of binding contours started in current function
305 before the label reference. */
306 int block_start_count;
307 /* The outermost stack level that should be restored for this jump.
308 Each time a binding contour that resets the stack is exited,
309 if the target label is *not* yet defined, this slot is updated. */
310 rtx stack_level;
311 /* List of lists of cleanup expressions to be run by this goto.
312 There is one element for each block that this goto is within.
e976b8b2 313 The tail of this list can be 0,
28d81abb
RK
314 if all remaining elements would be empty.
315 The TREE_VALUE contains the cleanup list of that block as of the
316 time this goto was seen.
317 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
318 tree cleanup_list_list;
319};
320
28d81abb
RK
321/* Within any binding contour that must restore a stack level,
322 all labels are recorded with a chain of these structures. */
323
e2500fed 324struct label_chain GTY(())
28d81abb
RK
325{
326 /* Points to following fixup. */
327 struct label_chain *next;
328 tree label;
329};
e9a25f70 330
e2500fed 331struct stmt_status GTY(())
3f1d071b
BS
332{
333 /* Chain of all pending binding contours. */
e2500fed 334 struct nesting * x_block_stack;
3f1d071b
BS
335
336 /* If any new stacks are added here, add them to POPSTACKS too. */
337
338 /* Chain of all pending binding contours that restore stack levels
339 or have cleanups. */
e2500fed 340 struct nesting * x_stack_block_stack;
3f1d071b
BS
341
342 /* Chain of all pending conditional statements. */
e2500fed 343 struct nesting * x_cond_stack;
3f1d071b
BS
344
345 /* Chain of all pending loops. */
e2500fed 346 struct nesting * x_loop_stack;
3f1d071b
BS
347
348 /* Chain of all pending case or switch statements. */
e2500fed 349 struct nesting * x_case_stack;
3f1d071b
BS
350
351 /* Separate chain including all of the above,
352 chained through the `all' field. */
e2500fed 353 struct nesting * x_nesting_stack;
3f1d071b
BS
354
355 /* Number of entries on nesting_stack now. */
356 int x_nesting_depth;
357
358 /* Number of binding contours started so far in this function. */
359 int x_block_start_count;
360
361 /* Each time we expand an expression-statement,
362 record the expr's type and its RTL value here. */
363 tree x_last_expr_type;
364 rtx x_last_expr_value;
365
366 /* Nonzero if within a ({...}) grouping, in which case we must
367 always compute a value for each expr-stmt in case it is the last one. */
368 int x_expr_stmts_for_value;
369
370 /* Filename and line number of last line-number note,
371 whether we actually emitted it or not. */
47ee9bcb 372 const char *x_emit_filename;
3f1d071b
BS
373 int x_emit_lineno;
374
375 struct goto_fixup *x_goto_fixup_chain;
376};
377
01d939e8
BS
378#define block_stack (cfun->stmt->x_block_stack)
379#define stack_block_stack (cfun->stmt->x_stack_block_stack)
380#define cond_stack (cfun->stmt->x_cond_stack)
381#define loop_stack (cfun->stmt->x_loop_stack)
382#define case_stack (cfun->stmt->x_case_stack)
383#define nesting_stack (cfun->stmt->x_nesting_stack)
384#define nesting_depth (cfun->stmt->x_nesting_depth)
385#define current_block_start_count (cfun->stmt->x_block_start_count)
386#define last_expr_type (cfun->stmt->x_last_expr_type)
387#define last_expr_value (cfun->stmt->x_last_expr_value)
388#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
389#define emit_filename (cfun->stmt->x_emit_filename)
390#define emit_lineno (cfun->stmt->x_emit_lineno)
391#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
e9a25f70 392
272d0bee 393/* Nonzero if we are using EH to handle cleanups. */
e9a25f70
JL
394static int using_eh_for_cleanups_p = 0;
395
cdadb1dd 396static int n_occurrences PARAMS ((int, const char *));
6be2e1f8
RH
397static bool parse_input_constraint PARAMS ((const char **, int, int, int,
398 int, const char * const *,
399 bool *, bool *));
acb5d088 400static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
cdadb1dd
KG
401static void expand_goto_internal PARAMS ((tree, rtx, rtx));
402static int expand_fixup PARAMS ((tree, rtx, rtx));
403static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
404static void expand_nl_goto_receiver PARAMS ((void));
405static void expand_nl_goto_receivers PARAMS ((struct nesting *));
406static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
cfc3d13f 407 rtx, int));
84b72302
RH
408static bool check_operand_nalternatives PARAMS ((tree, tree));
409static bool check_unique_operand_names PARAMS ((tree, tree));
410static tree resolve_operand_names PARAMS ((tree, tree, tree,
411 const char **));
412static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
396ad517 413static void expand_null_return_1 PARAMS ((rtx));
969d70ca 414static enum br_predictor return_prediction PARAMS ((rtx));
cdadb1dd
KG
415static void expand_value_return PARAMS ((rtx));
416static int tail_recursion_args PARAMS ((tree, tree));
417static void expand_cleanups PARAMS ((tree, tree, int, int));
418static void check_seenlabel PARAMS ((void));
419static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
420static int estimate_case_costs PARAMS ((case_node_ptr));
100e3acb
RS
421static bool same_case_target_p PARAMS ((rtx, rtx));
422static void strip_default_case_nodes PARAMS ((case_node_ptr *, rtx));
cdadb1dd
KG
423static void group_case_nodes PARAMS ((case_node_ptr));
424static void balance_case_nodes PARAMS ((case_node_ptr *,
cfc3d13f 425 case_node_ptr));
cdadb1dd
KG
426static int node_has_low_bound PARAMS ((case_node_ptr, tree));
427static int node_has_high_bound PARAMS ((case_node_ptr, tree));
428static int node_is_bounded PARAMS ((case_node_ptr, tree));
429static void emit_jump_if_reachable PARAMS ((rtx));
430static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
cdadb1dd 431static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
28d81abb 432\f
e9a25f70
JL
433void
434using_eh_for_cleanups ()
435{
436 using_eh_for_cleanups_p = 1;
437}
438
28d81abb
RK
439void
440init_stmt_for_function ()
441{
e2500fed 442 cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));
3f1d071b 443
28d81abb
RK
444 /* We are not currently within any block, conditional, loop or case. */
445 block_stack = 0;
0b931590 446 stack_block_stack = 0;
28d81abb
RK
447 loop_stack = 0;
448 case_stack = 0;
449 cond_stack = 0;
450 nesting_stack = 0;
451 nesting_depth = 0;
452
3f1d071b 453 current_block_start_count = 0;
28d81abb
RK
454
455 /* No gotos have been expanded yet. */
456 goto_fixup_chain = 0;
457
458 /* We are not processing a ({...}) grouping. */
459 expr_stmts_for_value = 0;
e2500fed 460 clear_last_expr ();
28d81abb 461}
3f1d071b 462\f
3f1d071b 463/* Record the current file and line. Called from emit_line_note. */
28d81abb 464void
3f1d071b 465set_file_and_line_for_stmt (file, line)
47ee9bcb 466 const char *file;
3f1d071b
BS
467 int line;
468{
61d84605
MM
469 /* If we're outputting an inline function, and we add a line note,
470 there may be no CFUN->STMT information. So, there's no need to
471 update it. */
472 if (cfun->stmt)
473 {
474 emit_filename = file;
475 emit_lineno = line;
476 }
28d81abb 477}
3f1d071b 478
28d81abb
RK
479/* Emit a no-op instruction. */
480
481void
482emit_nop ()
483{
ca695ac9
JB
484 rtx last_insn;
485
b93a436e
JL
486 last_insn = get_last_insn ();
487 if (!optimize
488 && (GET_CODE (last_insn) == CODE_LABEL
489 || (GET_CODE (last_insn) == NOTE
490 && prev_real_insn (last_insn) == 0)))
491 emit_insn (gen_nop ());
28d81abb
RK
492}
493\f
494/* Return the rtx-label that corresponds to a LABEL_DECL,
495 creating it if necessary. */
496
497rtx
498label_rtx (label)
499 tree label;
500{
501 if (TREE_CODE (label) != LABEL_DECL)
502 abort ();
503
19e7881c
MM
504 if (!DECL_RTL_SET_P (label))
505 SET_DECL_RTL (label, gen_label_rtx ());
28d81abb 506
19e7881c 507 return DECL_RTL (label);
28d81abb
RK
508}
509
19e7881c 510
28d81abb
RK
511/* Add an unconditional jump to LABEL as the next sequential instruction. */
512
513void
514emit_jump (label)
515 rtx label;
516{
517 do_pending_stack_adjust ();
518 emit_jump_insn (gen_jump (label));
519 emit_barrier ();
520}
521
522/* Emit code to jump to the address
523 specified by the pointer expression EXP. */
524
525void
526expand_computed_goto (exp)
527 tree exp;
528{
b93a436e 529 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
ed9a9db1
RK
530
531#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
532 if (GET_MODE (x) != Pmode)
533 x = convert_memory_address (Pmode, x);
ed9a9db1 534#endif
ffa1a1ce 535
b93a436e 536 emit_queue ();
b93a436e
JL
537 do_pending_stack_adjust ();
538 emit_indirect_jump (x);
acd693d1
RH
539
540 current_function_has_computed_jump = 1;
28d81abb
RK
541}
542\f
543/* Handle goto statements and the labels that they can go to. */
544
545/* Specify the location in the RTL code of a label LABEL,
546 which is a LABEL_DECL tree node.
547
548 This is used for the kind of label that the user can jump to with a
549 goto statement, and for alternatives of a switch or case statement.
550 RTL labels generated for loops and conditionals don't go through here;
551 they are generated directly at the RTL level, by other functions below.
552
553 Note that this has nothing to do with defining label *names*.
554 Languages vary in how they do that and what that even means. */
555
556void
557expand_label (label)
558 tree label;
559{
560 struct label_chain *p;
561
562 do_pending_stack_adjust ();
563 emit_label (label_rtx (label));
564 if (DECL_NAME (label))
565 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
566
567 if (stack_block_stack != 0)
568 {
1f8f4a0b 569 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
28d81abb
RK
570 p->next = stack_block_stack->data.block.label_chain;
571 stack_block_stack->data.block.label_chain = p;
572 p->label = label;
573 }
574}
575
576/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
577 from nested functions. */
578
579void
580declare_nonlocal_label (label)
581 tree label;
582{
ba716ac9
BS
583 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
584
28d81abb
RK
585 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
586 LABEL_PRESERVE_P (label_rtx (label)) = 1;
ba716ac9 587 if (nonlocal_goto_handler_slots == 0)
28d81abb 588 {
59257ff7
RK
589 emit_stack_save (SAVE_NONLOCAL,
590 &nonlocal_goto_stack_level,
591 PREV_INSN (tail_recursion_reentry));
28d81abb 592 }
ba716ac9
BS
593 nonlocal_goto_handler_slots
594 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
28d81abb
RK
595}
596
597/* Generate RTL code for a `goto' statement with target label LABEL.
598 LABEL should be a LABEL_DECL tree node that was or will later be
599 defined with `expand_label'. */
600
601void
602expand_goto (label)
603 tree label;
604{
ca695ac9
JB
605 tree context;
606
28d81abb 607 /* Check for a nonlocal goto to a containing function. */
ca695ac9 608 context = decl_function_context (label);
28d81abb
RK
609 if (context != 0 && context != current_function_decl)
610 {
611 struct function *p = find_function_data (context);
38a448ca 612 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
4b01bd16 613 rtx handler_slot, static_chain, save_area, insn;
ba716ac9
BS
614 tree link;
615
616 /* Find the corresponding handler slot for this label. */
49ad7cfa
BS
617 handler_slot = p->x_nonlocal_goto_handler_slots;
618 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
ba716ac9
BS
619 link = TREE_CHAIN (link))
620 handler_slot = XEXP (handler_slot, 1);
621 handler_slot = XEXP (handler_slot, 0);
dd132134 622
28d81abb 623 p->has_nonlocal_label = 1;
c1255328 624 current_function_has_nonlocal_goto = 1;
dd132134 625 LABEL_REF_NONLOCAL_P (label_ref) = 1;
59257ff7
RK
626
627 /* Copy the rtl for the slots so that they won't be shared in
628 case the virtual stack vars register gets instantiated differently
629 in the parent than in the child. */
630
157229c3
RH
631 static_chain = copy_to_reg (lookup_static_chain (label));
632
633 /* Get addr of containing function's current nonlocal goto handler,
634 which will do any cleanups and then jump to the label. */
635 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
636 virtual_stack_vars_rtx,
637 static_chain));
638
639 /* Get addr of containing function's nonlocal save area. */
640 save_area = p->x_nonlocal_goto_stack_level;
641 if (save_area)
642 save_area = replace_rtx (copy_rtx (save_area),
643 virtual_stack_vars_rtx, static_chain);
644
28d81abb
RK
645#if HAVE_nonlocal_goto
646 if (HAVE_nonlocal_goto)
157229c3
RH
647 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
648 save_area, label_ref));
28d81abb
RK
649 else
650#endif
651 {
652 /* Restore frame pointer for containing function.
653 This sets the actual hard register used for the frame pointer
654 to the location of the function's incoming static chain info.
655 The non-local goto handler will then adjust it to contain the
656 proper value and reload the argument pointer, if needed. */
157229c3
RH
657 emit_move_insn (hard_frame_pointer_rtx, static_chain);
658 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
659
660 /* USE of hard_frame_pointer_rtx added for consistency;
661 not clear if really needed. */
38a448ca
RH
662 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
663 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
157229c3 664 emit_indirect_jump (handler_slot);
28d81abb 665 }
4b01bd16 666
786de7eb 667 /* Search backwards to the jump insn and mark it as a
4b01bd16 668 non-local goto. */
7c2b017c
AM
669 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
670 {
671 if (GET_CODE (insn) == JUMP_INSN)
672 {
673 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
674 const0_rtx, REG_NOTES (insn));
675 break;
676 }
677 else if (GET_CODE (insn) == CALL_INSN)
678 break;
679 }
4381f7c2 680 }
28d81abb 681 else
37366632 682 expand_goto_internal (label, label_rtx (label), NULL_RTX);
28d81abb
RK
683}
684
685/* Generate RTL code for a `goto' statement with target label BODY.
686 LABEL should be a LABEL_REF.
687 LAST_INSN, if non-0, is the rtx we should consider as the last
688 insn emitted (for the purposes of cleaning up a return). */
689
690static void
691expand_goto_internal (body, label, last_insn)
692 tree body;
693 rtx label;
694 rtx last_insn;
695{
696 struct nesting *block;
697 rtx stack_level = 0;
698
699 if (GET_CODE (label) != CODE_LABEL)
700 abort ();
701
702 /* If label has already been defined, we can tell now
703 whether and how we must alter the stack level. */
704
705 if (PREV_INSN (label) != 0)
706 {
707 /* Find the innermost pending block that contains the label.
708 (Check containment by comparing insn-uids.)
709 Then restore the outermost stack level within that block,
710 and do cleanups of all blocks contained in it. */
711 for (block = block_stack; block; block = block->next)
712 {
713 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
714 break;
715 if (block->data.block.stack_level != 0)
716 stack_level = block->data.block.stack_level;
717 /* Execute the cleanups for blocks we are exiting. */
718 if (block->data.block.cleanups != 0)
719 {
50d1b7a1 720 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
28d81abb
RK
721 do_pending_stack_adjust ();
722 }
723 }
724
725 if (stack_level)
726 {
0f41302f
MS
727 /* Ensure stack adjust isn't done by emit_jump, as this
728 would clobber the stack pointer. This one should be
729 deleted as dead by flow. */
28d81abb
RK
730 clear_pending_stack_adjust ();
731 do_pending_stack_adjust ();
7393c642
RK
732
733 /* Don't do this adjust if it's to the end label and this function
734 is to return with a depressed stack pointer. */
735 if (label == return_label
c4a6c0f3
RK
736 && (((TREE_CODE (TREE_TYPE (current_function_decl))
737 == FUNCTION_TYPE)
738 && (TYPE_RETURNS_STACK_DEPRESSED
739 (TREE_TYPE (current_function_decl))))))
7393c642
RK
740 ;
741 else
742 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
28d81abb
RK
743 }
744
745 if (body != 0 && DECL_TOO_LATE (body))
746 error ("jump to `%s' invalidly jumps into binding contour",
747 IDENTIFIER_POINTER (DECL_NAME (body)));
748 }
749 /* Label not yet defined: may need to put this goto
750 on the fixup list. */
751 else if (! expand_fixup (body, label, last_insn))
752 {
753 /* No fixup needed. Record that the label is the target
754 of at least one goto that has no fixup. */
755 if (body != 0)
756 TREE_ADDRESSABLE (body) = 1;
757 }
758
759 emit_jump (label);
760}
761\f
762/* Generate if necessary a fixup for a goto
763 whose target label in tree structure (if any) is TREE_LABEL
764 and whose target in rtl is RTL_LABEL.
765
766 If LAST_INSN is nonzero, we pretend that the jump appears
767 after insn LAST_INSN instead of at the current point in the insn stream.
768
023b57e6
RS
769 The fixup will be used later to insert insns just before the goto.
770 Those insns will restore the stack level as appropriate for the
771 target label, and will (in the case of C++) also invoke any object
772 destructors which have to be invoked when we exit the scopes which
773 are exited by the goto.
28d81abb
RK
774
775 Value is nonzero if a fixup is made. */
776
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.
	 Walk the `all' chain in parallel with the BLOCK chain so we
	 stop at the first binding contour outside END_BLOCK.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	/* In whole-function mode the new BLOCK is chained directly
	   onto the function's outermost scope; otherwise let the
	   front end insert it at the current binding level.  */
	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	/* Emit the note triple in a detached sequence, then splice it
	   into the insn stream just after ORIGINAL_BEFORE_JUMP.  */
	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
cfc3d13f
RK
912\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */
ca695ac9 915
cfc3d13f
RK
void
expand_fixups (first_insn)
     rtx first_insn;
{
  /* At the outermost level there is no enclosing binding contour,
     no stack level to restore, and no cleanup list.  */
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
ca695ac9 922
28d81abb
RK
923/* When exiting a binding contour, process all pending gotos requiring fixups.
924 THISBLOCK is the structure that describes the block being exited.
925 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
926 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
927 FIRST_INSN is the insn that began this contour.
928
929 Gotos that jump out of this contour must restore the
930 stack level and do the cleanups before actually jumping.
931
932 DONT_JUMP_IN nonzero means report error there is a jump into this
933 contour from before the beginning of the contour.
934 This is also done if STACK_LEVEL is nonzero. */
935
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  Skip the restore when jumping to the
	     return label of a function whose return type says the
	     stack is deliberately left depressed.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  /* Mark this fixup as handled.  */
	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      /* Drop the cleanups just run from the pending list.  */
	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
2a230e9d
BS
1089\f
1090/* Return the number of times character C occurs in string S. */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  /* Count how many characters of S compare equal to C.  */
  int count = 0;
  const char *p;

  for (p = s; *p != '\0'; p++)
    if (*p == c)
      count++;

  return count;
}
28d81abb
RK
1101\f
1102/* Generate RTL for an asm statement (explicit assembler code).
4c46ea23
EB
1103 STRING is a STRING_CST node containing the assembler code text,
1104 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1105 insn is volatile; don't optimize it. */
28d81abb
RK
1106
1107void
4c46ea23
EB
1108expand_asm (string, vol)
1109 tree string;
1110 int vol;
28d81abb 1111{
4c46ea23
EB
1112 rtx body;
1113
1114 if (TREE_CODE (string) == ADDR_EXPR)
1115 string = TREE_OPERAND (string, 0);
1116
1117 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1118
1119 MEM_VOLATILE_P (body) = vol;
28d81abb 1120
4c46ea23
EB
1121 emit_insn (body);
1122
e2500fed 1123 clear_last_expr ();
28d81abb
RK
1124}
1125
40b18c0a
MM
1126/* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1127 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1128 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1129 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1130 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1131 constraint allows the use of a register operand. And, *IS_INOUT
1132 will be true if the operand is read-write, i.e., if it is used as
1133 an input as well as an output. If *CONSTRAINT_P is not in
1134 canonical form, it will be made canonical. (Note that `+' will be
14b493d6 1135 replaced with `=' as part of this process.)
40b18c0a
MM
1136
1137 Returns TRUE if all went well; FALSE if an error occurred. */
1138
1139bool
6be2e1f8
RH
1140parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
1141 allows_mem, allows_reg, is_inout)
40b18c0a
MM
1142 const char **constraint_p;
1143 int operand_num;
1144 int ninputs;
1145 int noutputs;
1146 bool *allows_mem;
1147 bool *allows_reg;
1148 bool *is_inout;
1149{
1150 const char *constraint = *constraint_p;
1151 const char *p;
1152
1153 /* Assume the constraint doesn't allow the use of either a register
1154 or memory. */
1155 *allows_mem = false;
1156 *allows_reg = false;
1157
1158 /* Allow the `=' or `+' to not be at the beginning of the string,
1159 since it wasn't explicitly documented that way, and there is a
1160 large body of code that puts it last. Swap the character to
1161 the front, so as not to uglify any place else. */
1162 p = strchr (constraint, '=');
1163 if (!p)
1164 p = strchr (constraint, '+');
1165
1166 /* If the string doesn't contain an `=', issue an error
1167 message. */
1168 if (!p)
1169 {
1170 error ("output operand constraint lacks `='");
1171 return false;
1172 }
1173
1174 /* If the constraint begins with `+', then the operand is both read
1175 from and written to. */
1176 *is_inout = (*p == '+');
1177
40b18c0a
MM
1178 /* Canonicalize the output constraint so that it begins with `='. */
1179 if (p != constraint || is_inout)
1180 {
1181 char *buf;
1182 size_t c_len = strlen (constraint);
1183
1184 if (p != constraint)
1185 warning ("output constraint `%c' for operand %d is not at the beginning",
1186 *p, operand_num);
1187
1188 /* Make a copy of the constraint. */
1189 buf = alloca (c_len + 1);
1190 strcpy (buf, constraint);
1191 /* Swap the first character and the `=' or `+'. */
1192 buf[p - constraint] = buf[0];
1193 /* Make sure the first character is an `='. (Until we do this,
1194 it might be a `+'.) */
1195 buf[0] = '=';
1196 /* Replace the constraint with the canonicalized string. */
1197 *constraint_p = ggc_alloc_string (buf, c_len);
1198 constraint = *constraint_p;
1199 }
1200
1201 /* Loop through the constraint string. */
97488870 1202 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
40b18c0a
MM
1203 switch (*p)
1204 {
1205 case '+':
1206 case '=':
357351e5 1207 error ("operand constraint contains incorrectly positioned '+' or '='");
40b18c0a 1208 return false;
786de7eb 1209
40b18c0a
MM
1210 case '%':
1211 if (operand_num + 1 == ninputs + noutputs)
1212 {
1213 error ("`%%' constraint used with last operand");
1214 return false;
1215 }
1216 break;
1217
1218 case 'V': case 'm': case 'o':
1219 *allows_mem = true;
1220 break;
1221
1222 case '?': case '!': case '*': case '&': case '#':
1223 case 'E': case 'F': case 'G': case 'H':
1224 case 's': case 'i': case 'n':
1225 case 'I': case 'J': case 'K': case 'L': case 'M':
1226 case 'N': case 'O': case 'P': case ',':
1227 break;
1228
1229 case '0': case '1': case '2': case '3': case '4':
1230 case '5': case '6': case '7': case '8': case '9':
84b72302 1231 case '[':
40b18c0a
MM
1232 error ("matching constraint not valid in output operand");
1233 return false;
1234
1235 case '<': case '>':
1236 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1237 excepting those that expand_call created. So match memory
1238 and hope. */
1239 *allows_mem = true;
1240 break;
1241
1242 case 'g': case 'X':
1243 *allows_reg = true;
1244 *allows_mem = true;
1245 break;
786de7eb 1246
40b18c0a
MM
1247 case 'p': case 'r':
1248 *allows_reg = true;
1249 break;
1250
1251 default:
1252 if (!ISALPHA (*p))
1253 break;
97488870 1254 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
40b18c0a 1255 *allows_reg = true;
97488870
R
1256#ifdef EXTRA_CONSTRAINT_STR
1257 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
ccfc6cc8 1258 *allows_reg = true;
97488870 1259 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
ccfc6cc8 1260 *allows_mem = true;
40b18c0a
MM
1261 else
1262 {
1263 /* Otherwise we can't assume anything about the nature of
1264 the constraint except that it isn't purely registers.
1265 Treat it like "g" and hope for the best. */
1266 *allows_reg = true;
1267 *allows_mem = true;
1268 }
1269#endif
1270 break;
1271 }
1272
1273 return true;
1274}
1275
6be2e1f8
RH
1276/* Similar, but for input constraints. */
1277
1278static bool
1279parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
1280 constraints, allows_mem, allows_reg)
1281 const char **constraint_p;
1282 int input_num;
1283 int ninputs;
1284 int noutputs;
1285 int ninout;
1286 const char * const * constraints;
1287 bool *allows_mem;
1288 bool *allows_reg;
1289{
1290 const char *constraint = *constraint_p;
1291 const char *orig_constraint = constraint;
1292 size_t c_len = strlen (constraint);
1293 size_t j;
1294
1295 /* Assume the constraint doesn't allow the use of either
1296 a register or memory. */
1297 *allows_mem = false;
1298 *allows_reg = false;
1299
1300 /* Make sure constraint has neither `=', `+', nor '&'. */
1301
97488870 1302 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
6be2e1f8
RH
1303 switch (constraint[j])
1304 {
1305 case '+': case '=': case '&':
1306 if (constraint == orig_constraint)
1307 {
1308 error ("input operand constraint contains `%c'", constraint[j]);
1309 return false;
1310 }
1311 break;
1312
1313 case '%':
1314 if (constraint == orig_constraint
1315 && input_num + 1 == ninputs - ninout)
1316 {
1317 error ("`%%' constraint used with last operand");
1318 return false;
1319 }
1320 break;
1321
1322 case 'V': case 'm': case 'o':
1323 *allows_mem = true;
1324 break;
1325
1326 case '<': case '>':
1327 case '?': case '!': case '*': case '#':
1328 case 'E': case 'F': case 'G': case 'H':
1329 case 's': case 'i': case 'n':
1330 case 'I': case 'J': case 'K': case 'L': case 'M':
1331 case 'N': case 'O': case 'P': case ',':
1332 break;
1333
1334 /* Whether or not a numeric constraint allows a register is
1335 decided by the matching constraint, and so there is no need
1336 to do anything special with them. We must handle them in
1337 the default case, so that we don't unnecessarily force
1338 operands to memory. */
1339 case '0': case '1': case '2': case '3': case '4':
1340 case '5': case '6': case '7': case '8': case '9':
1341 {
1342 char *end;
1343 unsigned long match;
1344
1345 match = strtoul (constraint + j, &end, 10);
1346 if (match >= (unsigned long) noutputs)
1347 {
1348 error ("matching constraint references invalid operand number");
1349 return false;
1350 }
1351
1352 /* Try and find the real constraint for this dup. Only do this
1353 if the matching constraint is the only alternative. */
1354 if (*end == '\0'
1355 && (j == 0 || (j == 1 && constraint[0] == '%')))
1356 {
1357 constraint = constraints[match];
1358 *constraint_p = constraint;
1359 c_len = strlen (constraint);
1360 j = 0;
97488870
R
1361 /* ??? At the end of the loop, we will skip the first part of
1362 the matched constraint. This assumes not only that the
1363 other constraint is an output constraint, but also that
1364 the '=' or '+' come first. */
6be2e1f8
RH
1365 break;
1366 }
1367 else
1368 j = end - constraint;
97488870
R
1369 /* Anticipate increment at end of loop. */
1370 j--;
6be2e1f8
RH
1371 }
1372 /* Fall through. */
1373
1374 case 'p': case 'r':
1375 *allows_reg = true;
1376 break;
1377
1378 case 'g': case 'X':
1379 *allows_reg = true;
1380 *allows_mem = true;
1381 break;
1382
1383 default:
1384 if (! ISALPHA (constraint[j]))
1385 {
1386 error ("invalid punctuation `%c' in constraint", constraint[j]);
1387 return false;
1388 }
97488870
R
1389 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1390 != NO_REGS)
6be2e1f8 1391 *allows_reg = true;
97488870
R
1392#ifdef EXTRA_CONSTRAINT_STR
1393 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
ccfc6cc8 1394 *allows_reg = true;
97488870 1395 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
ccfc6cc8 1396 *allows_mem = true;
6be2e1f8
RH
1397 else
1398 {
1399 /* Otherwise we can't assume anything about the nature of
1400 the constraint except that it isn't purely registers.
1401 Treat it like "g" and hope for the best. */
1402 *allows_reg = true;
1403 *allows_mem = true;
1404 }
1405#endif
1406 break;
1407 }
1408
1409 return true;
1410}
1411
acb5d088
HPN
1412/* Check for overlap between registers marked in CLOBBERED_REGS and
1413 anything inappropriate in DECL. Emit error and return TRUE for error,
1414 FALSE for ok. */
1415
1416static bool
1417decl_conflicts_with_clobbers_p (decl, clobbered_regs)
1418 tree decl;
1419 const HARD_REG_SET clobbered_regs;
1420{
1421 /* Conflicts between asm-declared register variables and the clobber
1422 list are not allowed. */
1423 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1424 && DECL_REGISTER (decl)
34146b94 1425 && REG_P (DECL_RTL (decl))
acb5d088
HPN
1426 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1427 {
1428 rtx reg = DECL_RTL (decl);
1429 unsigned int regno;
1430
1431 for (regno = REGNO (reg);
1432 regno < (REGNO (reg)
1433 + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
1434 regno++)
1435 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1436 {
1437 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1438 IDENTIFIER_POINTER (DECL_NAME (decl)));
1439
1440 /* Reset registerness to stop multiple errors emitted for a
1441 single variable. */
1442 DECL_REGISTER (decl) = 0;
1443 return true;
1444 }
1445 }
1446 return false;
1447}
1448
28d81abb
RK
1449/* Generate RTL for an asm statement with arguments.
1450 STRING is the instruction template.
1451 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1452 Each output or input has an expression in the TREE_VALUE and
2ec37136 1453 and a tree list in TREE_PURPOSE which in turn contains a constraint
786de7eb 1454 name in TREE_VALUE (or NULL_TREE) and a constraint string
2ec37136 1455 in TREE_PURPOSE.
28d81abb
RK
1456 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1457 that is clobbered by this insn.
1458
1459 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1460 Some elements of OUTPUTS may be replaced with trees representing temporary
1461 values. The caller should copy those temporary values to the originally
1462 specified lvalues.
1463
1464 VOL nonzero means the insn is volatile; don't optimize it. */
1465
1466void
1467expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1468 tree string, outputs, inputs, clobbers;
1469 int vol;
3b304f5b 1470 const char *filename;
28d81abb
RK
1471 int line;
1472{
84b72302 1473 rtvec argvec, constraintvec;
28d81abb
RK
1474 rtx body;
1475 int ninputs = list_length (inputs);
1476 int noutputs = list_length (outputs);
6be2e1f8 1477 int ninout;
b4ccaa16 1478 int nclobbers;
acb5d088
HPN
1479 HARD_REG_SET clobbered_regs;
1480 int clobber_conflict_found = 0;
28d81abb 1481 tree tail;
b3694847 1482 int i;
28d81abb
RK
1483 /* Vector of RTX's of evaluated output operands. */
1484 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
235c5021 1485 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
947255ed 1486 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
235c5021
RK
1487 enum machine_mode *inout_mode
1488 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
84b72302
RH
1489 const char **constraints
1490 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1b3d8f8a 1491 int old_generating_concat_p = generating_concat_p;
28d81abb 1492
e5e809f4 1493 /* An ASM with no outputs needs to be treated as volatile, for now. */
296f8acc
JL
1494 if (noutputs == 0)
1495 vol = 1;
1496
84b72302
RH
1497 if (! check_operand_nalternatives (outputs, inputs))
1498 return;
1499
1500 if (! check_unique_operand_names (outputs, inputs))
1501 return;
1502
1503 string = resolve_operand_names (string, outputs, inputs, constraints);
1504
57bcb97a
RH
1505#ifdef MD_ASM_CLOBBERS
1506 /* Sometimes we wish to automatically clobber registers across an asm.
1507 Case in point is when the i386 backend moved from cc0 to a hard reg --
f63d1bf7 1508 maintaining source-level compatibility means automatically clobbering
57bcb97a
RH
1509 the flags register. */
1510 MD_ASM_CLOBBERS (clobbers);
1511#endif
1512
b4ccaa16
RS
1513 /* Count the number of meaningful clobbered registers, ignoring what
1514 we would ignore later. */
1515 nclobbers = 0;
acb5d088 1516 CLEAR_HARD_REG_SET (clobbered_regs);
b4ccaa16
RS
1517 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1518 {
47ee9bcb 1519 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
14a774a9 1520
c09e6498
RS
1521 i = decode_reg_name (regname);
1522 if (i >= 0 || i == -4)
b4ccaa16 1523 ++nclobbers;
7859e3ac
DE
1524 else if (i == -2)
1525 error ("unknown register name `%s' in `asm'", regname);
acb5d088
HPN
1526
1527 /* Mark clobbered registers. */
1528 if (i >= 0)
e54b4cae
EB
1529 {
1530 /* Clobbering the PIC register is an error */
fc555370 1531 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
e54b4cae
EB
1532 {
1533 error ("PIC register `%s' clobbered in `asm'", regname);
1534 return;
1535 }
1536
1537 SET_HARD_REG_BIT (clobbered_regs, i);
1538 }
b4ccaa16
RS
1539 }
1540
e2500fed 1541 clear_last_expr ();
28d81abb 1542
6be2e1f8
RH
1543 /* First pass over inputs and outputs checks validity and sets
1544 mark_addressable if needed. */
1545
1546 ninout = 0;
28d81abb
RK
1547 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1548 {
1549 tree val = TREE_VALUE (tail);
b50a024d 1550 tree type = TREE_TYPE (val);
6be2e1f8 1551 const char *constraint;
40b18c0a
MM
1552 bool is_inout;
1553 bool allows_reg;
1554 bool allows_mem;
28d81abb
RK
1555
1556 /* If there's an erroneous arg, emit no insn. */
40b18c0a 1557 if (type == error_mark_node)
28d81abb
RK
1558 return;
1559
40b18c0a
MM
1560 /* Try to parse the output constraint. If that fails, there's
1561 no point in going further. */
6be2e1f8
RH
1562 constraint = constraints[i];
1563 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1564 &allows_mem, &allows_reg, &is_inout))
1565 return;
1566
1567 if (! allows_reg
1568 && (allows_mem
1569 || is_inout
1570 || (DECL_P (val)
1571 && GET_CODE (DECL_RTL (val)) == REG
1572 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
dffd7eb6 1573 (*lang_hooks.mark_addressable) (val);
6be2e1f8
RH
1574
1575 if (is_inout)
1576 ninout++;
1577 }
1578
1579 ninputs += ninout;
1580 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1581 {
1582 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1583 return;
1584 }
1585
1586 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1587 {
1588 bool allows_reg, allows_mem;
1589 const char *constraint;
1590
1591 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1592 would get VOIDmode and that could cause a crash in reload. */
1593 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1594 return;
1595
1596 constraint = constraints[i + noutputs];
1597 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1598 constraints, &allows_mem, &allows_reg))
40b18c0a 1599 return;
d09a75ae 1600
6be2e1f8 1601 if (! allows_reg && allows_mem)
dffd7eb6 1602 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
6be2e1f8
RH
1603 }
1604
1605 /* Second pass evaluates arguments. */
1606
1607 ninout = 0;
1608 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1609 {
1610 tree val = TREE_VALUE (tail);
1611 tree type = TREE_TYPE (val);
1612 bool is_inout;
1613 bool allows_reg;
1614 bool allows_mem;
5b50aa9d 1615 rtx op;
6be2e1f8
RH
1616
1617 if (!parse_output_constraint (&constraints[i], i, ninputs,
1618 noutputs, &allows_mem, &allows_reg,
1619 &is_inout))
1620 abort ();
1621
d09a75ae
RK
1622 /* If an output operand is not a decl or indirect ref and our constraint
1623 allows a register, make a temporary to act as an intermediate.
1624 Make the asm insn write into that, then our caller will copy it to
1625 the real output operand. Likewise for promoted variables. */
28d81abb 1626
1b3d8f8a
GK
1627 generating_concat_p = 0;
1628
947255ed 1629 real_output_rtx[i] = NULL_RTX;
1afbe1c4
RH
1630 if ((TREE_CODE (val) == INDIRECT_REF
1631 && allows_mem)
2f939d94 1632 || (DECL_P (val)
1afbe1c4 1633 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
b50a024d 1634 && ! (GET_CODE (DECL_RTL (val)) == REG
d09a75ae 1635 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
11579f33 1636 || ! allows_reg
2a230e9d 1637 || is_inout)
d09a75ae 1638 {
5b50aa9d
RH
1639 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1640 if (GET_CODE (op) == MEM)
1641 op = validize_mem (op);
d09a75ae 1642
5b50aa9d 1643 if (! allows_reg && GET_CODE (op) != MEM)
d09a75ae 1644 error ("output number %d not directly addressable", i);
5b50aa9d
RH
1645 if ((! allows_mem && GET_CODE (op) == MEM)
1646 || GET_CODE (op) == CONCAT)
947255ed 1647 {
5b50aa9d
RH
1648 real_output_rtx[i] = protect_from_queue (op, 1);
1649 op = gen_reg_rtx (GET_MODE (op));
11579f33 1650 if (is_inout)
5b50aa9d 1651 emit_move_insn (op, real_output_rtx[i]);
947255ed 1652 }
d09a75ae 1653 }
b50a024d 1654 else
e619bb8d 1655 {
5b50aa9d
RH
1656 op = assign_temp (type, 0, 0, 1);
1657 op = validize_mem (op);
1658 TREE_VALUE (tail) = make_tree (type, op);
b50a024d 1659 }
5b50aa9d 1660 output_rtx[i] = op;
235c5021 1661
1b3d8f8a
GK
1662 generating_concat_p = old_generating_concat_p;
1663
2a230e9d 1664 if (is_inout)
235c5021 1665 {
6be2e1f8 1666 inout_mode[ninout] = TYPE_MODE (type);
235c5021
RK
1667 inout_opnum[ninout++] = i;
1668 }
acb5d088
HPN
1669
1670 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1671 clobber_conflict_found = 1;
28d81abb
RK
1672 }
1673
84b72302
RH
1674 /* Make vectors for the expression-rtx, constraint strings,
1675 and named operands. */
28d81abb
RK
1676
1677 argvec = rtvec_alloc (ninputs);
84b72302 1678 constraintvec = rtvec_alloc (ninputs);
28d81abb 1679
6462bb43
AO
1680 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1681 : GET_MODE (output_rtx[0])),
786de7eb 1682 TREE_STRING_POINTER (string),
84b72302 1683 empty_string, 0, argvec, constraintvec,
21a427cc 1684 filename, line);
c85f7c16 1685
78418280 1686 MEM_VOLATILE_P (body) = vol;
28d81abb
RK
1687
1688 /* Eval the inputs and put them into ARGVEC.
1689 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1690
84b72302 1691 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
28d81abb 1692 {
6be2e1f8
RH
1693 bool allows_reg, allows_mem;
1694 const char *constraint;
1695 tree val, type;
1f06ee8d 1696 rtx op;
28d81abb 1697
6be2e1f8
RH
1698 constraint = constraints[i + noutputs];
1699 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1700 constraints, &allows_mem, &allows_reg))
1701 abort ();
2a230e9d 1702
6be2e1f8 1703 generating_concat_p = 0;
65fed0cb 1704
6be2e1f8
RH
1705 val = TREE_VALUE (tail);
1706 type = TREE_TYPE (val);
1707 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
65fed0cb 1708
1b3d8f8a 1709 /* Never pass a CONCAT to an ASM. */
1b3d8f8a
GK
1710 if (GET_CODE (op) == CONCAT)
1711 op = force_reg (GET_MODE (op), op);
5b50aa9d
RH
1712 else if (GET_CODE (op) == MEM)
1713 op = validize_mem (op);
1b3d8f8a 1714
1afbe1c4 1715 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1716 {
11579f33 1717 if (allows_reg)
6be2e1f8 1718 op = force_reg (TYPE_MODE (type), op);
11579f33 1719 else if (!allows_mem)
84b72302
RH
1720 warning ("asm operand %d probably doesn't match constraints",
1721 i + noutputs);
1f06ee8d 1722 else if (CONSTANT_P (op))
5b50aa9d
RH
1723 {
1724 op = force_const_mem (TYPE_MODE (type), op);
1725 op = validize_mem (op);
1726 }
1f06ee8d
RH
1727 else if (GET_CODE (op) == REG
1728 || GET_CODE (op) == SUBREG
d96ecaf4 1729 || GET_CODE (op) == ADDRESSOF
1f06ee8d
RH
1730 || GET_CODE (op) == CONCAT)
1731 {
1da68f56
RK
1732 tree qual_type = build_qualified_type (type,
1733 (TYPE_QUALS (type)
1734 | TYPE_QUAL_CONST));
1735 rtx memloc = assign_temp (qual_type, 1, 1, 1);
5b50aa9d 1736 memloc = validize_mem (memloc);
1f06ee8d
RH
1737 emit_move_insn (memloc, op);
1738 op = memloc;
1739 }
14a774a9 1740
1f06ee8d 1741 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
6be2e1f8
RH
1742 {
1743 /* We won't recognize volatile memory as available a
1744 memory_operand at this point. Ignore it. */
1745 }
1f06ee8d
RH
1746 else if (queued_subexp_p (op))
1747 ;
1748 else
1749 /* ??? Leave this only until we have experience with what
1750 happens in combine and elsewhere when constraints are
1751 not satisfied. */
84b72302
RH
1752 warning ("asm operand %d probably doesn't match constraints",
1753 i + noutputs);
65fed0cb 1754 }
6be2e1f8 1755
1b3d8f8a 1756 generating_concat_p = old_generating_concat_p;
6462bb43 1757 ASM_OPERANDS_INPUT (body, i) = op;
2a230e9d 1758
6462bb43 1759 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
6be2e1f8 1760 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
acb5d088
HPN
1761
1762 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1763 clobber_conflict_found = 1;
28d81abb
RK
1764 }
1765
14a774a9
RK
1766 /* Protect all the operands from the queue now that they have all been
1767 evaluated. */
28d81abb 1768
1b3d8f8a
GK
1769 generating_concat_p = 0;
1770
235c5021 1771 for (i = 0; i < ninputs - ninout; i++)
6462bb43
AO
1772 ASM_OPERANDS_INPUT (body, i)
1773 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
28d81abb
RK
1774
1775 for (i = 0; i < noutputs; i++)
1776 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1777
4381f7c2 1778 /* For in-out operands, copy output rtx to input rtx. */
235c5021
RK
1779 for (i = 0; i < ninout; i++)
1780 {
235c5021 1781 int j = inout_opnum[i];
84b72302 1782 char buffer[16];
235c5021 1783
6462bb43 1784 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
235c5021 1785 = output_rtx[j];
84b72302
RH
1786
1787 sprintf (buffer, "%d", j);
6462bb43 1788 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
84b72302 1789 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
235c5021
RK
1790 }
1791
1b3d8f8a
GK
1792 generating_concat_p = old_generating_concat_p;
1793
28d81abb 1794 /* Now, for each output, construct an rtx
84b72302
RH
1795 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1796 ARGVEC CONSTRAINTS OPNAMES))
28d81abb
RK
1797 If there is more than one, put them inside a PARALLEL. */
1798
1799 if (noutputs == 1 && nclobbers == 0)
1800 {
84b72302 1801 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
4977bab6 1802 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1803 }
14a774a9 1804
28d81abb
RK
1805 else if (noutputs == 0 && nclobbers == 0)
1806 {
1807 /* No output operands: put in a raw ASM_OPERANDS rtx. */
4977bab6 1808 emit_insn (body);
28d81abb 1809 }
14a774a9 1810
28d81abb
RK
1811 else
1812 {
1813 rtx obody = body;
1814 int num = noutputs;
14a774a9
RK
1815
1816 if (num == 0)
1817 num = 1;
1818
38a448ca 1819 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
28d81abb
RK
1820
1821 /* For each output operand, store a SET. */
28d81abb
RK
1822 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1823 {
1824 XVECEXP (body, 0, i)
38a448ca
RH
1825 = gen_rtx_SET (VOIDmode,
1826 output_rtx[i],
c5c76735 1827 gen_rtx_ASM_OPERANDS
6462bb43 1828 (GET_MODE (output_rtx[i]),
c5c76735 1829 TREE_STRING_POINTER (string),
84b72302 1830 constraints[i], i, argvec, constraintvec,
c5c76735
JL
1831 filename, line));
1832
28d81abb
RK
1833 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1834 }
1835
1836 /* If there are no outputs (but there are some clobbers)
1837 store the bare ASM_OPERANDS into the PARALLEL. */
1838
1839 if (i == 0)
1840 XVECEXP (body, 0, i++) = obody;
1841
1842 /* Store (clobber REG) for each clobbered register specified. */
1843
b4ccaa16 1844 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1845 {
47ee9bcb 1846 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1847 int j = decode_reg_name (regname);
acb5d088 1848 rtx clobbered_reg;
28d81abb 1849
b4ac57ab 1850 if (j < 0)
28d81abb 1851 {
c09e6498 1852 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1853 continue;
1854
c09e6498
RS
1855 if (j == -4) /* `memory', don't cache memory across asm */
1856 {
bffc6177 1857 XVECEXP (body, 0, i++)
38a448ca 1858 = gen_rtx_CLOBBER (VOIDmode,
c5c76735
JL
1859 gen_rtx_MEM
1860 (BLKmode,
1861 gen_rtx_SCRATCH (VOIDmode)));
c09e6498
RS
1862 continue;
1863 }
1864
956d6950 1865 /* Ignore unknown register, error already signaled. */
cc1f5387 1866 continue;
28d81abb
RK
1867 }
1868
1869 /* Use QImode since that's guaranteed to clobber just one reg. */
acb5d088
HPN
1870 clobbered_reg = gen_rtx_REG (QImode, j);
1871
1872 /* Do sanity check for overlap between clobbers and respectively
1873 input and outputs that hasn't been handled. Such overlap
1874 should have been detected and reported above. */
1875 if (!clobber_conflict_found)
1876 {
1877 int opno;
1878
1879 /* We test the old body (obody) contents to avoid tripping
1880 over the under-construction body. */
1881 for (opno = 0; opno < noutputs; opno++)
1882 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1883 internal_error ("asm clobber conflict with output operand");
1884
1885 for (opno = 0; opno < ninputs - ninout; opno++)
1886 if (reg_overlap_mentioned_p (clobbered_reg,
1887 ASM_OPERANDS_INPUT (obody, opno)))
1888 internal_error ("asm clobber conflict with input operand");
1889 }
1890
b4ccaa16 1891 XVECEXP (body, 0, i++)
acb5d088 1892 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
28d81abb
RK
1893 }
1894
4977bab6 1895 emit_insn (body);
28d81abb
RK
1896 }
1897
947255ed
RH
1898 /* For any outputs that needed reloading into registers, spill them
1899 back to where they belong. */
1900 for (i = 0; i < noutputs; ++i)
1901 if (real_output_rtx[i])
1902 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1903
28d81abb
RK
1904 free_temp_slots ();
1905}
84b72302
RH
1906
1907/* A subroutine of expand_asm_operands. Check that all operands have
1908 the same number of alternatives. Return true if so. */
1909
1910static bool
1911check_operand_nalternatives (outputs, inputs)
1912 tree outputs, inputs;
1913{
1914 if (outputs || inputs)
1915 {
1916 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1917 int nalternatives
1918 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1919 tree next = inputs;
1920
1921 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1922 {
1923 error ("too many alternatives in `asm'");
1924 return false;
1925 }
1926
1927 tmp = outputs;
1928 while (tmp)
1929 {
1930 const char *constraint
1931 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1932
1933 if (n_occurrences (',', constraint) != nalternatives)
1934 {
1935 error ("operand constraints for `asm' differ in number of alternatives");
1936 return false;
1937 }
1938
1939 if (TREE_CHAIN (tmp))
1940 tmp = TREE_CHAIN (tmp);
1941 else
1942 tmp = next, next = 0;
1943 }
1944 }
1945
1946 return true;
1947}
1948
1949/* A subroutine of expand_asm_operands. Check that all operand names
1950 are unique. Return true if so. We rely on the fact that these names
1951 are identifiers, and so have been canonicalized by get_identifier,
1952 so all we need are pointer comparisons. */
1953
1954static bool
1955check_unique_operand_names (outputs, inputs)
1956 tree outputs, inputs;
1957{
1958 tree i, j;
1959
1960 for (i = outputs; i ; i = TREE_CHAIN (i))
1961 {
1962 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1963 if (! i_name)
1964 continue;
1965
1966 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1967 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1968 goto failure;
1969 }
1970
1971 for (i = inputs; i ; i = TREE_CHAIN (i))
1972 {
1973 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1974 if (! i_name)
1975 continue;
1976
1977 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1978 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1979 goto failure;
1980 for (j = outputs; j ; j = TREE_CHAIN (j))
fc552851 1981 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1982 goto failure;
1983 }
1984
1985 return true;
1986
1987 failure:
1988 error ("duplicate asm operand name '%s'",
fc552851 1989 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
84b72302
RH
1990 return false;
1991}
1992
1993/* A subroutine of expand_asm_operands. Resolve the names of the operands
1994 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1995 STRING and in the constraints to those numbers. */
1996
1997static tree
1998resolve_operand_names (string, outputs, inputs, pconstraints)
1999 tree string;
2000 tree outputs, inputs;
2001 const char **pconstraints;
2002{
2003 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2004 char *p;
2005 tree t;
2006
2007 /* Assume that we will not need extra space to perform the substitution.
2008 This because we get to remove '[' and ']', which means we cannot have
2009 a problem until we have more than 999 operands. */
2010
2011 p = buffer;
2012 while ((p = strchr (p, '%')) != NULL)
2013 {
7abcb63a
RH
2014 if (p[1] == '[')
2015 p += 1;
2016 else if (ISALPHA (p[1]) && p[2] == '[')
2017 p += 2;
2018 else
2019 {
2020 p += 1;
2021 continue;
2022 }
2023
84b72302
RH
2024 p = resolve_operand_name_1 (p, outputs, inputs);
2025 }
2026
2027 string = build_string (strlen (buffer), buffer);
2028 free (buffer);
2029
2030 /* Collect output constraints here because it's convenient.
2031 There should be no named operands here; this is verified
2032 in expand_asm_operand. */
2033 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2034 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2035
2036 /* Substitute [<name>] in input constraint strings. */
2037 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2038 {
2039 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2040 if (strchr (c, '[') == NULL)
2041 *pconstraints = c;
2042 else
2043 {
2044 p = buffer = xstrdup (c);
2045 while ((p = strchr (p, '[')) != NULL)
2046 p = resolve_operand_name_1 (p, outputs, inputs);
2047
2048 *pconstraints = ggc_alloc_string (buffer, -1);
2049 free (buffer);
2050 }
2051 }
2052
2053 return string;
2054}
2055
2056/* A subroutine of resolve_operand_names. P points to the '[' for a
2057 potential named operand of the form [<name>]. In place, replace
786de7eb 2058 the name and brackets with a number. Return a pointer to the
84b72302
RH
2059 balance of the string after substitution. */
2060
2061static char *
2062resolve_operand_name_1 (p, outputs, inputs)
2063 char *p;
2064 tree outputs, inputs;
2065{
2066 char *q;
2067 int op;
2068 tree t;
2069 size_t len;
2070
2071 /* Collect the operand name. */
2072 q = strchr (p, ']');
2073 if (!q)
2074 {
2075 error ("missing close brace for named operand");
2076 return strchr (p, '\0');
2077 }
2078 len = q - p - 1;
2079
2080 /* Resolve the name to a number. */
2081 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2082 {
fc552851
RS
2083 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2084 if (name)
edd1967d 2085 {
fc552851 2086 const char *c = TREE_STRING_POINTER (name);
edd1967d
RH
2087 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2088 goto found;
2089 }
84b72302
RH
2090 }
2091 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2092 {
fc552851
RS
2093 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2094 if (name)
edd1967d 2095 {
fc552851 2096 const char *c = TREE_STRING_POINTER (name);
edd1967d
RH
2097 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2098 goto found;
2099 }
84b72302
RH
2100 }
2101
2102 *q = '\0';
2103 error ("undefined named operand '%s'", p + 1);
2104 op = 0;
2105 found:
2106
2107 /* Replace the name with the number. Unfortunately, not all libraries
2108 get the return value of sprintf correct, so search for the end of the
2109 generated string by hand. */
2110 sprintf (p, "%d", op);
2111 p = strchr (p, '\0');
2112
2113 /* Verify the no extra buffer space assumption. */
2114 if (p > q)
2115 abort ();
2116
2117 /* Shift the rest of the buffer down to fill the gap. */
2118 memmove (p, q + 1, strlen (q + 1) + 1);
2119
2120 return p;
2121}
28d81abb
RK
2122\f
2123/* Generate RTL to evaluate the expression EXP
1574ef13
AO
2124 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2125 Provided just for backward-compatibility. expand_expr_stmt_value()
2126 should be used for new code. */
28d81abb
RK
2127
2128void
2129expand_expr_stmt (exp)
2130 tree exp;
2131{
b0832fe1 2132 expand_expr_stmt_value (exp, -1, 1);
1574ef13
AO
2133}
2134
2135/* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2136 whether to (1) save the value of the expression, (0) discard it or
2137 (-1) use expr_stmts_for_value to tell. The use of -1 is
2138 deprecated, and retained only for backward compatibility. */
2139
2140void
b0832fe1 2141expand_expr_stmt_value (exp, want_value, maybe_last)
1574ef13 2142 tree exp;
b0832fe1 2143 int want_value, maybe_last;
1574ef13
AO
2144{
2145 rtx value;
2146 tree type;
2147
2148 if (want_value == -1)
2149 want_value = expr_stmts_for_value != 0;
2150
65ca2d60 2151 /* If -Wextra, warn about statements with no side effects,
28d81abb 2152 except for an explicit cast to void (e.g. for assert()), and
b0832fe1
JJ
2153 except for last statement in ({...}) where they may be useful. */
2154 if (! want_value
2155 && (expr_stmts_for_value == 0 || ! maybe_last)
2156 && exp != error_mark_node)
28d81abb 2157 {
9790cefd
RH
2158 if (! TREE_SIDE_EFFECTS (exp))
2159 {
2160 if ((extra_warnings || warn_unused_value)
2161 && !(TREE_CODE (exp) == CONVERT_EXPR
2162 && VOID_TYPE_P (TREE_TYPE (exp))))
2163 warning_with_file_and_line (emit_filename, emit_lineno,
2164 "statement with no effect");
2165 }
078721e1 2166 else if (warn_unused_value)
28d81abb
RK
2167 warn_if_unused_value (exp);
2168 }
b6ec8c5f
RK
2169
2170 /* If EXP is of function type and we are expanding statements for
2171 value, convert it to pointer-to-function. */
1574ef13 2172 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
b6ec8c5f
RK
2173 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2174
8f17b5c5
MM
2175 /* The call to `expand_expr' could cause last_expr_type and
2176 last_expr_value to get reset. Therefore, we set last_expr_value
2177 and last_expr_type *after* calling expand_expr. */
1574ef13
AO
2178 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2179 VOIDmode, 0);
2180 type = TREE_TYPE (exp);
28d81abb
RK
2181
2182 /* If all we do is reference a volatile value in memory,
2183 copy it to a register to be sure it is actually touched. */
1574ef13 2184 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
28d81abb 2185 {
1574ef13 2186 if (TYPE_MODE (type) == VOIDmode)
6a5bbbe6 2187 ;
1574ef13
AO
2188 else if (TYPE_MODE (type) != BLKmode)
2189 value = copy_to_reg (value);
28d81abb 2190 else
ddbe9812
RS
2191 {
2192 rtx lab = gen_label_rtx ();
4381f7c2 2193
ddbe9812 2194 /* Compare the value with itself to reference it. */
1574ef13
AO
2195 emit_cmp_and_jump_insns (value, value, EQ,
2196 expand_expr (TYPE_SIZE (type),
c5d5d461 2197 NULL_RTX, VOIDmode, 0),
d43e0b7d 2198 BLKmode, 0, lab);
ddbe9812
RS
2199 emit_label (lab);
2200 }
28d81abb
RK
2201 }
2202
2203 /* If this expression is part of a ({...}) and is in memory, we may have
2204 to preserve temporaries. */
1574ef13 2205 preserve_temp_slots (value);
28d81abb
RK
2206
2207 /* Free any temporaries used to evaluate this expression. Any temporary
2208 used as a result of this expression will already have been preserved
2209 above. */
2210 free_temp_slots ();
2211
1574ef13
AO
2212 if (want_value)
2213 {
2214 last_expr_value = value;
2215 last_expr_type = type;
2216 }
2217
28d81abb
RK
2218 emit_queue ();
2219}
2220
2221/* Warn if EXP contains any computations whose results are not used.
2222 Return 1 if a warning is printed; 0 otherwise. */
2223
150a992a 2224int
28d81abb
RK
2225warn_if_unused_value (exp)
2226 tree exp;
2227{
2228 if (TREE_USED (exp))
2229 return 0;
2230
9790cefd
RH
2231 /* Don't warn about void constructs. This includes casting to void,
2232 void function calls, and statement expressions with a final cast
2233 to void. */
2234 if (VOID_TYPE_P (TREE_TYPE (exp)))
2235 return 0;
2236
28d81abb
RK
2237 switch (TREE_CODE (exp))
2238 {
2239 case PREINCREMENT_EXPR:
2240 case POSTINCREMENT_EXPR:
2241 case PREDECREMENT_EXPR:
2242 case POSTDECREMENT_EXPR:
2243 case MODIFY_EXPR:
2244 case INIT_EXPR:
2245 case TARGET_EXPR:
2246 case CALL_EXPR:
2247 case METHOD_CALL_EXPR:
2248 case RTL_EXPR:
81797aba 2249 case TRY_CATCH_EXPR:
28d81abb
RK
2250 case WITH_CLEANUP_EXPR:
2251 case EXIT_EXPR:
28d81abb
RK
2252 return 0;
2253
2254 case BIND_EXPR:
2255 /* For a binding, warn if no side effect within it. */
2256 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2257
de73f171
RK
2258 case SAVE_EXPR:
2259 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2260
28d81abb
RK
2261 case TRUTH_ORIF_EXPR:
2262 case TRUTH_ANDIF_EXPR:
2263 /* In && or ||, warn if 2nd operand has no side effect. */
2264 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2265
2266 case COMPOUND_EXPR:
a646a211
JM
2267 if (TREE_NO_UNUSED_WARNING (exp))
2268 return 0;
28d81abb
RK
2269 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2270 return 1;
4d23e509
RS
2271 /* Let people do `(foo (), 0)' without a warning. */
2272 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2273 return 0;
28d81abb
RK
2274 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2275
2276 case NOP_EXPR:
2277 case CONVERT_EXPR:
b4ac57ab 2278 case NON_LVALUE_EXPR:
28d81abb
RK
2279 /* Don't warn about conversions not explicit in the user's program. */
2280 if (TREE_NO_UNUSED_WARNING (exp))
2281 return 0;
2282 /* Assignment to a cast usually results in a cast of a modify.
55cd1c09
JW
2283 Don't complain about that. There can be an arbitrary number of
2284 casts before the modify, so we must loop until we find the first
2285 non-cast expression and then test to see if that is a modify. */
2286 {
2287 tree tem = TREE_OPERAND (exp, 0);
2288
2289 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2290 tem = TREE_OPERAND (tem, 0);
2291
de73f171
RK
2292 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2293 || TREE_CODE (tem) == CALL_EXPR)
55cd1c09
JW
2294 return 0;
2295 }
7133e992 2296 goto maybe_warn;
28d81abb 2297
d1e1adfb
JM
2298 case INDIRECT_REF:
2299 /* Don't warn about automatic dereferencing of references, since
2300 the user cannot control it. */
2301 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2302 return warn_if_unused_value (TREE_OPERAND (exp, 0));
4381f7c2
KH
2303 /* Fall through. */
2304
28d81abb 2305 default:
ddbe9812 2306 /* Referencing a volatile value is a side effect, so don't warn. */
2f939d94 2307 if ((DECL_P (exp)
ddbe9812
RS
2308 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2309 && TREE_THIS_VOLATILE (exp))
2310 return 0;
8d5e6e25
RK
2311
2312 /* If this is an expression which has no operands, there is no value
2313 to be unused. There are no such language-independent codes,
2314 but front ends may define such. */
2315 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2316 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2317 return 0;
2318
7133e992
JJ
2319 maybe_warn:
2320 /* If this is an expression with side effects, don't warn. */
2321 if (TREE_SIDE_EFFECTS (exp))
2322 return 0;
2323
28d81abb
RK
2324 warning_with_file_and_line (emit_filename, emit_lineno,
2325 "value computed is not used");
2326 return 1;
2327 }
2328}
2329
2330/* Clear out the memory of the last expression evaluated. */
2331
2332void
2333clear_last_expr ()
2334{
e2500fed
GK
2335 last_expr_type = NULL_TREE;
2336 last_expr_value = NULL_RTX;
28d81abb
RK
2337}
2338
b2123dc0
MM
2339/* Begin a statement-expression, i.e., a series of statements which
2340 may return a value. Return the RTL_EXPR for this statement expr.
2341 The caller must save that value and pass it to
2342 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2343 in the statement-expression are deallocated at the end of the
2344 expression. */
28d81abb
RK
2345
2346tree
b2123dc0
MM
2347expand_start_stmt_expr (has_scope)
2348 int has_scope;
28d81abb 2349{
ca695ac9
JB
2350 tree t;
2351
28d81abb
RK
2352 /* Make the RTL_EXPR node temporary, not momentary,
2353 so that rtl_expr_chain doesn't become garbage. */
ca695ac9 2354 t = make_node (RTL_EXPR);
33c6ab80 2355 do_pending_stack_adjust ();
b2123dc0
MM
2356 if (has_scope)
2357 start_sequence_for_rtl_expr (t);
2358 else
2359 start_sequence ();
28d81abb
RK
2360 NO_DEFER_POP;
2361 expr_stmts_for_value++;
2362 return t;
2363}
2364
2365/* Restore the previous state at the end of a statement that returns a value.
2366 Returns a tree node representing the statement's value and the
2367 insns to compute the value.
2368
2369 The nodes of that expression have been freed by now, so we cannot use them.
2370 But we don't want to do that anyway; the expression has already been
2371 evaluated and now we just want to use the value. So generate a RTL_EXPR
2372 with the proper type and RTL value.
2373
2374 If the last substatement was not an expression,
2375 return something with type `void'. */
2376
2377tree
2378expand_end_stmt_expr (t)
2379 tree t;
2380{
2381 OK_DEFER_POP;
2382
1574ef13 2383 if (! last_expr_value || ! last_expr_type)
28d81abb 2384 {
28d81abb 2385 last_expr_value = const0_rtx;
1574ef13 2386 last_expr_type = void_type_node;
28d81abb 2387 }
28d81abb
RK
2388 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2389 /* Remove any possible QUEUED. */
2390 last_expr_value = protect_from_queue (last_expr_value, 0);
2391
2392 emit_queue ();
2393
2394 TREE_TYPE (t) = last_expr_type;
2395 RTL_EXPR_RTL (t) = last_expr_value;
2396 RTL_EXPR_SEQUENCE (t) = get_insns ();
2397
2398 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2399
2400 end_sequence ();
2401
2402 /* Don't consider deleting this expr or containing exprs at tree level. */
2403 TREE_SIDE_EFFECTS (t) = 1;
2404 /* Propagate volatility of the actual RTL expr. */
2405 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2406
e2500fed 2407 clear_last_expr ();
28d81abb
RK
2408 expr_stmts_for_value--;
2409
2410 return t;
2411}
2412\f
28d81abb
RK
2413/* Generate RTL for the start of an if-then. COND is the expression
2414 whose truth should be tested.
2415
2416 If EXITFLAG is nonzero, this conditional is visible to
2417 `exit_something'. */
2418
2419void
2420expand_start_cond (cond, exitflag)
2421 tree cond;
2422 int exitflag;
2423{
2424 struct nesting *thiscond = ALLOC_NESTING ();
2425
2426 /* Make an entry on cond_stack for the cond we are entering. */
2427
e2500fed 2428 thiscond->desc = COND_NESTING;
28d81abb
RK
2429 thiscond->next = cond_stack;
2430 thiscond->all = nesting_stack;
2431 thiscond->depth = ++nesting_depth;
2432 thiscond->data.cond.next_label = gen_label_rtx ();
2433 /* Before we encounter an `else', we don't need a separate exit label
2434 unless there are supposed to be exit statements
2435 to exit this conditional. */
2436 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2437 thiscond->data.cond.endif_label = thiscond->exit_label;
2438 cond_stack = thiscond;
2439 nesting_stack = thiscond;
2440
b93a436e 2441 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
2442}
2443
2444/* Generate RTL between then-clause and the elseif-clause
2445 of an if-then-elseif-.... */
2446
2447void
2448expand_start_elseif (cond)
2449 tree cond;
2450{
2451 if (cond_stack->data.cond.endif_label == 0)
2452 cond_stack->data.cond.endif_label = gen_label_rtx ();
2453 emit_jump (cond_stack->data.cond.endif_label);
2454 emit_label (cond_stack->data.cond.next_label);
2455 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2456 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
2457}
2458
2459/* Generate RTL between the then-clause and the else-clause
2460 of an if-then-else. */
2461
2462void
2463expand_start_else ()
2464{
2465 if (cond_stack->data.cond.endif_label == 0)
2466 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2467
28d81abb
RK
2468 emit_jump (cond_stack->data.cond.endif_label);
2469 emit_label (cond_stack->data.cond.next_label);
0f41302f 2470 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
2471}
2472
d947ba59
RK
2473/* After calling expand_start_else, turn this "else" into an "else if"
2474 by providing another condition. */
2475
2476void
2477expand_elseif (cond)
2478 tree cond;
2479{
2480 cond_stack->data.cond.next_label = gen_label_rtx ();
2481 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2482}
2483
28d81abb
RK
2484/* Generate RTL for the end of an if-then.
2485 Pop the record for it off of cond_stack. */
2486
2487void
2488expand_end_cond ()
2489{
2490 struct nesting *thiscond = cond_stack;
2491
b93a436e
JL
2492 do_pending_stack_adjust ();
2493 if (thiscond->data.cond.next_label)
2494 emit_label (thiscond->data.cond.next_label);
2495 if (thiscond->data.cond.endif_label)
2496 emit_label (thiscond->data.cond.endif_label);
28d81abb
RK
2497
2498 POPSTACK (cond_stack);
e2500fed 2499 clear_last_expr ();
28d81abb
RK
2500}
2501\f
2502/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2503 loop should be exited by `exit_something'. This is a loop for which
2504 `expand_continue' will jump to the top of the loop.
2505
2506 Make an entry on loop_stack to record the labels associated with
2507 this loop. */
2508
2509struct nesting *
2510expand_start_loop (exit_flag)
2511 int exit_flag;
2512{
b3694847 2513 struct nesting *thisloop = ALLOC_NESTING ();
28d81abb
RK
2514
2515 /* Make an entry on loop_stack for the loop we are entering. */
2516
e2500fed 2517 thisloop->desc = LOOP_NESTING;
28d81abb
RK
2518 thisloop->next = loop_stack;
2519 thisloop->all = nesting_stack;
2520 thisloop->depth = ++nesting_depth;
2521 thisloop->data.loop.start_label = gen_label_rtx ();
2522 thisloop->data.loop.end_label = gen_label_rtx ();
2523 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2524 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2525 loop_stack = thisloop;
2526 nesting_stack = thisloop;
2527
2528 do_pending_stack_adjust ();
2529 emit_queue ();
6496a589 2530 emit_note (NULL, NOTE_INSN_LOOP_BEG);
28d81abb
RK
2531 emit_label (thisloop->data.loop.start_label);
2532
2533 return thisloop;
2534}
2535
2536/* Like expand_start_loop but for a loop where the continuation point
2537 (for expand_continue_loop) will be specified explicitly. */
2538
2539struct nesting *
2540expand_start_loop_continue_elsewhere (exit_flag)
2541 int exit_flag;
2542{
2543 struct nesting *thisloop = expand_start_loop (exit_flag);
2544 loop_stack->data.loop.continue_label = gen_label_rtx ();
2545 return thisloop;
2546}
2547
f0de0c5d
RH
2548/* Begin a null, aka do { } while (0) "loop". But since the contents
2549 of said loop can still contain a break, we must frob the loop nest. */
2550
2551struct nesting *
2552expand_start_null_loop ()
2553{
b3694847 2554 struct nesting *thisloop = ALLOC_NESTING ();
f0de0c5d
RH
2555
2556 /* Make an entry on loop_stack for the loop we are entering. */
2557
e2500fed 2558 thisloop->desc = LOOP_NESTING;
f0de0c5d
RH
2559 thisloop->next = loop_stack;
2560 thisloop->all = nesting_stack;
2561 thisloop->depth = ++nesting_depth;
2562 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2563 thisloop->data.loop.end_label = gen_label_rtx ();
ba89764a 2564 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
f0de0c5d
RH
2565 thisloop->exit_label = thisloop->data.loop.end_label;
2566 loop_stack = thisloop;
2567 nesting_stack = thisloop;
2568
2569 return thisloop;
2570}
2571
28d81abb
RK
2572/* Specify the continuation point for a loop started with
2573 expand_start_loop_continue_elsewhere.
2574 Use this at the point in the code to which a continue statement
2575 should jump. */
2576
2577void
2578expand_loop_continue_here ()
2579{
2580 do_pending_stack_adjust ();
6496a589 2581 emit_note (NULL, NOTE_INSN_LOOP_CONT);
28d81abb
RK
2582 emit_label (loop_stack->data.loop.continue_label);
2583}
2584
2585/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2586 Pop the block off of loop_stack. */
2587
2588void
2589expand_end_loop ()
2590{
0720f6fb 2591 rtx start_label = loop_stack->data.loop.start_label;
e803a64b
RH
2592 rtx etc_note;
2593 int eh_regions, debug_blocks;
4977bab6 2594 bool empty_test;
28d81abb
RK
2595
2596 /* Mark the continue-point at the top of the loop if none elsewhere. */
2597 if (start_label == loop_stack->data.loop.continue_label)
2598 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2599
2600 do_pending_stack_adjust ();
2601
e803a64b 2602 /* If the loop starts with a loop exit, roll that to the end where
a7d308f7 2603 it will optimize together with the jump back.
93de5c31 2604
e803a64b 2605 If the loop presently looks like this (in pseudo-C):
93de5c31 2606
e803a64b
RH
2607 LOOP_BEG
2608 start_label:
2609 if (test) goto end_label;
2610 LOOP_END_TOP_COND
2611 body;
2612 goto start_label;
2613 end_label:
4381f7c2 2614
93de5c31
MM
2615 transform it to look like:
2616
e803a64b
RH
2617 LOOP_BEG
2618 goto start_label;
2619 top_label:
2620 body;
2621 start_label:
2622 if (test) goto end_label;
2623 goto top_label;
2624 end_label:
2625
2626 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
14b493d6 2627 the end of the entry conditional. Without this, our lexical scan
e803a64b
RH
2628 can't tell the difference between an entry conditional and a
2629 body conditional that exits the loop. Mistaking the two means
786de7eb 2630 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
e803a64b
RH
2631 screw up loop unrolling.
2632
2633 Things will be oh so much better when loop optimization is done
2634 off of a proper control flow graph... */
2635
2636 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2637
4977bab6 2638 empty_test = true;
e803a64b
RH
2639 eh_regions = debug_blocks = 0;
2640 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2641 if (GET_CODE (etc_note) == NOTE)
2642 {
2643 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2644 break;
28d81abb 2645
e803a64b
RH
2646 /* We must not walk into a nested loop. */
2647 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2648 {
2649 etc_note = NULL_RTX;
28d81abb 2650 break;
e803a64b 2651 }
28d81abb 2652
e803a64b
RH
2653 /* At the same time, scan for EH region notes, as we don't want
2654 to scrog region nesting. This shouldn't happen, but... */
2655 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2656 eh_regions++;
2657 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2658 {
2659 if (--eh_regions < 0)
2660 /* We've come to the end of an EH region, but never saw the
2661 beginning of that region. That means that an EH region
2662 begins before the top of the loop, and ends in the middle
2663 of it. The existence of such a situation violates a basic
2664 assumption in this code, since that would imply that even
2665 when EH_REGIONS is zero, we might move code out of an
2666 exception region. */
2667 abort ();
2668 }
0720f6fb 2669
e803a64b
RH
2670 /* Likewise for debug scopes. In this case we'll either (1) move
2671 all of the notes if they are properly nested or (2) leave the
786de7eb 2672 notes alone and only rotate the loop at high optimization
e803a64b
RH
2673 levels when we expect to scrog debug info. */
2674 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2675 debug_blocks++;
2676 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2677 debug_blocks--;
2678 }
4977bab6
ZW
2679 else if (INSN_P (etc_note))
2680 empty_test = false;
28d81abb 2681
e803a64b
RH
2682 if (etc_note
2683 && optimize
4977bab6 2684 && ! empty_test
e803a64b
RH
2685 && eh_regions == 0
2686 && (debug_blocks == 0 || optimize >= 2)
2687 && NEXT_INSN (etc_note) != NULL_RTX
2688 && ! any_condjump_p (get_last_insn ()))
2689 {
2690 /* We found one. Move everything from START to ETC to the end
2691 of the loop, and add a jump from the top of the loop. */
2692 rtx top_label = gen_label_rtx ();
2693 rtx start_move = start_label;
2694
2695 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2696 then we want to move this note also. */
2697 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2698 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2699 start_move = PREV_INSN (start_move);
2700
2701 emit_label_before (top_label, start_move);
2702
2703 /* Actually move the insns. If the debug scopes are nested, we
2704 can move everything at once. Otherwise we have to move them
2705 one by one and squeeze out the block notes. */
2706 if (debug_blocks == 0)
2707 reorder_insns (start_move, etc_note, get_last_insn ());
2708 else
28d81abb 2709 {
e803a64b 2710 rtx insn, next_insn;
93de5c31
MM
2711 for (insn = start_move; insn; insn = next_insn)
2712 {
2713 /* Figure out which insn comes after this one. We have
2714 to do this before we move INSN. */
e803a64b 2715 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
93de5c31
MM
2716
2717 if (GET_CODE (insn) == NOTE
2718 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2719 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
93de5c31
MM
2720 continue;
2721
93de5c31
MM
2722 reorder_insns (insn, insn, get_last_insn ());
2723 }
28d81abb 2724 }
28d81abb 2725
e803a64b
RH
2726 /* Add the jump from the top of the loop. */
2727 emit_jump_insn_before (gen_jump (start_label), top_label);
2728 emit_barrier_before (top_label);
2729 start_label = top_label;
a7d308f7 2730 }
e803a64b
RH
2731
2732 emit_jump (start_label);
2733 emit_note (NULL, NOTE_INSN_LOOP_END);
28d81abb
RK
2734 emit_label (loop_stack->data.loop.end_label);
2735
2736 POPSTACK (loop_stack);
2737
e2500fed 2738 clear_last_expr ();
28d81abb
RK
2739}
2740
/* Finish a null loop, aka do { } while (0).

   Flushes any pending stack adjustment, emits the loop's end label,
   and pops the loop entry off of LOOP_STACK.  No body or jump-back
   is ever emitted for such a loop.  */

void
expand_end_null_loop ()
{
  do_pending_stack_adjust ();
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);

  clear_last_expr ();
}
2753
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.

   WHICHLOOP may name an enclosing loop explicitly; if 0, the
   innermost loop (LOOP_STACK) is used.  Returns 1 on success.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  /* Emit information for branch prediction.  */
  rtx note;

  /* NOTE(review): this prediction note is emitted unconditionally,
     even when we fall through to the "not inside a loop" failure
     return below -- confirm that is intended.  */
  note = emit_note (NULL, NOTE_INSN_PREDICTION);
  NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
  clear_last_expr ();
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}
2777
2778/* Generate a jump to exit the current loop. If not currently inside a loop,
2779 return 0 and do nothing; caller will print an error message. */
2780
2781int
2782expand_exit_loop (whichloop)
2783 struct nesting *whichloop;
2784{
e2500fed 2785 clear_last_expr ();
28d81abb
RK
2786 if (whichloop == 0)
2787 whichloop = loop_stack;
2788 if (whichloop == 0)
2789 return 0;
37366632 2790 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
28d81abb
RK
2791 return 1;
2792}
2793
/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.

   Constant conditions are folded here: a nonzero constant emits
   nothing (loop never exits), a zero constant becomes an
   unconditional exit via expand_exit_loop.  */

int
expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  rtx label;
  clear_last_expr ();

  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;

  if (integer_nonzerop (cond))
    return 1;
  if (integer_zerop (cond))
    return expand_exit_loop (whichloop);

  /* Check if we definitely won't need a fixup.  */
  if (whichloop == nesting_stack)
    {
      jumpifnot (cond, whichloop->data.loop.end_label);
      return 1;
    }

  /* In order to handle fixups, we actually create a conditional jump
     around an unconditional branch to exit the loop.  If fixups are
     necessary, they go before the unconditional branch.  */

  label = gen_label_rtx ();
  jumpif (cond, label);
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
			NULL_RTX);
  emit_label (label);

  return 1;
}
2835
/* Like expand_exit_loop_if_false except also emit a note marking
   the end of the conditional.  Should only be used immediately
   after expand_loop_start.

   The NOTE_INSN_LOOP_END_TOP_COND note lets expand_end_loop's
   lexical scan distinguish an entry conditional from a body
   conditional when rotating the loop.  Returns 0 (and emits no
   note) when not inside a loop.  */

int
expand_exit_loop_top_cond (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  if (! expand_exit_loop_if_false (whichloop, cond))
    return 0;

  emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
  return 1;
}
2851
/* Return nonzero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

int
preserve_subexpressions_p ()
{
  rtx insn;

  if (flag_expensive_optimizations)
    return 1;

  /* Bail out when there is no statement state or no enclosing loop.  */
  if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  /* Heuristic: treat the loop as "early" while fewer than about
     3 * N_NON_FIXED_REGS insn UIDs have been used since its start
     label.  UID distance only approximates insn count.  */
  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));

}
2877
2878/* Generate a jump to exit the current loop, conditional, binding contour
2879 or case statement. Not all such constructs are visible to this function,
2880 only those started with EXIT_FLAG nonzero. Individual languages use
2881 the EXIT_FLAG parameter to control which kinds of constructs you can
2882 exit this way.
2883
2884 If not currently inside anything that can be exited,
2885 return 0 and do nothing; caller will print an error message. */
2886
2887int
2888expand_exit_something ()
2889{
2890 struct nesting *n;
e2500fed 2891 clear_last_expr ();
28d81abb
RK
2892 for (n = nesting_stack; n; n = n->all)
2893 if (n->exit_label != 0)
2894 {
37366632 2895 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
28d81abb
RK
2896 return 1;
2897 }
2898
2899 return 0;
2900}
2901\f
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  rtx last_insn;

  last_insn = get_last_insn ();

  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  /* Jump to the function's cleanup/return label, pretending the
     return happens after LAST_INSN so cleanups land correctly.  */
  expand_null_return_1 (last_insn);
}
2919
/* Try to guess whether the value of return means error code.

   VAL is the rtx being returned.  Returns a branch predictor kind
   (or PRED_NO_PREDICTION) for use with NOTE_INSN_PREDICTION notes.  */
static enum br_predictor
return_prediction (val)
     rtx val;
{
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    {
      /* NULL is usually not returned.  */
      if (val == const0_rtx)
	return PRED_NULL_RETURN;
    }
  else
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (GET_CODE (val) == CONST_INT
	  && INTVAL (val) < 0)
	return PRED_NEGATIVE_RETURN;
      /* Constant return values are also usually errors;
	 zero/one often mean booleans so exclude them from the
	 heuristics.  */
      if (CONSTANT_P (val)
	  && (val != const0_rtx && val != const1_rtx))
	return PRED_CONST_RETURN;
    }
  return PRED_NO_PREDICTION;
}
2948
/* Generate RTL to return from the current function, with value VAL.

   VAL is copied (with any PROMOTE_FUNCTION_RETURN mode promotion)
   into the function's return location unless it is already there,
   then control jumps to the cleanup/return label.  */

static void
expand_value_return (val)
     rtx val;
{
  rtx last_insn;
  rtx return_reg;
  enum br_predictor pred;

  if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
    {
      /* Emit information for branch prediction.  */
      rtx note;

      note = emit_note (NULL, NOTE_INSN_PREDICTION);

      NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);

    }

  last_insn = get_last_insn ();
  return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef PROMOTE_FUNCTION_RETURN
      /* Widen VAL to the promoted return mode when the target
	 promotes function return values.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode old_mode
	= DECL_MODE (DECL_RESULT (current_function_decl));
      enum machine_mode mode
	= promote_mode (type, old_mode, &unsignedp, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);
#endif
      /* A PARALLEL return location describes a value split across
	 several registers; load them as a group.  */
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 (last_insn);
}
2997
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.

   Shared tail of expand_null_return and expand_value_return: jumps
   to CLEANUP_LABEL if one exists, otherwise to RETURN_LABEL
   (creating it on first use).  */

static void
expand_null_return_1 (last_insn)
     rtx last_insn;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  clear_last_expr ();

  if (end_label == 0)
    end_label = return_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}
3015\f
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.

   RETVAL is typically a MODIFY_EXPR assigning to the RESULT_DECL,
   the RESULT_DECL itself, or error_mark_node.  Handles four cases:
   void functions, distributing `return c ? f() : g();' to expose tail
   calls, BLKmode aggregates returned in registers, and the ordinary
   scalar/PARALLEL register return.  */

void
expand_return (retval)
     tree retval;
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (VOID_TYPE_P (TREE_TYPE (retval)))
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      tree expr;

      /* Rewrite `return c ? a : b;' as two returns guarded by a jump
	 on C, recursing into expand_return for each arm.  */
      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      start_cleanup_deferral ();
      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 1));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      emit_label (label);

      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 2));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      end_cleanup_deferral ();
      return;
    }

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  The value must go into a pseudo in case there
     are cleanups that will clobber the real return register.  */

  if (retval_rhs != 0
      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
      && GET_CODE (result_rtl) == REG)
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT big_endian_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      /* A zero-sized aggregate degenerates to returning nothing.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* Structures whose size is not a multiple of a word are aligned
	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
	 machine, this means we must skip the empty high order bytes when
	 calculating the bit offset.  */
      if (BYTES_BIG_ENDIAN
	  && bytes % UNITS_PER_WORD)
	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
						  * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = big_endian_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == big_endian_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == big_endian_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode,
					      BITS_PER_WORD),
			   BITS_PER_WORD);
	}

      /* Find the smallest integer mode large enough to hold the
	 entire structure and use that mode instead of BLKmode
	 on the USE insn for the return register.  */
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	/* Have we found a large enough mode?  */
	if (GET_MODE_SIZE (tmpmode) >= bytes)
	  break;

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
	abort ();

      PUT_MODE (result_rtl, tmpmode);

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      emit_queue ();
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (GET_CODE (result_rtl) == REG
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      emit_queue ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      emit_queue ();
      expand_value_return (result_rtl);
    }
}
28d81abb 3234\f
/* Attempt to optimize a potential tail recursion call into a goto.
   ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
   where to place the jump to the tail recursion label.

   Return TRUE if the call was optimized into a goto.  */

int
optimize_tail_recursion (arguments, last_insn)
     tree arguments;
     rtx last_insn;
{
  /* Finish checking validity, and if valid emit code to set the
     argument variables for the new call.  */
  if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
    {
      /* Lazily create the label at the function's reentry point the
	 first time a tail recursion is found.  */
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      emit_queue ();
      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
      emit_barrier ();
      return 1;
    }
  return 0;
}
3263
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */

static int
tail_recursion_args (actuals, formals)
     tree actuals, formals;
{
  tree a = actuals, f = formals;
  int i;
  rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
	return 0;
      /* Formals living in memory (or BLKmode) cannot be updated
	 safely; require register scalars.  */
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  /* Mismatched argument counts.  */
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      int copy = 0;
      int j;
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  {
	    copy = 1;
	    break;
	  }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	{
	  rtx tmp = argvec[i];

	  /* When the declared mode differs from the RTL's mode,
	     convert in two steps through a scratch register.  */
	  if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
	    {
	      tmp = gen_reg_rtx (DECL_MODE (f));
	      convert_move (tmp, argvec[i],
			    TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
	    }
	  convert_move (DECL_RTL (f), tmp,
			TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
	}
    }

  free_temp_slots ();
  return 1;
}
3345\f
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   FLAGS is a bitwise or of the following flags:

     1 - Nonzero if this construct should be visible to
         `exit_something'.

     2 - Nonzero if this contour does not require a
	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
	 language-independent code should set this flag because they
	 will not create corresponding BLOCK nodes.  (There should be
	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
	 when expand_end_bindings is called.

    If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
    optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
    note.  */

void
expand_start_bindings_and_block (flags, block)
     int flags;
     tree block;
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note;
  int exit_flag = ((flags & 1) != 0);
  int block_flag = ((flags & 2) == 0);

  /* If a BLOCK is supplied, then the caller should be requesting a
     NOTE_INSN_BLOCK_BEG note.  */
  if (!block_flag && block)
    abort ();

  /* Create a note to mark the beginning of the block.  */
  if (block_flag)
    {
      note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
      NOTE_BLOCK (note) = block;
    }
  else
    note = emit_note (NULL, NOTE_INSN_DELETED);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->desc = BLOCK_NESTING;
  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.n_function_calls = 0;
  thisblock->data.block.exception_region = 0;
  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;

  thisblock->data.block.conditional_code = 0;
  thisblock->data.block.last_unconditional_cleanup = note;
  /* When we insert instructions after the last unconditional cleanup,
     we don't adjust last_insn.  That means that a later add_insn will
     clobber the instructions we've just added.  The easiest way to
     fix this is to just insert another instruction here, so that the
     instructions inserted after the last unconditional cleanup are
     never the last instruction.  */
  emit_note (NULL, NOTE_INSN_DELETED);

  /* Chain the enclosing block's cleanups onto ours so inner exits
     can run them too.  */
  if (block_stack
      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
  else
    thisblock->data.block.outer_cleanups = 0;
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++current_block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
  push_temp_slots ();
}
3431
/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
   expand_expr are made.  After we end the region, we know that all
   space for all temporaries that were created by TARGET_EXPRs will be
   destroyed and their space freed for reuse.  */

void
expand_start_target_temps ()
{
  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  push_temp_slots ();

  /* Start a new binding layer that will keep track of all cleanup
     actions to be performed.  */
  expand_start_bindings (2);

  target_temp_slot_level = temp_slot_level;
}
3451
/* End the TARGET_EXPR temporary scope opened by
   expand_start_target_temps, running its cleanups and releasing the
   temporary slots.  */

void
expand_end_target_temps ()
{
  expand_end_bindings (NULL_TREE, 0, 0);

  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  pop_temp_slots ();
}
3461
0e9e1e0a 3462/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
deb5e280
JM
3463 in question represents the outermost pair of curly braces (i.e. the "body
3464 block") of a function or method.
3465
3466 For any BLOCK node representing a "body block" of a function or method, the
3467 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3468 represents the outermost (function) scope for the function or method (i.e.
3469 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 3470 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
3471
3472int
3473is_body_block (stmt)
b3694847 3474 tree stmt;
deb5e280
JM
3475{
3476 if (TREE_CODE (stmt) == BLOCK)
3477 {
3478 tree parent = BLOCK_SUPERCONTEXT (stmt);
3479
3480 if (parent && TREE_CODE (parent) == BLOCK)
3481 {
3482 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3483
3484 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3485 return 1;
3486 }
3487 }
3488
3489 return 0;
3490}
3491
e976b8b2
MS
3492/* True if we are currently emitting insns in an area of output code
3493 that is controlled by a conditional expression. This is used by
3494 the cleanup handling code to generate conditional cleanup actions. */
3495
3496int
3497conditional_context ()
3498{
3499 return block_stack && block_stack->data.block.conditional_code;
3500}
3501
91088ddb
JM
3502/* Return an opaque pointer to the current nesting level, so frontend code
3503 can check its own sanity. */
3504
3505struct nesting *
3506current_nesting_level ()
3507{
3508 return cfun ? block_stack : 0;
3509}
3510
/* Emit a handler label for a nonlocal goto handler.
   Also emit code to store the handler label in SLOT before BEFORE_INSN.

   Returns the newly created handler label, which is marked
   LABEL_PRESERVE_P so later CFG cleanups cannot delete it.  */

static rtx
expand_nl_handler_label (slot, before_insn)
     rtx slot, before_insn;
{
  rtx insns;
  rtx handler_label = gen_label_rtx ();

  /* Don't let cleanup_cfg delete the handler.  */
  LABEL_PRESERVE_P (handler_label) = 1;

  /* Build the store of the label address into SLOT as a detached
     sequence, then splice it in ahead of BEFORE_INSN.  */
  start_sequence ();
  emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
  insns = get_insns ();
  end_sequence ();
  emit_insn_before (insns, before_insn);

  emit_label (handler_label);

  return handler_label;
}
3534
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver ()
{
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  /* Let the target emit any additional receiver setup it needs.  */
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif
}
3589
/* Make handlers for nonlocal gotos taking place in the function calls in
   block THISBLOCK.  Emits one handler per still-valid nonlocal label,
   plus a single trapping handler covering all labels that are no longer
   valid to jump to, and records the handler labels in
   nonlocal_goto_handler_labels.  */

static void
expand_nl_goto_receivers (thisblock)
     struct nesting *thisblock;
{
  tree link;
  rtx afterward = gen_label_rtx ();
  rtx insns, slot;
  rtx label_list;
  int any_invalid;

  /* Record the handler address in the stack slot for that purpose,
     during this block, saving and restoring the outer value.
     The save (slot -> save_receiver) is spliced in at the block's first
     insn; the restore (save_receiver -> slot) is emitted here, at the
     block's end.  */
  if (thisblock->next != 0)
    for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
      {
	rtx save_receiver = gen_reg_rtx (Pmode);
	emit_move_insn (XEXP (slot, 0), save_receiver);

	start_sequence ();
	emit_move_insn (save_receiver, XEXP (slot, 0));
	insns = get_insns ();
	end_sequence ();
	emit_insn_before (insns, thisblock->data.block.first_insn);
      }

  /* Jump around the handlers; they run only when specially invoked.  */
  emit_jump (afterward);

  /* Make a separate handler for each label.  */
  link = nonlocal_labels;
  slot = nonlocal_goto_handler_slots;
  label_list = NULL_RTX;
  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
    /* Skip any labels we shouldn't be able to jump to from here,
       we generate one special handler for all of them below which just calls
       abort.  */
    if (! DECL_TOO_LATE (TREE_VALUE (link)))
      {
	rtx lab;
	lab = expand_nl_handler_label (XEXP (slot, 0),
				       thisblock->data.block.first_insn);
	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);

	expand_nl_goto_receiver ();

	/* Jump to the "real" nonlocal label.  */
	expand_goto (TREE_VALUE (link));
      }

  /* A second pass over all nonlocal labels; this time we handle those
     we should not be able to jump to at this point.  They all share one
     handler that traps.  */
  link = nonlocal_labels;
  slot = nonlocal_goto_handler_slots;
  any_invalid = 0;
  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
    if (DECL_TOO_LATE (TREE_VALUE (link)))
      {
	rtx lab;
	lab = expand_nl_handler_label (XEXP (slot, 0),
				       thisblock->data.block.first_insn);
	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
	any_invalid = 1;
      }

  if (any_invalid)
    {
      expand_nl_goto_receiver ();
      expand_builtin_trap ();
    }

  nonlocal_goto_handler_labels = label_list;
  emit_label (afterward);
}
3666
ba716677
MM
3667/* Warn about any unused VARS (which may contain nodes other than
3668 VAR_DECLs, but such nodes are ignored). The nodes are connected
3669 via the TREE_CHAIN field. */
3670
3671void
3672warn_about_unused_variables (vars)
3673 tree vars;
3674{
3675 tree decl;
3676
078721e1 3677 if (warn_unused_variable)
ba716677 3678 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 3679 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
3680 && ! TREE_USED (decl)
3681 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 3682 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ba716677
MM
3683 warning_with_decl (decl, "unused variable `%s'");
3684}
3685
/* Generate RTL code to terminate a binding contour.

   VARS is the chain of VAR_DECL nodes for the variables bound in this
   contour.  There may actually be other nodes in this chain, but any
   nodes other than VAR_DECLS are ignored.

   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  struct nesting *thisblock = block_stack;

  /* If any of the variables in this scope were not used, warn the
     user.  */
  warn_about_unused_variables (vars);

  /* If `exit_something' was used inside this block, resolve its label
     here, after any pending stack adjustment.  */
  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make handlers for nonlocal gotos taking
     place in the function calls in this block.  The function-call count
     comparison detects whether any calls were made inside the block.  */
  if (function_call_count != thisblock->data.block.n_function_calls
      && nonlocal_labels
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    expand_nl_goto_receivers (thisblock);

  /* Don't allow jumping into a block that has a stack level.
     Cleanups are allowed, though.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      int reachable;
      rtx insn;

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Only clean up here if this point can actually be reached:
	 a trailing BARRIER means control never falls off the block.  */
      insn = get_last_insn ();
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      reachable = (! insn || GET_CODE (insn) != BARRIER);

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
      if (reachable)
	do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  /* The restore clobbered the nonlocal save area; re-save it.  */
	  if (nonlocal_goto_handler_slots != 0)
	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
			     NULL_RTX);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    {
      rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
      NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
    }
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
3826\f
7393c642
RK
3827/* Generate code to save the stack pointer at the start of the current block
3828 and set up to restore it on exit. */
3829
3830void
3831save_stack_pointer ()
3832{
3833 struct nesting *thisblock = block_stack;
3834
3835 if (thisblock->data.block.stack_level == 0)
3836 {
3837 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3838 &thisblock->data.block.stack_level,
3839 thisblock->data.block.first_insn);
3840 stack_block_stack = thisblock;
3841 }
3842}
3843\f
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)

   Four strategies, chosen in order: an error-type placeholder MEM, an
   address register for an incomplete type whose size the initializer
   will decide, a pseudo-register for small non-volatile scalars, a
   fixed stack temporary, or dynamically allocated stack space for
   variable-sized objects.  */

void
expand_decl (decl)
     tree decl;
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && (DECL_REGISTER (decl) || optimize))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      mark_user_reg (DECL_RTL (decl));

      /* Record alignment info for pointer-typed variables so later
	 passes can exploit it.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

      maybe_set_unchanging (DECL_RTL (decl), decl);

      /* If something wants our address, try to use ADDRESSOF.  */
      if (TREE_ADDRESSABLE (decl))
	put_var_into_stack (decl);
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  Note that with
	 -fstack-check, over-large variables fall through to the dynamic
	 case below so the allocation gets checked.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();
      save_stack_pointer ();

      /* In function-at-a-time mode, variable_size doesn't expand this,
	 so do it now.  */
      if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
	expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
		     const0_rtx, VOIDmode, 0);

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}
4004\f
/* Emit code to perform the initialization of a declaration DECL.
   For an erroneous initializer, scalar decls are zero-filled to avoid
   cascading problems; otherwise the initializer is expanded as an
   assignment.  The decl's TREE_USED flag is preserved across the
   expansion.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
     for static decls.  */
  if (TREE_CODE (decl) == CONST_DECL
      || TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      /* After an erroneous initializer, give scalars a well-defined
	 zero value so later code doesn't see garbage.  */
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
}
4045
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
   CLEANUP multiple times, and have the correct semantics.  This
   happens in exception handling, for gotos, returns, breaks that
   leave the current scope.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   Returns 0 if there is no enclosing block, 1 otherwise.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock;

  /* Error if we are not in any block.  */
  if (cfun == 0 || block_stack == 0)
    return 0;

  thisblock = block_stack;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      tree t;
      rtx seq;
      tree *cleanups = &thisblock->data.block.cleanups;
      int cond_context = conditional_context ();

      if (cond_context)
	{
	  /* In a conditional context, guard the cleanup with a runtime
	     flag: cleared at the block's last unconditional point,
	     set here where the cleanup is registered.  */
	  rtx flag = gen_reg_rtx (word_mode);
	  rtx set_flag_0;
	  tree cond;

	  start_sequence ();
	  emit_move_insn (flag, const0_rtx);
	  set_flag_0 = get_insns ();
	  end_sequence ();

	  thisblock->data.block.last_unconditional_cleanup
	    = emit_insn_after (set_flag_0,
			       thisblock->data.block.last_unconditional_cleanup);

	  emit_move_insn (flag, const1_rtx);

	  cond = build_decl (VAR_DECL, NULL_TREE,
			     (*lang_hooks.types.type_for_mode) (word_mode, 1));
	  SET_DECL_RTL (cond, flag);

	  /* Conditionalize the cleanup.  */
	  cleanup = build (COND_EXPR, void_type_node,
			   (*lang_hooks.truthvalue_conversion) (cond),
			   cleanup, integer_zero_node);
	  cleanup = fold (cleanup);

	  cleanups = &thisblock->data.block.cleanups;
	}

      cleanup = unsave_expr (cleanup);

      t = *cleanups = tree_cons (decl, cleanup, *cleanups);

      if (! cond_context)
	/* If this block has a cleanup, it belongs in stack_block_stack.  */
	stack_block_stack = thisblock;

      if (cond_context)
	{
	  start_sequence ();
	}

      if (! using_eh_for_cleanups_p)
	TREE_ADDRESSABLE (t) = 1;
      else
	expand_eh_region_start ();

      if (cond_context)
	{
	  /* Move any insns emitted by the EH-region start out of the
	     conditional path, to the last unconditional point.  */
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    thisblock->data.block.last_unconditional_cleanup
	      = emit_insn_after (seq,
				 thisblock->data.block.last_unconditional_cleanup);
	}
      else
	{
	  thisblock->data.block.last_unconditional_cleanup
	    = get_last_insn ();
	  /* When we insert instructions after the last unconditional cleanup,
	     we don't adjust last_insn.  That means that a later add_insn will
	     clobber the instructions we've just added.  The easiest way to
	     fix this is to just insert another instruction here, so that the
	     instructions inserted after the last unconditional cleanup are
	     never the last instruction.  */
	  emit_note (NULL, NOTE_INSN_DELETED);
	}
    }
  return 1;
}
659e5a7a
JM
4150
4151/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4152 is thrown. */
4153
4154int
4155expand_decl_cleanup_eh (decl, cleanup, eh_only)
4156 tree decl, cleanup;
4157 int eh_only;
4158{
4159 int ret = expand_decl_cleanup (decl, cleanup);
4160 if (cleanup && ret)
4161 {
4162 tree node = block_stack->data.block.cleanups;
4163 CLEANUP_EH_ONLY (node) = eh_only;
4164 }
4165 return ret;
4166}
28d81abb
RK
4167\f
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      tree cleanup_elt = TREE_PURPOSE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
	 aligned such that the element doesn't need to have BLKmode, so
	 change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	  else
	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	  else
	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
	}
      else
	abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
	thisblock->data.block.cleanups
	  = tree_cons (decl_elt, cleanup_elt,
		       thisblock->data.block.cleanups);
    }
}
4242\f
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.

   If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (list, dont_do, in_fixup, reachable)
     tree list;
     tree dont_do;
     int in_fixup;
     int reachable;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	/* Nested lists are expanded recursively.  */
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
	else
	  {
	    /* Close the EH region opened when the cleanup was
	       registered, unless we are inside a fixup goto.  */
	    if (! in_fixup && using_eh_for_cleanups_p)
	      expand_eh_region_end_cleanup (TREE_VALUE (tail));

	    if (reachable && !CLEANUP_EH_ONLY (tail))
	      {
		/* Cleanups may be run multiple times.  For example,
		   when exiting a binding contour, we expand the
		   cleanups associated with that contour.  When a goto
		   within that binding contour has a target outside that
		   contour, it will expand all cleanups from its scope to
		   the target.  Though the cleanups are expanded multiple
		   times, the control paths are non-overlapping so the
		   cleanups will not be executed twice.  */

		/* We may need to protect from outer cleanups.  */
		if (in_fixup && using_eh_for_cleanups_p)
		  {
		    expand_eh_region_start ();

		    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

		    expand_eh_region_end_fixup (TREE_VALUE (tail));
		  }
		else
		  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

		free_temp_slots ();
	      }
	  }
      }
}
4303
e976b8b2
MS
4304/* Mark when the context we are emitting RTL for as a conditional
4305 context, so that any cleanup actions we register with
4306 expand_decl_init will be properly conditionalized when those
4307 cleanup actions are later performed. Must be called before any
956d6950 4308 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
4309
4310void
956d6950 4311start_cleanup_deferral ()
e976b8b2 4312{
e3eef942
JW
4313 /* block_stack can be NULL if we are inside the parameter list. It is
4314 OK to do nothing, because cleanups aren't possible here. */
4315 if (block_stack)
4316 ++block_stack->data.block.conditional_code;
e976b8b2
MS
4317}
4318
4319/* Mark the end of a conditional region of code. Because cleanup
956d6950 4320 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
4321 after we end the currently deferred cleanups, only after we end all
4322 deferred cleanups, are we back in unconditional code. */
4323
4324void
956d6950 4325end_cleanup_deferral ()
e976b8b2 4326{
e3eef942
JW
4327 /* block_stack can be NULL if we are inside the parameter list. It is
4328 OK to do nothing, because cleanups aren't possible here. */
4329 if (block_stack)
4330 --block_stack->data.block.conditional_code;
e976b8b2
MS
4331}
4332
28d81abb
RK
4333tree
4334last_cleanup_this_contour ()
4335{
4336 if (block_stack == 0)
4337 return 0;
4338
4339 return block_stack->data.block.cleanups;
4340}
4341
4342/* Return 1 if there are any pending cleanups at this point.
4343 If THIS_CONTOUR is nonzero, check the current contour as well.
4344 Otherwise, look only at the contours that enclose this one. */
4345
4346int
4347any_pending_cleanups (this_contour)
4348 int this_contour;
4349{
4350 struct nesting *block;
4351
01d939e8 4352 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
28d81abb
RK
4353 return 0;
4354
4355 if (this_contour && block_stack->data.block.cleanups != NULL)
4356 return 1;
4357 if (block_stack->data.block.cleanups == 0
e976b8b2 4358 && block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
4359 return 0;
4360
4361 for (block = block_stack->next; block; block = block->next)
4362 if (block->data.block.cleanups != 0)
4363 return 1;
4364
4365 return 0;
4366}
4367\f
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     const char *printname;
{
  struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->desc = CASE_NESTING;
  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  /* PRINTNAME names the construct ("switch", "case") for diagnostics.  */
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();

  start_cleanup_deferral ();
}
4416
4417/* Start a "dummy case statement" within which case labels are invalid
4418 and are not connected to any larger real case statement.
4419 This can be used if you don't want to let a case statement jump
4420 into the middle of certain kinds of constructs. */
4421
4422void
4423expand_start_case_dummy ()
4424{
b3694847 4425 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
4426
4427 /* Make an entry on case_stack for the dummy. */
4428
e2500fed 4429 thiscase->desc = CASE_NESTING;
28d81abb
RK
4430 thiscase->next = case_stack;
4431 thiscase->all = nesting_stack;
4432 thiscase->depth = ++nesting_depth;
4433 thiscase->exit_label = 0;
4434 thiscase->data.case_stmt.case_list = 0;
4435 thiscase->data.case_stmt.start = 0;
4436 thiscase->data.case_stmt.nominal_type = 0;
4437 thiscase->data.case_stmt.default_label = 0;
28d81abb
RK
4438 case_stack = thiscase;
4439 nesting_stack = thiscase;
956d6950 4440 start_cleanup_deferral ();
28d81abb 4441}
28d81abb 4442\f
/* Diagnose insns emitted inside a case statement before its first label.
   Called when the first label of a case statement is seen; runs at most
   once per statement (line_number_status is reset to -1 afterward).  */

static void
check_seenlabel ()
{
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.line_number_status >= 0)
    {
      rtx insn;

      restore_line_number_status
	(case_stack->data.case_stmt.line_number_status);
      case_stack->data.case_stmt.line_number_status = -1;

      /* Scan forward from the start of the case statement.  Notes and
	 USE insns are harmless; anything else before a label is
	 unreachable code.  */
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      /* Back up to the nearest line-number note so the warning
		 points at the offending source line.  */
	      do
		insn = PREV_INSN (insn);
	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));

	      /* If insn is zero, then there must have been a syntax error.  */
	      if (insn)
		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
					    NOTE_LINE_NUMBER (insn),
					    "unreachable code at beginning of %s",
					    case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
}
4479
28d81abb
RK
4480/* Accumulate one case or default label inside a case or switch statement.
4481 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4482 The function CONVERTER, when applied to arguments T and V,
4483 converts the value V to the type T.
28d81abb
RK
4484
4485 If not currently inside a case or switch statement, return 1 and do
4486 nothing. The caller will print a language-specific error message.
4487 If VALUE is a duplicate or overlaps, return 2 and do nothing
4488 except store the (first) duplicate node in *DUPLICATE.
4489 If VALUE is out of range, return 3 and do nothing.
e976b8b2 4490 If we are jumping into the scope of a cleanup or var-sized array, return 5.
28d81abb
RK
4491 Return 0 on success.
4492
4493 Extended to handle range statements. */
4494
4495int
f52fba84 4496pushcase (value, converter, label, duplicate)
b3694847 4497 tree value;
cdadb1dd 4498 tree (*converter) PARAMS ((tree, tree));
b3694847 4499 tree label;
28d81abb
RK
4500 tree *duplicate;
4501{
28d81abb
RK
4502 tree index_type;
4503 tree nominal_type;
4504
4505 /* Fail if not inside a real case statement. */
4506 if (! (case_stack && case_stack->data.case_stmt.start))
4507 return 1;
4508
4509 if (stack_block_stack
4510 && stack_block_stack->depth > case_stack->depth)
4511 return 5;
4512
4513 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4514 nominal_type = case_stack->data.case_stmt.nominal_type;
4515
4516 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4517 if (index_type == error_mark_node)
4518 return 0;
4519
2f985ca6
JW
4520 /* Convert VALUE to the type in which the comparisons are nominally done. */
4521 if (value != 0)
4522 value = (*converter) (nominal_type, value);
4523
feb60352
R
4524 check_seenlabel ();
4525
28d81abb
RK
4526 /* Fail if this value is out of range for the actual type of the index
4527 (which may be narrower than NOMINAL_TYPE). */
14a774a9
RK
4528 if (value != 0
4529 && (TREE_CONSTANT_OVERFLOW (value)
4530 || ! int_fits_type_p (value, index_type)))
28d81abb
RK
4531 return 3;
4532
56cb9733 4533 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4534}
4535
956d6950
JL
4536/* Like pushcase but this case applies to all values between VALUE1 and
4537 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4538 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4539 starts at VALUE1 and ends at the highest value of the index type.
4540 If both are NULL, this case applies to all values.
4541
4542 The return value is the same as that of pushcase but there is one
4543 additional error code: 4 means the specified range was empty. */
28d81abb
RK
4544
4545int
f52fba84 4546pushcase_range (value1, value2, converter, label, duplicate)
b3694847 4547 tree value1, value2;
cdadb1dd 4548 tree (*converter) PARAMS ((tree, tree));
b3694847 4549 tree label;
28d81abb
RK
4550 tree *duplicate;
4551{
28d81abb
RK
4552 tree index_type;
4553 tree nominal_type;
4554
4555 /* Fail if not inside a real case statement. */
4556 if (! (case_stack && case_stack->data.case_stmt.start))
4557 return 1;
4558
4559 if (stack_block_stack
4560 && stack_block_stack->depth > case_stack->depth)
4561 return 5;
4562
4563 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4564 nominal_type = case_stack->data.case_stmt.nominal_type;
4565
4566 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4567 if (index_type == error_mark_node)
4568 return 0;
4569
a11759a3 4570 check_seenlabel ();
28d81abb 4571
956d6950
JL
4572 /* Convert VALUEs to type in which the comparisons are nominally done
4573 and replace any unspecified value with the corresponding bound. */
4574 if (value1 == 0)
1974bfb1 4575 value1 = TYPE_MIN_VALUE (index_type);
956d6950 4576 if (value2 == 0)
1974bfb1 4577 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
4578
4579 /* Fail if the range is empty. Do this before any conversion since
4580 we want to allow out-of-range empty ranges. */
14a774a9 4581 if (value2 != 0 && tree_int_cst_lt (value2, value1))
956d6950
JL
4582 return 4;
4583
4381f7c2 4584 /* If the max was unbounded, use the max of the nominal_type we are
e1ee5cdc
RH
4585 converting to. Do this after the < check above to suppress false
4586 positives. */
14a774a9 4587 if (value2 == 0)
e1ee5cdc 4588 value2 = TYPE_MAX_VALUE (nominal_type);
28d81abb 4589
2f985ca6
JW
4590 value1 = (*converter) (nominal_type, value1);
4591 value2 = (*converter) (nominal_type, value2);
4592
28d81abb 4593 /* Fail if these values are out of range. */
956d6950
JL
4594 if (TREE_CONSTANT_OVERFLOW (value1)
4595 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
4596 return 3;
4597
956d6950
JL
4598 if (TREE_CONSTANT_OVERFLOW (value2)
4599 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
4600 return 3;
4601
57641239
RK
4602 return add_case_node (value1, value2, label, duplicate);
4603}
4604
4605/* Do the actual insertion of a case label for pushcase and pushcase_range
4606 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4607 slowdown for large switch statements. */
4608
56cb9733 4609int
57641239
RK
4610add_case_node (low, high, label, duplicate)
4611 tree low, high;
4612 tree label;
4613 tree *duplicate;
4614{
4615 struct case_node *p, **q, *r;
4616
56cb9733
MM
4617 /* If there's no HIGH value, then this is not a case range; it's
4618 just a simple case label. But that's just a degenerate case
4619 range. */
4620 if (!high)
4621 high = low;
4622
4623 /* Handle default labels specially. */
4624 if (!high && !low)
4625 {
4626 if (case_stack->data.case_stmt.default_label != 0)
4627 {
4628 *duplicate = case_stack->data.case_stmt.default_label;
4629 return 2;
4630 }
4631 case_stack->data.case_stmt.default_label = label;
4632 expand_label (label);
4633 return 0;
4634 }
4635
57641239
RK
4636 q = &case_stack->data.case_stmt.case_list;
4637 p = *q;
4638
69d4ca36 4639 while ((r = *q))
28d81abb 4640 {
57641239
RK
4641 p = r;
4642
4643 /* Keep going past elements distinctly greater than HIGH. */
4644 if (tree_int_cst_lt (high, p->low))
4645 q = &p->left;
4646
4647 /* or distinctly less than LOW. */
4648 else if (tree_int_cst_lt (p->high, low))
4649 q = &p->right;
4650
4651 else
28d81abb 4652 {
57641239
RK
4653 /* We have an overlap; this is an error. */
4654 *duplicate = p->code_label;
28d81abb
RK
4655 return 2;
4656 }
4657 }
4658
1da68f56 4659 /* Add this label to the chain, and succeed. */
28d81abb 4660
e2500fed 4661 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
1da68f56 4662 r->low = low;
28d81abb 4663
57641239 4664 /* If the bounds are equal, turn this into the one-value case. */
57641239
RK
4665 if (tree_int_cst_equal (low, high))
4666 r->high = r->low;
4667 else
1da68f56 4668 r->high = high;
57641239
RK
4669
4670 r->code_label = label;
28d81abb
RK
4671 expand_label (label);
4672
57641239
RK
4673 *q = r;
4674 r->parent = p;
4675 r->left = 0;
4676 r->right = 0;
4677 r->balance = 0;
4678
4679 while (p)
4680 {
4681 struct case_node *s;
4682
4683 if (r == p->left)
4684 {
4685 int b;
4686
4687 if (! (b = p->balance))
4688 /* Growth propagation from left side. */
4689 p->balance = -1;
4690 else if (b < 0)
4691 {
4692 if (r->balance < 0)
4693 {
4694 /* R-Rotation */
51723711 4695 if ((p->left = s = r->right))
57641239
RK
4696 s->parent = p;
4697
4698 r->right = p;
4699 p->balance = 0;
4700 r->balance = 0;
4701 s = p->parent;
4702 p->parent = r;
4703
51723711 4704 if ((r->parent = s))
57641239
RK
4705 {
4706 if (s->left == p)
4707 s->left = r;
4708 else
4709 s->right = r;
4710 }
4711 else
4712 case_stack->data.case_stmt.case_list = r;
4713 }
4714 else
4715 /* r->balance == +1 */
4716 {
5720c7e7
RK
4717 /* LR-Rotation */
4718
57641239
RK
4719 int b2;
4720 struct case_node *t = r->right;
4721
51723711 4722 if ((p->left = s = t->right))
57641239
RK
4723 s->parent = p;
4724
4725 t->right = p;
51723711 4726 if ((r->right = s = t->left))
57641239
RK
4727 s->parent = r;
4728
4729 t->left = r;
4730 b = t->balance;
4731 b2 = b < 0;
4732 p->balance = b2;
4733 b2 = -b2 - b;
4734 r->balance = b2;
4735 t->balance = 0;
4736 s = p->parent;
4737 p->parent = t;
4738 r->parent = t;
4739
51723711 4740 if ((t->parent = s))
57641239
RK
4741 {
4742 if (s->left == p)
4743 s->left = t;
4744 else
4745 s->right = t;
4746 }
4747 else
4748 case_stack->data.case_stmt.case_list = t;
4749 }
4750 break;
4751 }
4752
4753 else
4754 {
4755 /* p->balance == +1; growth of left side balances the node. */
4756 p->balance = 0;
4757 break;
4758 }
4759 }
4760 else
4761 /* r == p->right */
4762 {
4763 int b;
4764
4765 if (! (b = p->balance))
4766 /* Growth propagation from right side. */
4767 p->balance++;
4768 else if (b > 0)
4769 {
4770 if (r->balance > 0)
4771 {
4772 /* L-Rotation */
4773
51723711 4774 if ((p->right = s = r->left))
57641239
RK
4775 s->parent = p;
4776
4777 r->left = p;
4778 p->balance = 0;
4779 r->balance = 0;
4780 s = p->parent;
4781 p->parent = r;
51723711 4782 if ((r->parent = s))
57641239
RK
4783 {
4784 if (s->left == p)
4785 s->left = r;
4786 else
4787 s->right = r;
4788 }
4789
4790 else
4791 case_stack->data.case_stmt.case_list = r;
4792 }
4793
4794 else
4795 /* r->balance == -1 */
4796 {
4797 /* RL-Rotation */
4798 int b2;
4799 struct case_node *t = r->left;
4800
51723711 4801 if ((p->right = s = t->left))
57641239
RK
4802 s->parent = p;
4803
4804 t->left = p;
4805
51723711 4806 if ((r->left = s = t->right))
57641239
RK
4807 s->parent = r;
4808
4809 t->right = r;
4810 b = t->balance;
4811 b2 = b < 0;
4812 r->balance = b2;
4813 b2 = -b2 - b;
4814 p->balance = b2;
4815 t->balance = 0;
4816 s = p->parent;
4817 p->parent = t;
4818 r->parent = t;
4819
51723711 4820 if ((t->parent = s))
57641239
RK
4821 {
4822 if (s->left == p)
4823 s->left = t;
4824 else
4825 s->right = t;
4826 }
4827
4828 else
4829 case_stack->data.case_stmt.case_list = t;
4830 }
4831 break;
4832 }
4833 else
4834 {
4835 /* p->balance == -1; growth of right side balances the node. */
4836 p->balance = 0;
4837 break;
4838 }
4839 }
4840
4841 r = p;
4842 p = p->parent;
4843 }
28d81abb
RK
4844
4845 return 0;
4846}
4847\f
94d6511c 4848/* Returns the number of possible values of TYPE.
770ae6cc
RK
4849 Returns -1 if the number is unknown, variable, or if the number does not
4850 fit in a HOST_WIDE_INT.
6f9fdf4d 4851 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
94d6511c
PB
4852 do not increase monotonically (there may be duplicates);
4853 to 1 if the values increase monotonically, but not always by 1;
4854 otherwise sets it to 0. */
4855
4856HOST_WIDE_INT
6f9fdf4d 4857all_cases_count (type, sparseness)
94d6511c 4858 tree type;
6f9fdf4d 4859 int *sparseness;
94d6511c 4860{
770ae6cc
RK
4861 tree t;
4862 HOST_WIDE_INT count, minval, lastval;
4863
6f9fdf4d 4864 *sparseness = 0;
94d6511c
PB
4865
4866 switch (TREE_CODE (type))
4867 {
94d6511c
PB
4868 case BOOLEAN_TYPE:
4869 count = 2;
4870 break;
770ae6cc 4871
94d6511c
PB
4872 case CHAR_TYPE:
4873 count = 1 << BITS_PER_UNIT;
4874 break;
770ae6cc 4875
94d6511c
PB
4876 default:
4877 case INTEGER_TYPE:
770ae6cc
RK
4878 if (TYPE_MAX_VALUE (type) != 0
4879 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4880 TYPE_MIN_VALUE (type))))
4881 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4882 convert (type, integer_zero_node))))
4883 && host_integerp (t, 1))
4884 count = tree_low_cst (t, 1);
94d6511c 4885 else
770ae6cc 4886 return -1;
94d6511c 4887 break;
770ae6cc 4888
94d6511c 4889 case ENUMERAL_TYPE:
770ae6cc
RK
4890 /* Don't waste time with enumeral types with huge values. */
4891 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4892 || TYPE_MAX_VALUE (type) == 0
4893 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4894 return -1;
4895
4896 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
94d6511c 4897 count = 0;
770ae6cc 4898
94d6511c
PB
4899 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4900 {
770ae6cc
RK
4901 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4902
6f9fdf4d
JJ
4903 if (*sparseness == 2 || thisval <= lastval)
4904 *sparseness = 2;
770ae6cc 4905 else if (thisval != minval + count)
6f9fdf4d 4906 *sparseness = 1;
770ae6cc 4907
6f9fdf4d 4908 lastval = thisval;
94d6511c
PB
4909 count++;
4910 }
94d6511c 4911 }
770ae6cc 4912
94d6511c
PB
4913 return count;
4914}
4915
94d6511c 4916#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4917 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4918 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4919#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4920 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4921 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
94d6511c
PB
4922
4923/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4924 with the case values we have seen, assuming the case expression
4925 has the given TYPE.
4926 SPARSENESS is as determined by all_cases_count.
4927
9faa82d8 4928 The time needed is proportional to COUNT, unless
94d6511c
PB
4929 SPARSENESS is 2, in which case quadratic time is needed. */
4930
df03cab5 4931void
94d6511c
PB
4932mark_seen_cases (type, cases_seen, count, sparseness)
4933 tree type;
4934 unsigned char *cases_seen;
770ae6cc 4935 HOST_WIDE_INT count;
94d6511c
PB
4936 int sparseness;
4937{
94d6511c 4938 tree next_node_to_try = NULL_TREE;
770ae6cc 4939 HOST_WIDE_INT next_node_offset = 0;
94d6511c 4940
b3694847 4941 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c 4942 tree val = make_node (INTEGER_CST);
770ae6cc 4943
94d6511c 4944 TREE_TYPE (val) = type;
5720c7e7 4945 if (! root)
4381f7c2
KH
4946 /* Do nothing. */
4947 ;
5720c7e7 4948 else if (sparseness == 2)
94d6511c 4949 {
5720c7e7 4950 tree t;
770ae6cc 4951 unsigned HOST_WIDE_INT xlo;
5720c7e7
RK
4952
4953 /* This less efficient loop is only needed to handle
4954 duplicate case values (multiple enum constants
4955 with the same value). */
4956 TREE_TYPE (val) = TREE_TYPE (root->low);
4381f7c2 4957 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5720c7e7 4958 t = TREE_CHAIN (t), xlo++)
94d6511c 4959 {
5720c7e7
RK
4960 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4961 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4962 n = root;
4963 do
94d6511c 4964 {
5720c7e7
RK
4965 /* Keep going past elements distinctly greater than VAL. */
4966 if (tree_int_cst_lt (val, n->low))
4967 n = n->left;
4381f7c2 4968
5720c7e7
RK
4969 /* or distinctly less than VAL. */
4970 else if (tree_int_cst_lt (n->high, val))
4971 n = n->right;
4381f7c2 4972
5720c7e7 4973 else
94d6511c 4974 {
5720c7e7
RK
4975 /* We have found a matching range. */
4976 BITARRAY_SET (cases_seen, xlo);
4977 break;
94d6511c
PB
4978 }
4979 }
5720c7e7
RK
4980 while (n);
4981 }
4982 }
4983 else
4984 {
4985 if (root->left)
4986 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
770ae6cc 4987
5720c7e7
RK
4988 for (n = root; n; n = n->right)
4989 {
4990 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4991 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4381f7c2 4992 while (! tree_int_cst_lt (n->high, val))
94d6511c 4993 {
5720c7e7
RK
4994 /* Calculate (into xlo) the "offset" of the integer (val).
4995 The element with lowest value has offset 0, the next smallest
4996 element has offset 1, etc. */
4997
770ae6cc
RK
4998 unsigned HOST_WIDE_INT xlo;
4999 HOST_WIDE_INT xhi;
5720c7e7 5000 tree t;
770ae6cc 5001
94d6511c
PB
5002 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5003 {
5004 /* The TYPE_VALUES will be in increasing order, so
5005 starting searching where we last ended. */
5006 t = next_node_to_try;
5007 xlo = next_node_offset;
5008 xhi = 0;
5009 for (;;)
5010 {
5011 if (t == NULL_TREE)
5012 {
5013 t = TYPE_VALUES (type);
5014 xlo = 0;
5015 }
5016 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5017 {
5018 next_node_to_try = TREE_CHAIN (t);
5019 next_node_offset = xlo + 1;
5020 break;
5021 }
5022 xlo++;
5023 t = TREE_CHAIN (t);
5024 if (t == next_node_to_try)
5720c7e7
RK
5025 {
5026 xlo = -1;
5027 break;
5028 }
94d6511c
PB
5029 }
5030 }
5031 else
5032 {
5033 t = TYPE_MIN_VALUE (type);
5034 if (t)
5035 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5036 &xlo, &xhi);
5037 else
5038 xlo = xhi = 0;
5039 add_double (xlo, xhi,
5040 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5041 &xlo, &xhi);
5042 }
4381f7c2 5043
770ae6cc 5044 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
94d6511c 5045 BITARRAY_SET (cases_seen, xlo);
770ae6cc 5046
5720c7e7
RK
5047 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5048 1, 0,
5049 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 5050 }
94d6511c
PB
5051 }
5052 }
5053}
5054
740b77b6
AC
5055/* Given a switch statement with an expression that is an enumeration
5056 type, warn if any of the enumeration type's literals are not
5057 covered by the case expressions of the switch. Also, warn if there
5058 are any extra switch cases that are *not* elements of the
5059 enumerated type.
28d81abb 5060
740b77b6 5061 Historical note:
28d81abb 5062
740b77b6
AC
5063 At one stage this function would: ``If all enumeration literals
5064 were covered by the case expressions, turn one of the expressions
5065 into the default expression since it should not be possible to fall
5066 through such a switch.''
5067
5068 That code has since been removed as: ``This optimization is
5069 disabled because it causes valid programs to fail. ANSI C does not
5070 guarantee that an expression with enum type will have a value that
5071 is the same as one of the enumeration literals.'' */
28d81abb
RK
5072
5073void
5074check_for_full_enumeration_handling (type)
5075 tree type;
5076{
b3694847
SS
5077 struct case_node *n;
5078 tree chain;
28d81abb 5079
0f41302f 5080 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
5081 int sparseness = 0;
5082
0f41302f 5083 /* The number of possible selector values. */
94d6511c
PB
5084 HOST_WIDE_INT size;
5085
5086 /* For each possible selector value. a one iff it has been matched
0f41302f 5087 by a case value alternative. */
94d6511c
PB
5088 unsigned char *cases_seen;
5089
0f41302f 5090 /* The allocated size of cases_seen, in chars. */
770ae6cc 5091 HOST_WIDE_INT bytes_needed;
94d6511c 5092
94d6511c
PB
5093 size = all_cases_count (type, &sparseness);
5094 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 5095
94d6511c 5096 if (size > 0 && size < 600000
c5c76735 5097 /* We deliberately use calloc here, not cmalloc, so that we can suppress
4381f7c2 5098 this optimization if we don't have enough memory rather than
c5c76735 5099 aborting, as xmalloc would do. */
f4ce9d90
KG
5100 && (cases_seen =
5101 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
28d81abb 5102 {
770ae6cc 5103 HOST_WIDE_INT i;
94d6511c 5104 tree v = TYPE_VALUES (type);
28d81abb 5105
94d6511c
PB
5106 /* The time complexity of this code is normally O(N), where
5107 N being the number of members in the enumerated type.
e0a2f705 5108 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 5109 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
5110
5111 mark_seen_cases (type, cases_seen, size, sparseness);
5112
770ae6cc 5113 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4381f7c2 5114 if (BITARRAY_TEST (cases_seen, i) == 0)
770ae6cc
RK
5115 warning ("enumeration value `%s' not handled in switch",
5116 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
94d6511c
PB
5117
5118 free (cases_seen);
28d81abb
RK
5119 }
5120
5121 /* Now we go the other way around; we warn if there are case
ac2a9454 5122 expressions that don't correspond to enumerators. This can
28d81abb 5123 occur since C and C++ don't enforce type-checking of
0f41302f 5124 assignments to enumeration variables. */
28d81abb 5125
5720c7e7
RK
5126 if (case_stack->data.case_stmt.case_list
5127 && case_stack->data.case_stmt.case_list->left)
5128 case_stack->data.case_stmt.case_list
5129 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
740b77b6
AC
5130 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5131 {
5132 for (chain = TYPE_VALUES (type);
5133 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5134 chain = TREE_CHAIN (chain))
5135 ;
786de7eb 5136
740b77b6
AC
5137 if (!chain)
5138 {
5139 if (TYPE_NAME (type) == 0)
5140 warning ("case value `%ld' not in enumerated type",
5141 (long) TREE_INT_CST_LOW (n->low));
5142 else
5143 warning ("case value `%ld' not in enumerated type `%s'",
5144 (long) TREE_INT_CST_LOW (n->low),
5145 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5146 == IDENTIFIER_NODE)
5147 ? TYPE_NAME (type)
5148 : DECL_NAME (TYPE_NAME (type))));
5149 }
5150 if (!tree_int_cst_equal (n->low, n->high))
5151 {
5152 for (chain = TYPE_VALUES (type);
5153 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5154 chain = TREE_CHAIN (chain))
5155 ;
786de7eb 5156
740b77b6
AC
5157 if (!chain)
5158 {
5159 if (TYPE_NAME (type) == 0)
5160 warning ("case value `%ld' not in enumerated type",
5161 (long) TREE_INT_CST_LOW (n->high));
5162 else
5163 warning ("case value `%ld' not in enumerated type `%s'",
5164 (long) TREE_INT_CST_LOW (n->high),
5165 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5166 == IDENTIFIER_NODE)
5167 ? TYPE_NAME (type)
5168 : DECL_NAME (TYPE_NAME (type))));
5169 }
5170 }
5171 }
28d81abb 5172}
ca695ac9 5173
28d81abb 5174\f
ad82abb8 5175
28d81abb 5176/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 5177 in which ORIG_INDEX is the expression to be tested.
6f9fdf4d
JJ
5178 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5179 type as given in the source before any compiler conversions.
28d81abb
RK
5180 Generate the code to test it and jump to the right place. */
5181
5182void
6f9fdf4d
JJ
5183expand_end_case_type (orig_index, orig_type)
5184 tree orig_index, orig_type;
28d81abb 5185{
9fb60a0d 5186 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
28d81abb 5187 rtx default_label = 0;
b3694847 5188 struct case_node *n;
85066503 5189 unsigned int count;
28d81abb 5190 rtx index;
ca695ac9 5191 rtx table_label;
28d81abb
RK
5192 int ncases;
5193 rtx *labelvec;
b3694847 5194 int i;
2270623a 5195 rtx before_case, end;
b3694847 5196 struct nesting *thiscase = case_stack;
1b0cb6fc 5197 tree index_expr, index_type;
100e3acb 5198 bool exit_done = false;
ca695ac9
JB
5199 int unsignedp;
5200
03c03770
AS
5201 /* Don't crash due to previous errors. */
5202 if (thiscase == NULL)
5203 return;
5204
ca695ac9 5205 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
5206 index_type = TREE_TYPE (index_expr);
5207 unsignedp = TREE_UNSIGNED (index_type);
6f9fdf4d
JJ
5208 if (orig_type == NULL)
5209 orig_type = TREE_TYPE (orig_index);
28d81abb
RK
5210
5211 do_pending_stack_adjust ();
5212
09da1532 5213 /* This might get a spurious warning in the presence of a syntax error;
feb60352
R
5214 it could be fixed by moving the call to check_seenlabel after the
5215 check for error_mark_node, and copying the code of check_seenlabel that
5216 deals with case_stack->data.case_stmt.line_number_status /
5217 restore_line_number_status in front of the call to end_cleanup_deferral;
5218 However, this might miss some useful warnings in the presence of
5219 non-syntax errors. */
a11759a3
JR
5220 check_seenlabel ();
5221
28d81abb 5222 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 5223 if (index_type != error_mark_node)
28d81abb 5224 {
740b77b6
AC
5225 /* If the switch expression was an enumerated type, check that
5226 exactly all enumeration literals are covered by the cases.
173028e5
AC
5227 The check is made when -Wswitch was specified and there is no
5228 default case, or when -Wswitch-enum was specified. */
5229 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5230 || warn_switch_enum)
6f9fdf4d 5231 && TREE_CODE (orig_type) == ENUMERAL_TYPE
28d81abb 5232 && TREE_CODE (index_expr) != INTEGER_CST)
6f9fdf4d 5233 check_for_full_enumeration_handling (orig_type);
28d81abb 5234
d6961341
AC
5235 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5236 warning ("switch missing default case");
5237
28d81abb
RK
5238 /* If we don't have a default-label, create one here,
5239 after the body of the switch. */
5240 if (thiscase->data.case_stmt.default_label == 0)
5241 {
5242 thiscase->data.case_stmt.default_label
5243 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
100e3acb
RS
5244 /* Share the exit label if possible. */
5245 if (thiscase->exit_label)
5246 {
5247 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5248 thiscase->exit_label);
5249 exit_done = true;
5250 }
28d81abb
RK
5251 expand_label (thiscase->data.case_stmt.default_label);
5252 }
5253 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5254
5255 before_case = get_last_insn ();
5256
5720c7e7
RK
5257 if (thiscase->data.case_stmt.case_list
5258 && thiscase->data.case_stmt.case_list->left)
b059139c 5259 thiscase->data.case_stmt.case_list
4381f7c2 5260 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
b059139c 5261
28d81abb
RK
5262 /* Simplify the case-list before we count it. */
5263 group_case_nodes (thiscase->data.case_stmt.case_list);
100e3acb
RS
5264 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5265 default_label);
28d81abb
RK
5266
5267 /* Get upper and lower bounds of case values.
5268 Also convert all the case values to the index expr's data type. */
5269
5270 count = 0;
5271 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5272 {
5273 /* Check low and high label values are integers. */
5274 if (TREE_CODE (n->low) != INTEGER_CST)
5275 abort ();
5276 if (TREE_CODE (n->high) != INTEGER_CST)
5277 abort ();
5278
1b0cb6fc
RK
5279 n->low = convert (index_type, n->low);
5280 n->high = convert (index_type, n->high);
28d81abb
RK
5281
5282 /* Count the elements and track the largest and smallest
5283 of them (treating them as signed even if they are not). */
5284 if (count++ == 0)
5285 {
5286 minval = n->low;
5287 maxval = n->high;
5288 }
5289 else
5290 {
5291 if (INT_CST_LT (n->low, minval))
5292 minval = n->low;
5293 if (INT_CST_LT (maxval, n->high))
5294 maxval = n->high;
5295 }
5296 /* A range counts double, since it requires two compares. */
5297 if (! tree_int_cst_equal (n->low, n->high))
5298 count++;
5299 }
5300
5301 /* Compute span of values. */
5302 if (count != 0)
1b0cb6fc 5303 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5304
956d6950 5305 end_cleanup_deferral ();
4c581243 5306
1b0cb6fc 5307 if (count == 0)
28d81abb
RK
5308 {
5309 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5310 emit_queue ();
5311 emit_jump (default_label);
5312 }
3474db0e 5313
28d81abb
RK
5314 /* If range of values is much bigger than number of values,
5315 make a sequence of conditional branches instead of a dispatch.
5316 If the switch-index is a constant, do it this way
5317 because we can optimize it. */
4f73c5dd 5318
ad82abb8 5319 else if (count < case_values_threshold ()
05bccae2 5320 || compare_tree_int (range, 10 * count) > 0
f0c988c8
BS
5321 /* RANGE may be signed, and really large ranges will show up
5322 as negative numbers. */
5323 || compare_tree_int (range, 0) < 0
3f6fe18e
RK
5324#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5325 || flag_pic
5326#endif
100e3acb 5327 || TREE_CONSTANT (index_expr))
28d81abb 5328 {
37366632 5329 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5330
5331 /* If the index is a short or char that we do not have
5332 an insn to handle comparisons directly, convert it to
5333 a full integer now, rather than letting each comparison
5334 generate the conversion. */
5335
5336 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
ef89d648 5337 && ! have_insn_for (COMPARE, GET_MODE (index)))
28d81abb
RK
5338 {
5339 enum machine_mode wider_mode;
5340 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5341 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
ef89d648 5342 if (have_insn_for (COMPARE, wider_mode))
28d81abb
RK
5343 {
5344 index = convert_to_mode (wider_mode, index, unsignedp);
5345 break;
5346 }
5347 }
5348
5349 emit_queue ();
5350 do_pending_stack_adjust ();
5351
5352 index = protect_from_queue (index, 0);
5353 if (GET_CODE (index) == MEM)
5354 index = copy_to_reg (index);
5355 if (GET_CODE (index) == CONST_INT
5356 || TREE_CODE (index_expr) == INTEGER_CST)
5357 {
5358 /* Make a tree node with the proper constant value
5359 if we don't already have one. */
5360 if (TREE_CODE (index_expr) != INTEGER_CST)
5361 {
5362 index_expr
5363 = build_int_2 (INTVAL (index),
e9a042b6 5364 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5365 index_expr = convert (index_type, index_expr);
28d81abb
RK
5366 }
5367
5368 /* For constant index expressions we need only
4fe9b91c 5369 issue an unconditional branch to the appropriate
28d81abb
RK
5370 target code. The job of removing any unreachable
5371 code is left to the optimisation phase if the
5372 "-O" option is specified. */
1b0cb6fc
RK
5373 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5374 if (! tree_int_cst_lt (index_expr, n->low)
5375 && ! tree_int_cst_lt (n->high, index_expr))
5376 break;
5377
28d81abb
RK
5378 if (n)
5379 emit_jump (label_rtx (n->code_label));
5380 else
5381 emit_jump (default_label);
5382 }
5383 else
5384 {
5385 /* If the index expression is not constant we generate
5386 a binary decision tree to select the appropriate
5387 target code. This is done as follows:
5388
5389 The list of cases is rearranged into a binary tree,
5390 nearly optimal assuming equal probability for each case.
5391
5392 The tree is transformed into RTL, eliminating
5393 redundant test conditions at the same time.
5394
5395 If program flow could reach the end of the
5396 decision tree an unconditional jump to the
5397 default code is emitted. */
5398
5399 use_cost_table
6f9fdf4d 5400 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
28d81abb 5401 && estimate_case_costs (thiscase->data.case_stmt.case_list));
9714cf43 5402 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
28d81abb 5403 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5404 default_label, index_type);
28d81abb
RK
5405 emit_jump_if_reachable (default_label);
5406 }
5407 }
5408 else
5409 {
100e3acb 5410 table_label = gen_label_rtx ();
ad82abb8
ZW
5411 if (! try_casesi (index_type, index_expr, minval, range,
5412 table_label, default_label))
28d81abb 5413 {
ecc9dd93 5414 index_type = thiscase->data.case_stmt.nominal_type;
1ff37128 5415
786de7eb 5416 /* Index jumptables from zero for suitable values of
1ff37128 5417 minval to avoid a subtraction. */
786de7eb
KH
5418 if (! optimize_size
5419 && compare_tree_int (minval, 0) > 0
5420 && compare_tree_int (minval, 3) < 0)
5421 {
5422 minval = integer_zero_node;
5423 range = maxval;
5424 }
1ff37128 5425
ad82abb8
ZW
5426 if (! try_tablejump (index_type, index_expr, minval, range,
5427 table_label, default_label))
5428 abort ();
28d81abb 5429 }
786de7eb 5430
28d81abb
RK
5431 /* Get table of labels to jump to, in order of case index. */
5432
1ff37128 5433 ncases = tree_low_cst (range, 0) + 1;
28d81abb 5434 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
961192e1 5435 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
28d81abb
RK
5436
5437 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5438 {
2d9d49e4
OH
5439 /* Compute the low and high bounds relative to the minimum
5440 value since that should fit in a HOST_WIDE_INT while the
5441 actual values may not. */
5442 HOST_WIDE_INT i_low
786de7eb
KH
5443 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5444 n->low, minval)), 1);
2d9d49e4 5445 HOST_WIDE_INT i_high
786de7eb
KH
5446 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5447 n->high, minval)), 1);
2d9d49e4
OH
5448 HOST_WIDE_INT i;
5449
5450 for (i = i_low; i <= i_high; i ++)
5451 labelvec[i]
5452 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
28d81abb
RK
5453 }
5454
5455 /* Fill in the gaps with the default. */
5456 for (i = 0; i < ncases; i++)
5457 if (labelvec[i] == 0)
38a448ca 5458 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
28d81abb
RK
5459
5460 /* Output the table */
5461 emit_label (table_label);
5462
18543a22 5463 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5464 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5465 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353 5466 gen_rtvec_v (ncases, labelvec),
4381f7c2 5467 const0_rtx, const0_rtx));
28d81abb 5468 else
38a448ca
RH
5469 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5470 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5471
5472 /* If the case insn drops through the table,
5473 after the table we must jump to the default-label.
5474 Otherwise record no drop-through after the table. */
5475#ifdef CASE_DROPS_THROUGH
5476 emit_jump (default_label);
5477#else
5478 emit_barrier ();
5479#endif
5480 }
5481
2270623a
JM
5482 before_case = NEXT_INSN (before_case);
5483 end = get_last_insn ();
2b7d71b2
JJ
5484 if (squeeze_notes (&before_case, &end))
5485 abort ();
2270623a 5486 reorder_insns (before_case, end,
28d81abb
RK
5487 thiscase->data.case_stmt.start);
5488 }
4c581243 5489 else
956d6950 5490 end_cleanup_deferral ();
1b0cb6fc 5491
100e3acb 5492 if (thiscase->exit_label && !exit_done)
28d81abb
RK
5493 emit_label (thiscase->exit_label);
5494
5495 POPSTACK (case_stack);
5496
5497 free_temp_slots ();
5498}
5499
57641239
RK
5500/* Convert the tree NODE into a list linked by the right field, with the left
5501 field zeroed. RIGHT is used for recursion; it is a list to be placed
5502 rightmost in the resulting list. */
5503
5504static struct case_node *
5505case_tree2list (node, right)
5506 struct case_node *node, *right;
5507{
5508 struct case_node *left;
5509
5510 if (node->right)
5511 right = case_tree2list (node->right, right);
5512
5513 node->right = right;
51723711 5514 if ((left = node->left))
57641239
RK
5515 {
5516 node->left = 0;
5517 return case_tree2list (left, node);
5518 }
5519
5520 return node;
5521}
ca695ac9 5522
28d81abb
RK
5523/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5524
5525static void
5526do_jump_if_equal (op1, op2, label, unsignedp)
5527 rtx op1, op2, label;
5528 int unsignedp;
5529{
d43e0b7d 5530 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
28d81abb
RK
5531 {
5532 if (INTVAL (op1) == INTVAL (op2))
5533 emit_jump (label);
5534 }
5535 else
d43e0b7d
RK
5536 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5537 (GET_MODE (op1) == VOIDmode
5538 ? GET_MODE (op2) : GET_MODE (op1)),
5539 unsignedp, label);
28d81abb
RK
5540}
5541\f
5542/* Not all case values are encountered equally. This function
5543 uses a heuristic to weight case labels, in cases where that
5544 looks like a reasonable thing to do.
5545
5546 Right now, all we try to guess is text, and we establish the
5547 following weights:
5548
5549 chars above space: 16
5550 digits: 16
5551 default: 12
5552 space, punct: 8
5553 tab: 4
5554 newline: 2
5555 other "\" chars: 1
5556 remaining chars: 0
5557
5558 If we find any cases in the switch that are not either -1 or in the range
5559 of valid ASCII characters, or are control characters other than those
5560 commonly used with "\", don't treat this switch scanning text.
5561
5562 Return 1 if these nodes are suitable for cost estimation, otherwise
5563 return 0. */
5564
5565static int
5566estimate_case_costs (node)
5567 case_node_ptr node;
5568{
f2d1f0ba 5569 tree min_ascii = integer_minus_one_node;
28d81abb
RK
5570 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5571 case_node_ptr n;
5572 int i;
5573
5574 /* If we haven't already made the cost table, make it now. Note that the
5575 lower bound of the table is -1, not zero. */
5576
2a2137c4 5577 if (! cost_table_initialized)
28d81abb 5578 {
2a2137c4 5579 cost_table_initialized = 1;
28d81abb
RK
5580
5581 for (i = 0; i < 128; i++)
5582 {
e9a780ec 5583 if (ISALNUM (i))
2a2137c4 5584 COST_TABLE (i) = 16;
e9a780ec 5585 else if (ISPUNCT (i))
2a2137c4 5586 COST_TABLE (i) = 8;
e9a780ec 5587 else if (ISCNTRL (i))
2a2137c4 5588 COST_TABLE (i) = -1;
28d81abb
RK
5589 }
5590
2a2137c4
RH
5591 COST_TABLE (' ') = 8;
5592 COST_TABLE ('\t') = 4;
5593 COST_TABLE ('\0') = 4;
5594 COST_TABLE ('\n') = 2;
5595 COST_TABLE ('\f') = 1;
5596 COST_TABLE ('\v') = 1;
5597 COST_TABLE ('\b') = 1;
28d81abb
RK
5598 }
5599
5600 /* See if all the case expressions look like text. It is text if the
5601 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5602 as signed arithmetic since we don't want to ever access cost_table with a
5603 value less than -1. Also check that none of the constants in a range
5604 are strange control characters. */
5605
5606 for (n = node; n; n = n->right)
5607 {
5608 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5609 return 0;
5610
05bccae2
RK
5611 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5612 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2a2137c4 5613 if (COST_TABLE (i) < 0)
28d81abb
RK
5614 return 0;
5615 }
5616
5617 /* All interesting values are within the range of interesting
5618 ASCII characters. */
5619 return 1;
5620}
5621
100e3acb
RS
5622/* Determine whether two case labels branch to the same target. */
5623
5624static bool
5625same_case_target_p (l1, l2)
5626 rtx l1, l2;
5627{
5628 rtx i1, i2;
5629
5630 if (l1 == l2)
5631 return true;
5632
5633 i1 = next_real_insn (l1);
5634 i2 = next_real_insn (l2);
5635 if (i1 == i2)
5636 return true;
5637
5638 if (i1 && simplejump_p (i1))
5639 {
5640 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5641 }
5642
5643 if (i2 && simplejump_p (i2))
5644 {
5645 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5646 }
5647 return l1 == l2;
5648}
5649
5650/* Delete nodes that branch to the default label from a list of
5651 case nodes. Eg. case 5: default: becomes just default: */
5652
5653static void
5654strip_default_case_nodes (prev, deflab)
5655 case_node_ptr *prev;
5656 rtx deflab;
5657{
5658 case_node_ptr ptr;
5659
5660 while (*prev)
5661 {
5662 ptr = *prev;
5663 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5664 *prev = ptr->right;
5665 else
5666 prev = &ptr->right;
5667 }
5668}
5669
28d81abb
RK
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lab = label_rtx (node->code_label);
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && same_case_target_p (label_rtx (np->code_label), lab)
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  (If node->high + 1
		wrapped around, it is not greater than node->high and
		the run must stop here.)  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  /* Absorb NP's range into NODE's.  */
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
5711
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* The pivot would be the very first node, so no split
		     is possible.  Leave this branch lopsided, but optimize
		     left-hand side and fill in `parent' fields for
		     right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP is the pivot.  Detach it from the list: everything
	     before it becomes the left branch, everything after it the
	     right branch.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
5830\f
5831/* Search the parent sections of the case node tree
5832 to see if a test for the lower bound of NODE would be redundant.
5833 INDEX_TYPE is the type of the index expression.
5834
5835 The instructions to generate the case decision tree are
5836 output in the same order as nodes are processed so it is
5837 known that if a parent node checks the range of the current
5838 node minus one that the current node is bounded at its lower
5839 span. Thus the test would be redundant. */
5840
5841static int
5842node_has_low_bound (node, index_type)
5843 case_node_ptr node;
5844 tree index_type;
5845{
5846 tree low_minus_one;
5847 case_node_ptr pnode;
5848
5849 /* If the lower bound of this node is the lowest value in the index type,
5850 we need not test it. */
5851
5852 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5853 return 1;
5854
5855 /* If this node has a left branch, the value at the left must be less
5856 than that at this node, so it cannot be bounded at the bottom and
5857 we need not bother testing any further. */
5858
5859 if (node->left)
5860 return 0;
5861
5862 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5863 node->low, integer_one_node));
5864
5865 /* If the subtraction above overflowed, we can't verify anything.
5866 Otherwise, look for a parent that tests our value - 1. */
5867
5868 if (! tree_int_cst_lt (low_minus_one, node->low))
5869 return 0;
5870
5871 for (pnode = node->parent; pnode; pnode = pnode->parent)
5872 if (tree_int_cst_equal (low_minus_one, pnode->high))
5873 return 1;
5874
5875 return 0;
5876}
5877
5878/* Search the parent sections of the case node tree
5879 to see if a test for the upper bound of NODE would be redundant.
5880 INDEX_TYPE is the type of the index expression.
5881
5882 The instructions to generate the case decision tree are
5883 output in the same order as nodes are processed so it is
5884 known that if a parent node checks the range of the current
5885 node plus one that the current node is bounded at its upper
5886 span. Thus the test would be redundant. */
5887
5888static int
5889node_has_high_bound (node, index_type)
5890 case_node_ptr node;
5891 tree index_type;
5892{
5893 tree high_plus_one;
5894 case_node_ptr pnode;
5895
e1ee5cdc
RH
5896 /* If there is no upper bound, obviously no test is needed. */
5897
5898 if (TYPE_MAX_VALUE (index_type) == NULL)
5899 return 1;
5900
28d81abb
RK
5901 /* If the upper bound of this node is the highest value in the type
5902 of the index expression, we need not test against it. */
5903
5904 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5905 return 1;
5906
5907 /* If this node has a right branch, the value at the right must be greater
5908 than that at this node, so it cannot be bounded at the top and
5909 we need not bother testing any further. */
5910
5911 if (node->right)
5912 return 0;
5913
5914 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5915 node->high, integer_one_node));
5916
5917 /* If the addition above overflowed, we can't verify anything.
5918 Otherwise, look for a parent that tests our value + 1. */
5919
5920 if (! tree_int_cst_lt (node->high, high_plus_one))
5921 return 0;
5922
5923 for (pnode = node->parent; pnode; pnode = pnode->parent)
5924 if (tree_int_cst_equal (high_plus_one, pnode->low))
5925 return 1;
5926
5927 return 0;
5928}
5929
5930/* Search the parent sections of the
5931 case node tree to see if both tests for the upper and lower
5932 bounds of NODE would be redundant. */
5933
5934static int
5935node_is_bounded (node, index_type)
5936 case_node_ptr node;
5937 tree index_type;
5938{
5939 return (node_has_low_bound (node, index_type)
5940 && node_has_high_bound (node, index_type));
5941}
5942
5943/* Emit an unconditional jump to LABEL unless it would be dead code. */
5944
5945static void
5946emit_jump_if_reachable (label)
5947 rtx label;
5948{
5949 if (GET_CODE (get_last_insn ()) != BARRIER)
5950 emit_jump (label);
5951}
5952\f
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index,
			convert_modes (mode, imode,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if by doing so we avoid
	     only one right child; it costs too much space to save so
	     little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->right->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->left->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_expr (node->high, NULL_RTX,
						   VOIDmode, 0),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
						    high, low)),
				       NULL_RTX, mode, 0);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
e2500fed
GK
6329
6330#include "gt-stmt.h"
This page took 3.261289 seconds and 5 git commands to generate.