/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

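/* Illustrative sketch, not part of the original source: the call
   sequence a front end might use for `if (cond) body', following the
   comment above.  The exact argument details are assumptions.

	expand_start_cond (cond, 0);	-- emit the test of COND
	... expand RTL for the then-clause ...
	expand_end_cond ();		-- close the construct

   */
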
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
ccd043a9 40
28d81abb
RK
41#include "rtl.h"
42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
28d81abb 44#include "flags.h"
6adb4e3a 45#include "except.h"
28d81abb 46#include "function.h"
28d81abb 47#include "insn-config.h"
28d81abb 48#include "expr.h"
e78d8e51 49#include "libfuncs.h"
28d81abb 50#include "hard-reg-set.h"
28d81abb
RK
51#include "loop.h"
52#include "recog.h"
ca695ac9 53#include "machmode.h"
10f0ad3d 54#include "toplev.h"
d6f4ec51 55#include "output.h"
87ff9c8e 56#include "ggc.h"
43577e6b 57#include "langhooks.h"
969d70ca 58#include "predict.h"
ca695ac9 59
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

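/* Illustrative example, not in the original source: for

	switch (i) { case 1: ... case 2: ... case 3: ... }

   the case values are dense in [1, 3], so a branch table is emitted
   and the node chain is left alone; for sparse values such as
   1, 100, 10000 the nodes are instead balanced into a binary tree
   and output as compare-and-jump insns.  */
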
struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance; /* Balance factor for the AVL tree */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
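
/* Illustrative, not in the original source: COST_TABLE (-1) expands
   to cost_table_[0] and COST_TABLE (127) to cost_table_[128], so the
   indices -1..127 map onto the 129 entries of cost_table_ without a
   negative subscript.  */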
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

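/* Illustrative sketch, not in the original source: while expanding

	while (c) { if (d) break; }

   nesting_stack holds the entry for the `if' above the entry for the
   `while', chained through `all'; the same two entries sit on
   cond_stack and loop_stack respectively, chained through `next'.  */
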
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
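
/* Illustrative use, not in the original source: an
   `expand_end_WHATEVER' function pops its construct with, e.g.,

	POPSTACK (cond_stack);

   which unwinds nesting_stack down through the entry currently at the
   head of cond_stack, keeping every per-construct stack in step.  */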
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

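/* Illustrative sketch, not in the original source, assuming C with a
   variable-sized object:

	{ char buf[n]; ... goto out; ... }
	out: ;

   When the goto is expanded, `out' is not yet defined, so we cannot
   tell whether (or to what level) the stack pointer must be restored;
   expand_goto therefore queues a goto_fixup, and the restore/cleanup
   code is inserted before the jump once the answer is known.  */
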
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following entry in this chain of labels.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PARAMS ((int, const char *));
static bool parse_input_constraint	PARAMS ((const char **, int, int, int,
						 int, const char * const *,
						 bool *, bool *));
static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
						 rtx, int));
static bool check_operand_nalternatives	PARAMS ((tree, tree));
static bool check_unique_operand_names	PARAMS ((tree, tree));
static tree resolve_operand_names	PARAMS ((tree, tree, tree,
						 const char **));
static char *resolve_operand_name_1	PARAMS ((char *, tree, tree));
static void expand_null_return_1	PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
						 case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
\f
/* Record that cleanups are to be handled via exception handling.  */

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Initialize the statement-handling state for the current function.  */

void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}
\f
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
\f
/* Expand any needed fixups in the outputmost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (string, vol)
     tree string;
     int vol;
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}

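/* Illustrative, not in the original source: a source-level

	asm volatile ("nop");

   reaches expand_asm above with STRING holding the STRING_CST "nop"
   and VOL == 1, and is emitted as a volatile ASM_INPUT body.  */
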
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
			 allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else if (EXTRA_ADDRESS_CONSTRAINT (*p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

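/* Illustrative use of parse_output_constraint, not in the original
   source: given *CONSTRAINT_P == "+r" it canonicalizes the string to
   "=r" and sets *IS_INOUT and *ALLOWS_REG; given "=m" it sets
   *ALLOWS_MEM instead.  */
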
/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
			constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      break;
	    }
	  else
	    j = end - constraint;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j]))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j]))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (decl, clobbered_regs)
     tree decl;
     const HARD_REG_SET clobbered_regs;
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }
  return false;
}

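/* Illustrative, not in the original source: for

	asm ("add %1,%0" : "=r" (x) : "r" (y), "0" (x));

   expand_asm_operands below receives OUTPUTS and INPUTS as tree lists
   whose TREE_VALUEs are the expressions for `x' and `y', with the
   constraint strings "=r", "r" and "0" carried in the TREE_PURPOSE
   slots as described in the comment that follows.  */
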
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

1457void
1458expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1459 tree string, outputs, inputs, clobbers;
1460 int vol;
3b304f5b 1461 const char *filename;
28d81abb
RK
1462 int line;
1463{
84b72302 1464 rtvec argvec, constraintvec;
28d81abb
RK
1465 rtx body;
1466 int ninputs = list_length (inputs);
1467 int noutputs = list_length (outputs);
6be2e1f8 1468 int ninout;
b4ccaa16 1469 int nclobbers;
acb5d088
HPN
1470 HARD_REG_SET clobbered_regs;
1471 int clobber_conflict_found = 0;
28d81abb 1472 tree tail;
b3694847 1473 int i;
28d81abb
RK
1474 /* Vector of RTX's of evaluated output operands. */
1475 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
235c5021 1476 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
947255ed 1477 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
235c5021
RK
1478 enum machine_mode *inout_mode
1479 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
84b72302
RH
1480 const char **constraints
1481 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1b3d8f8a 1482 int old_generating_concat_p = generating_concat_p;
28d81abb 1483
e5e809f4 1484 /* An ASM with no outputs needs to be treated as volatile, for now. */
296f8acc
JL
1485 if (noutputs == 0)
1486 vol = 1;
1487
84b72302
RH
1488 if (! check_operand_nalternatives (outputs, inputs))
1489 return;
1490
1491 if (! check_unique_operand_names (outputs, inputs))
1492 return;
1493
1494 string = resolve_operand_names (string, outputs, inputs, constraints);
1495
57bcb97a
RH
1496#ifdef MD_ASM_CLOBBERS
1497 /* Sometimes we wish to automatically clobber registers across an asm.
1498 Case in point is when the i386 backend moved from cc0 to a hard reg --
f63d1bf7 1499 maintaining source-level compatibility means automatically clobbering
57bcb97a
RH
1500 the flags register. */
1501 MD_ASM_CLOBBERS (clobbers);
1502#endif
1503
b4ccaa16
RS
1504 /* Count the number of meaningful clobbered registers, ignoring what
1505 we would ignore later. */
1506 nclobbers = 0;
acb5d088 1507 CLEAR_HARD_REG_SET (clobbered_regs);
b4ccaa16
RS
1508 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1509 {
47ee9bcb 1510 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
14a774a9 1511
c09e6498
RS
1512 i = decode_reg_name (regname);
1513 if (i >= 0 || i == -4)
b4ccaa16 1514 ++nclobbers;
7859e3ac
DE
1515 else if (i == -2)
1516 error ("unknown register name `%s' in `asm'", regname);
acb5d088
HPN
1517
1518 /* Mark clobbered registers. */
1519 if (i >= 0)
e54b4cae
EB
1520 {
1521 /* Clobbering the PIC register is an error */
1522 if ((unsigned) i == PIC_OFFSET_TABLE_REGNUM)
1523 {
1524 error ("PIC register `%s' clobbered in `asm'", regname);
1525 return;
1526 }
1527
1528 SET_HARD_REG_BIT (clobbered_regs, i);
1529 }
1530 }
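/* To make the decode_reg_name conventions concrete (hypothetical example):
   for `asm (... : : : "memory", "cc", "di")' it yields -4 for "memory",
   -3 for "cc", and a hard register number for "di"; only "memory" and
   "di" count toward NCLOBBERS above, and only "di" is entered in
   CLOBBERED_REGS.  */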
1531
e2500fed 1532 clear_last_expr ();
28d81abb 1533
1534 /* First pass over inputs and outputs checks validity and sets
1535 mark_addressable if needed. */
1536
1537 ninout = 0;
1538 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1539 {
1540 tree val = TREE_VALUE (tail);
b50a024d 1541 tree type = TREE_TYPE (val);
6be2e1f8 1542 const char *constraint;
1543 bool is_inout;
1544 bool allows_reg;
1545 bool allows_mem;
1546
1547 /* If there's an erroneous arg, emit no insn. */
40b18c0a 1548 if (type == error_mark_node)
1549 return;
1550
1551 /* Try to parse the output constraint. If that fails, there's
1552 no point in going further. */
1553 constraint = constraints[i];
1554 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1555 &allows_mem, &allows_reg, &is_inout))
1556 return;
1557
1558 if (! allows_reg
1559 && (allows_mem
1560 || is_inout
1561 || (DECL_P (val)
1562 && GET_CODE (DECL_RTL (val)) == REG
1563 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
dffd7eb6 1564 (*lang_hooks.mark_addressable) (val);
1565
1566 if (is_inout)
1567 ninout++;
1568 }
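/* A sketch of what this first pass decides: an output constraint "=r"
   sets only ALLOWS_REG; "=m" sets only ALLOWS_MEM (and the lvalue gets
   marked addressable); "+r" additionally sets IS_INOUT, which reserves a
   hidden matching input counted in NINOUT.  */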
1569
1570 ninputs += ninout;
1571 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1572 {
1573 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1574 return;
1575 }
1576
1577 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1578 {
1579 bool allows_reg, allows_mem;
1580 const char *constraint;
1581
1582 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1583 would get VOIDmode and that could cause a crash in reload. */
1584 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1585 return;
1586
1587 constraint = constraints[i + noutputs];
1588 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1589 constraints, &allows_mem, &allows_reg))
40b18c0a 1590 return;
d09a75ae 1591
6be2e1f8 1592 if (! allows_reg && allows_mem)
dffd7eb6 1593 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1594 }
1595
1596 /* Second pass evaluates arguments. */
1597
1598 ninout = 0;
1599 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1600 {
1601 tree val = TREE_VALUE (tail);
1602 tree type = TREE_TYPE (val);
1603 bool is_inout;
1604 bool allows_reg;
1605 bool allows_mem;
1606
1607 if (!parse_output_constraint (&constraints[i], i, ninputs,
1608 noutputs, &allows_mem, &allows_reg,
1609 &is_inout))
1610 abort ();
1611
1612 /* If an output operand is not a decl or indirect ref and our constraint
1613 allows a register, make a temporary to act as an intermediate.
1614 Make the asm insn write into that, then our caller will copy it to
1615 the real output operand. Likewise for promoted variables. */
28d81abb 1616
1617 generating_concat_p = 0;
1618
947255ed 1619 real_output_rtx[i] = NULL_RTX;
1620 if ((TREE_CODE (val) == INDIRECT_REF
1621 && allows_mem)
2f939d94 1622 || (DECL_P (val)
1afbe1c4 1623 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
b50a024d 1624 && ! (GET_CODE (DECL_RTL (val)) == REG
d09a75ae 1625 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
11579f33 1626 || ! allows_reg
2a230e9d 1627 || is_inout)
d09a75ae 1628 {
11579f33 1629 output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
d09a75ae 1630
11579f33 1631 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
d09a75ae 1632 error ("output number %d not directly addressable", i);
1633 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1634 || GET_CODE (output_rtx[i]) == CONCAT)
947255ed 1635 {
786de7eb 1636 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1637 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1638 if (is_inout)
1639 emit_move_insn (output_rtx[i], real_output_rtx[i]);
947255ed 1640 }
d09a75ae 1641 }
b50a024d 1642 else
e619bb8d 1643 {
11579f33 1644 output_rtx[i] = assign_temp (type, 0, 0, 1);
1645 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1646 }
235c5021 1647
1648 generating_concat_p = old_generating_concat_p;
1649
2a230e9d 1650 if (is_inout)
235c5021 1651 {
6be2e1f8 1652 inout_mode[ninout] = TYPE_MODE (type);
1653 inout_opnum[ninout++] = i;
1654 }
1655
1656 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1657 clobber_conflict_found = 1;
1658 }
1659
1660 /* Make vectors for the expression-rtx, constraint strings,
1661 and named operands. */
1662
1663 argvec = rtvec_alloc (ninputs);
84b72302 1664 constraintvec = rtvec_alloc (ninputs);
28d81abb 1665
1666 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1667 : GET_MODE (output_rtx[0])),
786de7eb 1668 TREE_STRING_POINTER (string),
84b72302 1669 empty_string, 0, argvec, constraintvec,
21a427cc 1670 filename, line);
c85f7c16 1671
78418280 1672 MEM_VOLATILE_P (body) = vol;
1673
1674 /* Eval the inputs and put them into ARGVEC.
1675 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1676
84b72302 1677 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
28d81abb 1678 {
1679 bool allows_reg, allows_mem;
1680 const char *constraint;
1681 tree val, type;
1f06ee8d 1682 rtx op;
28d81abb 1683
1684 constraint = constraints[i + noutputs];
1685 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1686 constraints, &allows_mem, &allows_reg))
1687 abort ();
2a230e9d 1688
6be2e1f8 1689 generating_concat_p = 0;
65fed0cb 1690
1691 val = TREE_VALUE (tail);
1692 type = TREE_TYPE (val);
1693 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
65fed0cb 1694
1b3d8f8a 1695 /* Never pass a CONCAT to an ASM. */
1696 if (GET_CODE (op) == CONCAT)
1697 op = force_reg (GET_MODE (op), op);
1698
1afbe1c4 1699 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1700 {
11579f33 1701 if (allows_reg)
6be2e1f8 1702 op = force_reg (TYPE_MODE (type), op);
11579f33 1703 else if (!allows_mem)
1704 warning ("asm operand %d probably doesn't match constraints",
1705 i + noutputs);
1f06ee8d 1706 else if (CONSTANT_P (op))
6be2e1f8 1707 op = force_const_mem (TYPE_MODE (type), op);
1708 else if (GET_CODE (op) == REG
1709 || GET_CODE (op) == SUBREG
d96ecaf4 1710 || GET_CODE (op) == ADDRESSOF
1711 || GET_CODE (op) == CONCAT)
1712 {
1713 tree qual_type = build_qualified_type (type,
1714 (TYPE_QUALS (type)
1715 | TYPE_QUAL_CONST));
1716 rtx memloc = assign_temp (qual_type, 1, 1, 1);
65fed0cb 1717
1718 emit_move_insn (memloc, op);
1719 op = memloc;
1720 }
14a774a9 1721
1f06ee8d 1722 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1723 {
1724 /* We won't recognize volatile memory as an available
1725 memory_operand at this point.  Ignore it.  */
1726 }
1727 else if (queued_subexp_p (op))
1728 ;
1729 else
1730 /* ??? Leave this only until we have experience with what
1731 happens in combine and elsewhere when constraints are
1732 not satisfied. */
1733 warning ("asm operand %d probably doesn't match constraints",
1734 i + noutputs);
65fed0cb 1735 }
6be2e1f8 1736
1b3d8f8a 1737 generating_concat_p = old_generating_concat_p;
6462bb43 1738 ASM_OPERANDS_INPUT (body, i) = op;
2a230e9d 1739
6462bb43 1740 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
6be2e1f8 1741 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1742
1743 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1744 clobber_conflict_found = 1;
1745 }
1746
1747 /* Protect all the operands from the queue now that they have all been
1748 evaluated. */
28d81abb 1749
1750 generating_concat_p = 0;
1751
235c5021 1752 for (i = 0; i < ninputs - ninout; i++)
1753 ASM_OPERANDS_INPUT (body, i)
1754 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1755
1756 for (i = 0; i < noutputs; i++)
1757 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1758
4381f7c2 1759 /* For in-out operands, copy output rtx to input rtx. */
1760 for (i = 0; i < ninout; i++)
1761 {
235c5021 1762 int j = inout_opnum[i];
84b72302 1763 char buffer[16];
235c5021 1764
6462bb43 1765 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
235c5021 1766 = output_rtx[j];
1767
1768 sprintf (buffer, "%d", j);
6462bb43 1769 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
84b72302 1770 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1771 }
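/* E.g. (sketch): for `asm ("incl %0" : "+r" (x))', operand 0 is the
   output with constraint "+r", and the loop above appends a hidden input
   whose constraint is the digit string "0" built here, so the insn reads
   and writes the same register.  */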
1772
1773 generating_concat_p = old_generating_concat_p;
1774
28d81abb 1775 /* Now, for each output, construct an rtx
1776 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1777 ARGVEC CONSTRAINTS OPNAMES))
1778 If there is more than one, put them inside a PARALLEL. */
1779
1780 if (noutputs == 1 && nclobbers == 0)
1781 {
84b72302 1782 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
4977bab6 1783 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1784 }
14a774a9 1785
1786 else if (noutputs == 0 && nclobbers == 0)
1787 {
1788 /* No output operands: put in a raw ASM_OPERANDS rtx. */
4977bab6 1789 emit_insn (body);
28d81abb 1790 }
14a774a9 1791
1792 else
1793 {
1794 rtx obody = body;
1795 int num = noutputs;
1796
1797 if (num == 0)
1798 num = 1;
1799
38a448ca 1800 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1801
1802 /* For each output operand, store a SET. */
1803 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1804 {
1805 XVECEXP (body, 0, i)
1806 = gen_rtx_SET (VOIDmode,
1807 output_rtx[i],
c5c76735 1808 gen_rtx_ASM_OPERANDS
6462bb43 1809 (GET_MODE (output_rtx[i]),
c5c76735 1810 TREE_STRING_POINTER (string),
84b72302 1811 constraints[i], i, argvec, constraintvec,
1812 filename, line));
1813
1814 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1815 }
1816
1817 /* If there are no outputs (but there are some clobbers)
1818 store the bare ASM_OPERANDS into the PARALLEL. */
1819
1820 if (i == 0)
1821 XVECEXP (body, 0, i++) = obody;
1822
1823 /* Store (clobber REG) for each clobbered register specified. */
1824
b4ccaa16 1825 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1826 {
47ee9bcb 1827 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1828 int j = decode_reg_name (regname);
acb5d088 1829 rtx clobbered_reg;
28d81abb 1830
b4ac57ab 1831 if (j < 0)
28d81abb 1832 {
c09e6498 1833 if (j == -3) /* `cc', which is not a register */
1834 continue;
1835
1836 if (j == -4) /* `memory', don't cache memory across asm */
1837 {
bffc6177 1838 XVECEXP (body, 0, i++)
38a448ca 1839 = gen_rtx_CLOBBER (VOIDmode,
1840 gen_rtx_MEM
1841 (BLKmode,
1842 gen_rtx_SCRATCH (VOIDmode)));
1843 continue;
1844 }
1845
956d6950 1846 /* Ignore unknown register, error already signaled. */
cc1f5387 1847 continue;
1848 }
1849
1850 /* Use QImode since that's guaranteed to clobber just one reg. */
1851 clobbered_reg = gen_rtx_REG (QImode, j);
1852
1853 /* Do a sanity check for overlap between clobbers and the inputs
1854 and outputs that hasn't otherwise been handled.  Such overlap
1855 should have been detected and reported above.  */
1856 if (!clobber_conflict_found)
1857 {
1858 int opno;
1859
1860 /* We test the old body (obody) contents to avoid tripping
1861 over the under-construction body. */
1862 for (opno = 0; opno < noutputs; opno++)
1863 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1864 internal_error ("asm clobber conflict with output operand");
1865
1866 for (opno = 0; opno < ninputs - ninout; opno++)
1867 if (reg_overlap_mentioned_p (clobbered_reg,
1868 ASM_OPERANDS_INPUT (obody, opno)))
1869 internal_error ("asm clobber conflict with input operand");
1870 }
1871
b4ccaa16 1872 XVECEXP (body, 0, i++)
acb5d088 1873 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1874 }
1875
4977bab6 1876 emit_insn (body);
1877 }
1878
1879 /* For any outputs that needed reloading into registers, spill them
1880 back to where they belong. */
1881 for (i = 0; i < noutputs; ++i)
1882 if (real_output_rtx[i])
1883 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1884
1885 free_temp_slots ();
1886}
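/* For the single-output example near the top of this function, the insn
   emitted above looks roughly like (a sketch; modes are target-dependent):

	(set (reg:SI 58)
	     (asm_operands:SI ("addl %2,%0") ("=r") 0 [...inputs...]
			      [...input constraints...] ...))

   with any clobbered registers appearing as (clobber ...) elements of an
   enclosing PARALLEL.  */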
1887
1888/* A subroutine of expand_asm_operands. Check that all operands have
1889 the same number of alternatives. Return true if so. */
1890
1891static bool
1892check_operand_nalternatives (outputs, inputs)
1893 tree outputs, inputs;
1894{
1895 if (outputs || inputs)
1896 {
1897 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1898 int nalternatives
1899 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1900 tree next = inputs;
1901
1902 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1903 {
1904 error ("too many alternatives in `asm'");
1905 return false;
1906 }
1907
1908 tmp = outputs;
1909 while (tmp)
1910 {
1911 const char *constraint
1912 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1913
1914 if (n_occurrences (',', constraint) != nalternatives)
1915 {
1916 error ("operand constraints for `asm' differ in number of alternatives");
1917 return false;
1918 }
1919
1920 if (TREE_CHAIN (tmp))
1921 tmp = TREE_CHAIN (tmp);
1922 else
1923 tmp = next, next = 0;
1924 }
1925 }
1926
1927 return true;
1928}
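/* Example (sketch): an output constraint "=r,m" and an input constraint
   "r,o" each contain one comma and hence two alternatives, so they pass
   this check; pairing "=r,m" with a one-alternative input "r" would be
   rejected with the error above.  */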
1929
1930/* A subroutine of expand_asm_operands. Check that all operand names
1931 are unique. Return true if so. We rely on the fact that these names
1932 are identifiers, and so have been canonicalized by get_identifier,
1933 so all we need are pointer comparisons. */
1934
1935static bool
1936check_unique_operand_names (outputs, inputs)
1937 tree outputs, inputs;
1938{
1939 tree i, j;
1940
1941 for (i = outputs; i ; i = TREE_CHAIN (i))
1942 {
1943 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1944 if (! i_name)
1945 continue;
1946
1947 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1948 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1949 goto failure;
1950 }
1951
1952 for (i = inputs; i ; i = TREE_CHAIN (i))
1953 {
1954 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1955 if (! i_name)
1956 continue;
1957
1958 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1959 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1960 goto failure;
1961 for (j = outputs; j ; j = TREE_CHAIN (j))
fc552851 1962 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1963 goto failure;
1964 }
1965
1966 return true;
1967
1968 failure:
1969 error ("duplicate asm operand name '%s'",
fc552851 1970 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1971 return false;
1972}
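/* E.g. (hypothetical): `asm ("" : [val] "=r" (a) : [val] "r" (b))' fails
   here with "duplicate asm operand name 'val'", since the same name is
   used for both an output and an input.  */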
1973
1974/* A subroutine of expand_asm_operands. Resolve the names of the operands
1975 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1976 STRING and in the constraints to those numbers. */
1977
1978static tree
1979resolve_operand_names (string, outputs, inputs, pconstraints)
1980 tree string;
1981 tree outputs, inputs;
1982 const char **pconstraints;
1983{
1984 char *buffer = xstrdup (TREE_STRING_POINTER (string));
1985 char *p;
1986 tree t;
1987
1988 /* Assume that we will not need extra space to perform the substitution.
1989 This is because we get to remove '[' and ']', which means we cannot have
1990 a problem until we have more than 999 operands. */
1991
1992 p = buffer;
1993 while ((p = strchr (p, '%')) != NULL)
1994 {
1995 if (p[1] == '[')
1996 p += 1;
1997 else if (ISALPHA (p[1]) && p[2] == '[')
1998 p += 2;
1999 else
2000 {
2001 p += 1;
2002 continue;
2003 }
2004
2005 p = resolve_operand_name_1 (p, outputs, inputs);
2006 }
2007
2008 string = build_string (strlen (buffer), buffer);
2009 free (buffer);
2010
2011 /* Collect output constraints here because it's convenient.
2012 There should be no named operands here; this is verified
2013 in expand_asm_operands.  */
2014 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2015 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2016
2017 /* Substitute [<name>] in input constraint strings. */
2018 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2019 {
2020 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2021 if (strchr (c, '[') == NULL)
2022 *pconstraints = c;
2023 else
2024 {
2025 p = buffer = xstrdup (c);
2026 while ((p = strchr (p, '[')) != NULL)
2027 p = resolve_operand_name_1 (p, outputs, inputs);
2028
2029 *pconstraints = ggc_alloc_string (buffer, -1);
2030 free (buffer);
2031 }
2032 }
2033
2034 return string;
2035}
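/* Example of the substitution performed here (a sketch): with operands
   [dst] "=r" (x) and [src] "r" (y), the template "mov %[src], %[dst]"
   becomes "mov %1, %0", and an input constraint written as "[dst]"
   becomes the matching-operand digit "0".  */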
2036
2037/* A subroutine of resolve_operand_names. P points to the '[' for a
2038 potential named operand of the form [<name>]. In place, replace
786de7eb 2039 the name and brackets with a number. Return a pointer to the
2040 balance of the string after substitution. */
2041
2042static char *
2043resolve_operand_name_1 (p, outputs, inputs)
2044 char *p;
2045 tree outputs, inputs;
2046{
2047 char *q;
2048 int op;
2049 tree t;
2050 size_t len;
2051
2052 /* Collect the operand name. */
2053 q = strchr (p, ']');
2054 if (!q)
2055 {
2056 error ("missing close brace for named operand");
2057 return strchr (p, '\0');
2058 }
2059 len = q - p - 1;
2060
2061 /* Resolve the name to a number. */
2062 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2063 {
2064 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2065 if (name)
edd1967d 2066 {
fc552851 2067 const char *c = TREE_STRING_POINTER (name);
2068 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2069 goto found;
2070 }
2071 }
2072 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2073 {
2074 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2075 if (name)
edd1967d 2076 {
fc552851 2077 const char *c = TREE_STRING_POINTER (name);
2078 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2079 goto found;
2080 }
2081 }
2082
2083 *q = '\0';
2084 error ("undefined named operand '%s'", p + 1);
2085 op = 0;
2086 found:
2087
2088 /* Replace the name with the number. Unfortunately, not all libraries
2089 get the return value of sprintf correct, so search for the end of the
2090 generated string by hand. */
2091 sprintf (p, "%d", op);
2092 p = strchr (p, '\0');
2093
2094 /* Verify the no extra buffer space assumption. */
2095 if (p > q)
2096 abort ();
2097
2098 /* Shift the rest of the buffer down to fill the gap. */
2099 memmove (p, q + 1, strlen (q + 1) + 1);
2100
2101 return p;
2102}
2103\f
2104/* Generate RTL to evaluate the expression EXP
2105 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2106 Provided just for backward-compatibility. expand_expr_stmt_value()
2107 should be used for new code. */
2108
2109void
2110expand_expr_stmt (exp)
2111 tree exp;
2112{
b0832fe1 2113 expand_expr_stmt_value (exp, -1, 1);
2114}
2115
2116/* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2117 whether to (1) save the value of the expression, (0) discard it or
2118 (-1) use expr_stmts_for_value to tell. The use of -1 is
2119 deprecated, and retained only for backward compatibility. */
2120
2121void
b0832fe1 2122expand_expr_stmt_value (exp, want_value, maybe_last)
1574ef13 2123 tree exp;
b0832fe1 2124 int want_value, maybe_last;
2125{
2126 rtx value;
2127 tree type;
2128
2129 if (want_value == -1)
2130 want_value = expr_stmts_for_value != 0;
2131
2132 /* If -W, warn about statements with no side effects,
2133 except for an explicit cast to void (e.g. for assert()), and
2134 except for last statement in ({...}) where they may be useful. */
2135 if (! want_value
2136 && (expr_stmts_for_value == 0 || ! maybe_last)
2137 && exp != error_mark_node)
28d81abb 2138 {
2139 if (! TREE_SIDE_EFFECTS (exp))
2140 {
2141 if ((extra_warnings || warn_unused_value)
2142 && !(TREE_CODE (exp) == CONVERT_EXPR
2143 && VOID_TYPE_P (TREE_TYPE (exp))))
2144 warning_with_file_and_line (emit_filename, emit_lineno,
2145 "statement with no effect");
2146 }
078721e1 2147 else if (warn_unused_value)
2148 warn_if_unused_value (exp);
2149 }
2150
2151 /* If EXP is of function type and we are expanding statements for
2152 value, convert it to pointer-to-function. */
1574ef13 2153 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2154 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2155
2156 /* The call to `expand_expr' could cause last_expr_type and
2157 last_expr_value to get reset. Therefore, we set last_expr_value
2158 and last_expr_type *after* calling expand_expr. */
2159 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2160 VOIDmode, 0);
2161 type = TREE_TYPE (exp);
2162
2163 /* If all we do is reference a volatile value in memory,
2164 copy it to a register to be sure it is actually touched. */
1574ef13 2165 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
28d81abb 2166 {
1574ef13 2167 if (TYPE_MODE (type) == VOIDmode)
6a5bbbe6 2168 ;
2169 else if (TYPE_MODE (type) != BLKmode)
2170 value = copy_to_reg (value);
28d81abb 2171 else
2172 {
2173 rtx lab = gen_label_rtx ();
4381f7c2 2174
ddbe9812 2175 /* Compare the value with itself to reference it. */
2176 emit_cmp_and_jump_insns (value, value, EQ,
2177 expand_expr (TYPE_SIZE (type),
c5d5d461 2178 NULL_RTX, VOIDmode, 0),
d43e0b7d 2179 BLKmode, 0, lab);
2180 emit_label (lab);
2181 }
2182 }
2183
2184 /* If this expression is part of a ({...}) and is in memory, we may have
2185 to preserve temporaries. */
1574ef13 2186 preserve_temp_slots (value);
2187
2188 /* Free any temporaries used to evaluate this expression. Any temporary
2189 used as a result of this expression will already have been preserved
2190 above. */
2191 free_temp_slots ();
2192
2193 if (want_value)
2194 {
2195 last_expr_value = value;
2196 last_expr_type = type;
2197 }
2198
2199 emit_queue ();
2200}
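/* For a GNU C statement expression like `y = ({ int t = f (); t * 2; });'
   the front end calls this once per inner statement; for the final
   `t * 2' WANT_VALUE ends up nonzero, so its rtx and type are saved in
   last_expr_value/last_expr_type for expand_end_stmt_expr (sketch).  */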
2201
2202/* Warn if EXP contains any computations whose results are not used.
2203 Return 1 if a warning is printed; 0 otherwise. */
2204
150a992a 2205int
2206warn_if_unused_value (exp)
2207 tree exp;
2208{
2209 if (TREE_USED (exp))
2210 return 0;
2211
2212 /* Don't warn about void constructs. This includes casting to void,
2213 void function calls, and statement expressions with a final cast
2214 to void. */
2215 if (VOID_TYPE_P (TREE_TYPE (exp)))
2216 return 0;
2217
2218 switch (TREE_CODE (exp))
2219 {
2220 case PREINCREMENT_EXPR:
2221 case POSTINCREMENT_EXPR:
2222 case PREDECREMENT_EXPR:
2223 case POSTDECREMENT_EXPR:
2224 case MODIFY_EXPR:
2225 case INIT_EXPR:
2226 case TARGET_EXPR:
2227 case CALL_EXPR:
2228 case METHOD_CALL_EXPR:
2229 case RTL_EXPR:
81797aba 2230 case TRY_CATCH_EXPR:
2231 case WITH_CLEANUP_EXPR:
2232 case EXIT_EXPR:
2233 return 0;
2234
2235 case BIND_EXPR:
2236 /* For a binding, warn if no side effect within it. */
2237 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2238
2239 case SAVE_EXPR:
2240 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2241
2242 case TRUTH_ORIF_EXPR:
2243 case TRUTH_ANDIF_EXPR:
2244 /* In && or ||, warn if 2nd operand has no side effect. */
2245 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2246
2247 case COMPOUND_EXPR:
2248 if (TREE_NO_UNUSED_WARNING (exp))
2249 return 0;
2250 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2251 return 1;
2252 /* Let people do `(foo (), 0)' without a warning. */
2253 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2254 return 0;
2255 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2256
2257 case NOP_EXPR:
2258 case CONVERT_EXPR:
b4ac57ab 2259 case NON_LVALUE_EXPR:
2260 /* Don't warn about conversions not explicit in the user's program. */
2261 if (TREE_NO_UNUSED_WARNING (exp))
2262 return 0;
2263 /* Assignment to a cast usually results in a cast of a modify.
2264 Don't complain about that. There can be an arbitrary number of
2265 casts before the modify, so we must loop until we find the first
2266 non-cast expression and then test to see if that is a modify. */
2267 {
2268 tree tem = TREE_OPERAND (exp, 0);
2269
2270 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2271 tem = TREE_OPERAND (tem, 0);
2272
2273 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2274 || TREE_CODE (tem) == CALL_EXPR)
2275 return 0;
2276 }
7133e992 2277 goto maybe_warn;
28d81abb 2278
2279 case INDIRECT_REF:
2280 /* Don't warn about automatic dereferencing of references, since
2281 the user cannot control it. */
2282 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2283 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2284 /* Fall through. */
2285
28d81abb 2286 default:
ddbe9812 2287 /* Referencing a volatile value is a side effect, so don't warn. */
2f939d94 2288 if ((DECL_P (exp)
2289 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2290 && TREE_THIS_VOLATILE (exp))
2291 return 0;
2292
2293 /* If this is an expression which has no operands, there is no value
2294 to be unused. There are no such language-independent codes,
2295 but front ends may define such. */
2296 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2297 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2298 return 0;
2299
2300 maybe_warn:
2301 /* If this is an expression with side effects, don't warn. */
2302 if (TREE_SIDE_EFFECTS (exp))
2303 return 0;
2304
2305 warning_with_file_and_line (emit_filename, emit_lineno,
2306 "value computed is not used");
2307 return 1;
2308 }
2309}
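/* Examples of the rules above (sketch): `x + 1;' warns ("value computed
   is not used"); `(void) f ();' and `i++;' do not; and in `(foo (), 0)'
   the constant second operand is deliberately exempted.  */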
2310
2311/* Clear out the memory of the last expression evaluated. */
2312
2313void
2314clear_last_expr ()
2315{
2316 last_expr_type = NULL_TREE;
2317 last_expr_value = NULL_RTX;
2318}
2319
2320/* Begin a statement-expression, i.e., a series of statements which
2321 may return a value. Return the RTL_EXPR for this statement expr.
2322 The caller must save that value and pass it to
2323 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2324 in the statement-expression are deallocated at the end of the
2325 expression. */
2326
2327tree
2328expand_start_stmt_expr (has_scope)
2329 int has_scope;
28d81abb 2330{
2331 tree t;
2332
2333 /* Make the RTL_EXPR node temporary, not momentary,
2334 so that rtl_expr_chain doesn't become garbage. */
ca695ac9 2335 t = make_node (RTL_EXPR);
33c6ab80 2336 do_pending_stack_adjust ();
2337 if (has_scope)
2338 start_sequence_for_rtl_expr (t);
2339 else
2340 start_sequence ();
2341 NO_DEFER_POP;
2342 expr_stmts_for_value++;
2343 return t;
2344}
2345
2346/* Restore the previous state at the end of a statement that returns a value.
2347 Returns a tree node representing the statement's value and the
2348 insns to compute the value.
2349
2350 The nodes of that expression have been freed by now, so we cannot use them.
2351 But we don't want to do that anyway; the expression has already been
2352 evaluated and now we just want to use the value. So generate a RTL_EXPR
2353 with the proper type and RTL value.
2354
2355 If the last substatement was not an expression,
2356 return something with type `void'. */
2357
2358tree
2359expand_end_stmt_expr (t)
2360 tree t;
2361{
2362 OK_DEFER_POP;
2363
1574ef13 2364 if (! last_expr_value || ! last_expr_type)
28d81abb 2365 {
28d81abb 2366 last_expr_value = const0_rtx;
1574ef13 2367 last_expr_type = void_type_node;
28d81abb 2368 }
2369 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2370 /* Remove any possible QUEUED. */
2371 last_expr_value = protect_from_queue (last_expr_value, 0);
2372
2373 emit_queue ();
2374
2375 TREE_TYPE (t) = last_expr_type;
2376 RTL_EXPR_RTL (t) = last_expr_value;
2377 RTL_EXPR_SEQUENCE (t) = get_insns ();
2378
2379 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2380
2381 end_sequence ();
2382
2383 /* Don't consider deleting this expr or containing exprs at tree level. */
2384 TREE_SIDE_EFFECTS (t) = 1;
2385 /* Propagate volatility of the actual RTL expr. */
2386 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2387
e2500fed 2388 clear_last_expr ();
2389 expr_stmts_for_value--;
2390
2391 return t;
2392}
2393\f
2394/* Generate RTL for the start of an if-then. COND is the expression
2395 whose truth should be tested.
2396
2397 If EXITFLAG is nonzero, this conditional is visible to
2398 `exit_something'. */
2399
2400void
2401expand_start_cond (cond, exitflag)
2402 tree cond;
2403 int exitflag;
2404{
2405 struct nesting *thiscond = ALLOC_NESTING ();
2406
2407 /* Make an entry on cond_stack for the cond we are entering. */
2408
e2500fed 2409 thiscond->desc = COND_NESTING;
2410 thiscond->next = cond_stack;
2411 thiscond->all = nesting_stack;
2412 thiscond->depth = ++nesting_depth;
2413 thiscond->data.cond.next_label = gen_label_rtx ();
2414 /* Before we encounter an `else', we don't need a separate exit label
2415 unless there are supposed to be exit statements
2416 to exit this conditional. */
2417 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2418 thiscond->data.cond.endif_label = thiscond->exit_label;
2419 cond_stack = thiscond;
2420 nesting_stack = thiscond;
2421
b93a436e 2422 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2423}
2424
2425/* Generate RTL between then-clause and the elseif-clause
2426 of an if-then-elseif-.... */
2427
2428void
2429expand_start_elseif (cond)
2430 tree cond;
2431{
2432 if (cond_stack->data.cond.endif_label == 0)
2433 cond_stack->data.cond.endif_label = gen_label_rtx ();
2434 emit_jump (cond_stack->data.cond.endif_label);
2435 emit_label (cond_stack->data.cond.next_label);
2436 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2437 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2438}
2439
2440/* Generate RTL between the then-clause and the else-clause
2441 of an if-then-else. */
2442
2443void
2444expand_start_else ()
2445{
2446 if (cond_stack->data.cond.endif_label == 0)
2447 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2448
2449 emit_jump (cond_stack->data.cond.endif_label);
2450 emit_label (cond_stack->data.cond.next_label);
0f41302f 2451 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2452}
2453
2454/* After calling expand_start_else, turn this "else" into an "else if"
2455 by providing another condition. */
2456
2457void
2458expand_elseif (cond)
2459 tree cond;
2460{
2461 cond_stack->data.cond.next_label = gen_label_rtx ();
2462 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2463}
2464
2465/* Generate RTL for the end of an if-then.
2466 Pop the record for it off of cond_stack. */
2467
2468void
2469expand_end_cond ()
2470{
2471 struct nesting *thiscond = cond_stack;
2472
2473 do_pending_stack_adjust ();
2474 if (thiscond->data.cond.next_label)
2475 emit_label (thiscond->data.cond.next_label);
2476 if (thiscond->data.cond.endif_label)
2477 emit_label (thiscond->data.cond.endif_label);
2478
2479 POPSTACK (cond_stack);
e2500fed 2480 clear_last_expr ();
2481}
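/* Sketch of the calling protocol for `if (a) S1; else S2;':

	expand_start_cond (a, 0);
	... expand S1 ...
	expand_start_else ();
	... expand S2 ...
	expand_end_cond ();

   expand_start_elseif handles `else if' chains analogously.  */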
2482\f
2483/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2484 loop should be exited by `exit_something'. This is a loop for which
2485 `expand_continue' will jump to the top of the loop.
2486
2487 Make an entry on loop_stack to record the labels associated with
2488 this loop. */
2489
2490struct nesting *
2491expand_start_loop (exit_flag)
2492 int exit_flag;
2493{
b3694847 2494 struct nesting *thisloop = ALLOC_NESTING ();
2495
2496 /* Make an entry on loop_stack for the loop we are entering. */
2497
e2500fed 2498 thisloop->desc = LOOP_NESTING;
2499 thisloop->next = loop_stack;
2500 thisloop->all = nesting_stack;
2501 thisloop->depth = ++nesting_depth;
2502 thisloop->data.loop.start_label = gen_label_rtx ();
2503 thisloop->data.loop.end_label = gen_label_rtx ();
2504 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2505 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2506 loop_stack = thisloop;
2507 nesting_stack = thisloop;
2508
2509 do_pending_stack_adjust ();
2510 emit_queue ();
6496a589 2511 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2512 emit_label (thisloop->data.loop.start_label);
2513
2514 return thisloop;
2515}
2516
2517/* Like expand_start_loop but for a loop where the continuation point
2518 (for expand_continue_loop) will be specified explicitly. */
2519
2520struct nesting *
2521expand_start_loop_continue_elsewhere (exit_flag)
2522 int exit_flag;
2523{
2524 struct nesting *thisloop = expand_start_loop (exit_flag);
2525 loop_stack->data.loop.continue_label = gen_label_rtx ();
2526 return thisloop;
2527}
2528
2529/* Begin a null, aka do { } while (0) "loop". But since the contents
2530 of said loop can still contain a break, we must frob the loop nest. */
2531
2532struct nesting *
2533expand_start_null_loop ()
2534{
b3694847 2535 struct nesting *thisloop = ALLOC_NESTING ();
2536
2537 /* Make an entry on loop_stack for the loop we are entering. */
2538
e2500fed 2539 thisloop->desc = LOOP_NESTING;
2540 thisloop->next = loop_stack;
2541 thisloop->all = nesting_stack;
2542 thisloop->depth = ++nesting_depth;
2543 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2544 thisloop->data.loop.end_label = gen_label_rtx ();
ba89764a 2545 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2546 thisloop->exit_label = thisloop->data.loop.end_label;
2547 loop_stack = thisloop;
2548 nesting_stack = thisloop;
2549
2550 return thisloop;
2551}
2552
2553/* Specify the continuation point for a loop started with
2554 expand_start_loop_continue_elsewhere.
2555 Use this at the point in the code to which a continue statement
2556 should jump. */
2557
2558void
2559expand_loop_continue_here ()
2560{
2561 do_pending_stack_adjust ();
6496a589 2562 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2563 emit_label (loop_stack->data.loop.continue_label);
2564}
2565
2566/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2567 Pop the block off of loop_stack. */
2568
2569void
2570expand_end_loop ()
2571{
0720f6fb 2572 rtx start_label = loop_stack->data.loop.start_label;
2573 rtx etc_note;
2574 int eh_regions, debug_blocks;
4977bab6 2575 bool empty_test;
2576
2577 /* Mark the continue-point at the top of the loop if none elsewhere. */
2578 if (start_label == loop_stack->data.loop.continue_label)
2579 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2580
2581 do_pending_stack_adjust ();
2582
e803a64b 2583 /* If the loop starts with a loop exit, roll that to the end where
a7d308f7 2584 it will optimize together with the jump back.
93de5c31 2585
e803a64b 2586 If the loop presently looks like this (in pseudo-C):
93de5c31 2587
2588 LOOP_BEG
2589 start_label:
2590 if (test) goto end_label;
2591 LOOP_END_TOP_COND
2592 body;
2593 goto start_label;
2594 end_label:
4381f7c2 2595
2596 transform it to look like:
2597
2598 LOOP_BEG
2599 goto start_label;
2600 top_label:
2601 body;
2602 start_label:
2603 if (test) goto end_label;
2604 goto top_label;
2605 end_label:
2606
2607 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
14b493d6 2608 the end of the entry conditional. Without this, our lexical scan
2609 can't tell the difference between an entry conditional and a
2610 body conditional that exits the loop. Mistaking the two means
786de7eb 2611 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2612 screw up loop unrolling.
2613
2614 Things will be oh so much better when loop optimization is done
2615 off of a proper control flow graph... */
2616
2617 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2618
4977bab6 2619 empty_test = true;
2620 eh_regions = debug_blocks = 0;
2621 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2622 if (GET_CODE (etc_note) == NOTE)
2623 {
2624 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2625 break;
28d81abb 2626
2627 /* We must not walk into a nested loop. */
2628 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2629 {
2630 etc_note = NULL_RTX;
28d81abb 2631 break;
e803a64b 2632 }
28d81abb 2633
2634 /* At the same time, scan for EH region notes, as we don't want
2635 to scrog region nesting. This shouldn't happen, but... */
2636 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2637 eh_regions++;
2638 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2639 {
2640 if (--eh_regions < 0)
2641 /* We've come to the end of an EH region, but never saw the
2642 beginning of that region. That means that an EH region
2643 begins before the top of the loop, and ends in the middle
2644 of it. The existence of such a situation violates a basic
2645 assumption in this code, since that would imply that even
2646 when EH_REGIONS is zero, we might move code out of an
2647 exception region. */
2648 abort ();
2649 }
0720f6fb 2650
2651 /* Likewise for debug scopes. In this case we'll either (1) move
2652 all of the notes if they are properly nested or (2) leave the
786de7eb 2653 notes alone and only rotate the loop at high optimization
2654 levels when we expect to scrog debug info. */
2655 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2656 debug_blocks++;
2657 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2658 debug_blocks--;
2659 }
2660 else if (INSN_P (etc_note))
2661 empty_test = false;
28d81abb 2662
2663 if (etc_note
2664 && optimize
4977bab6 2665 && ! empty_test
2666 && eh_regions == 0
2667 && (debug_blocks == 0 || optimize >= 2)
2668 && NEXT_INSN (etc_note) != NULL_RTX
2669 && ! any_condjump_p (get_last_insn ()))
2670 {
2671 /* We found one. Move everything from START to ETC to the end
2672 of the loop, and add a jump from the top of the loop. */
2673 rtx top_label = gen_label_rtx ();
2674 rtx start_move = start_label;
2675
2676 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2677 then we want to move this note also. */
2678 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2679 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2680 start_move = PREV_INSN (start_move);
2681
2682 emit_label_before (top_label, start_move);
2683
2684 /* Actually move the insns. If the debug scopes are nested, we
2685 can move everything at once. Otherwise we have to move them
2686 one by one and squeeze out the block notes. */
2687 if (debug_blocks == 0)
2688 reorder_insns (start_move, etc_note, get_last_insn ());
2689 else
28d81abb 2690 {
e803a64b 2691 rtx insn, next_insn;
2692 for (insn = start_move; insn; insn = next_insn)
2693 {
2694 /* Figure out which insn comes after this one. We have
2695 to do this before we move INSN. */
e803a64b 2696 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2697
2698 if (GET_CODE (insn) == NOTE
2699 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2700 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2701 continue;
2702
2703 reorder_insns (insn, insn, get_last_insn ());
2704 }
28d81abb 2705 }
28d81abb 2706
2707 /* Add the jump from the top of the loop. */
2708 emit_jump_insn_before (gen_jump (start_label), top_label);
2709 emit_barrier_before (top_label);
2710 start_label = top_label;
a7d308f7 2711 }
2712
2713 emit_jump (start_label);
2714 emit_note (NULL, NOTE_INSN_LOOP_END);
2715 emit_label (loop_stack->data.loop.end_label);
2716
2717 POPSTACK (loop_stack);
2718
e2500fed 2719 clear_last_expr ();
2720}
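/* Sketch of the calling protocol for a C `while (c) body' as front ends
   of this vintage use it:

	expand_start_loop (1);
	expand_exit_loop_top_cond (0, c);
	... expand body ...
	expand_end_loop ();

   The rotation above then moves the entry test to the loop bottom.  */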
2721
2722/* Finish a null loop, aka do { } while (0). */
2723
2724void
2725expand_end_null_loop ()
2726{
2727 do_pending_stack_adjust ();
2728 emit_label (loop_stack->data.loop.end_label);
2729
2730 POPSTACK (loop_stack);
2731
e2500fed 2732 clear_last_expr ();
2733}
2734
2735/* Generate a jump to the current loop's continue-point.
2736 This is usually the top of the loop, but may be specified
2737 explicitly elsewhere. If not currently inside a loop,
2738 return 0 and do nothing; caller will print an error message. */
2739
2740int
2741expand_continue_loop (whichloop)
2742 struct nesting *whichloop;
2743{
2744 /* Emit information for branch prediction. */
2745 rtx note;
2746
2747 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2748 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
e2500fed 2749 clear_last_expr ();
2750 if (whichloop == 0)
2751 whichloop = loop_stack;
2752 if (whichloop == 0)
2753 return 0;
2754 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2755 NULL_RTX);
2756 return 1;
2757}
2758
2759/* Generate a jump to exit the current loop. If not currently inside a loop,
2760 return 0 and do nothing; caller will print an error message. */
2761
2762int
2763expand_exit_loop (whichloop)
2764 struct nesting *whichloop;
2765{
e2500fed 2766 clear_last_expr ();
2767 if (whichloop == 0)
2768 whichloop = loop_stack;
2769 if (whichloop == 0)
2770 return 0;
37366632 2771 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2772 return 1;
2773}
2774
2775/* Generate a conditional jump to exit the current loop if COND
2776 evaluates to zero. If not currently inside a loop,
2777 return 0 and do nothing; caller will print an error message. */
2778
2779int
2780expand_exit_loop_if_false (whichloop, cond)
2781 struct nesting *whichloop;
2782 tree cond;
2783{
4977bab6 2784 rtx label;
e2500fed 2785 clear_last_expr ();
b93a436e 2786
2787 if (whichloop == 0)
2788 whichloop = loop_stack;
2789 if (whichloop == 0)
2790 return 0;
2791
2792 if (integer_nonzerop (cond))
2793 return 1;
2794 if (integer_zerop (cond))
2795 return expand_exit_loop (whichloop);
2796
2797 /* Check if we definitely won't need a fixup. */
2798 if (whichloop == nesting_stack)
2799 {
2800 jumpifnot (cond, whichloop->data.loop.end_label);
2801 return 1;
2802 }
2803
b93a436e 2804 /* In order to handle fixups, we actually create a conditional jump
4fe9b91c 2805 around an unconditional branch to exit the loop. If fixups are
b93a436e 2806 necessary, they go before the unconditional branch. */
d902c7ea 2807
2808 label = gen_label_rtx ();
2809 jumpif (cond, label);
2810 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2811 NULL_RTX);
2812 emit_label (label);
ca695ac9 2813
2814 return 1;
2815}
2816
e803a64b 2817/* Like expand_exit_loop_if_false except also emit a note marking
786de7eb 2818 the end of the conditional. Should only be used immediately
2819 after expand_start_loop.  */
2820
2821int
2822expand_exit_loop_top_cond (whichloop, cond)
2823 struct nesting *whichloop;
2824 tree cond;
2825{
2826 if (! expand_exit_loop_if_false (whichloop, cond))
2827 return 0;
2828
2829 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2830 return 1;
2831}
2832
0e9e1e0a 2833/* Return nonzero if we should preserve sub-expressions as separate
2834 pseudos. We never do so if we aren't optimizing. We always do so
2835 if -fexpensive-optimizations.
2836
2837 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2838 the loop may still be a small one. */
2839
2840int
2841preserve_subexpressions_p ()
2842{
2843 rtx insn;
2844
2845 if (flag_expensive_optimizations)
2846 return 1;
2847
01d939e8 2848 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2849 return 0;
2850
2851 insn = get_last_insn_anywhere ();
2852
2853 return (insn
2854 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2855 < n_non_fixed_regs * 3));
2856
2857}
2858
2859/* Generate a jump to exit the current loop, conditional, binding contour
2860 or case statement. Not all such constructs are visible to this function,
2861 only those started with EXIT_FLAG nonzero. Individual languages use
2862 the EXIT_FLAG parameter to control which kinds of constructs you can
2863 exit this way.
2864
2865 If not currently inside anything that can be exited,
2866 return 0 and do nothing; caller will print an error message. */
2867
2868int
2869expand_exit_something ()
2870{
2871 struct nesting *n;
e2500fed 2872 clear_last_expr ();
2873 for (n = nesting_stack; n; n = n->all)
2874 if (n->exit_label != 0)
2875 {
37366632 2876 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2877 return 1;
2878 }
2879
2880 return 0;
2881}
2882\f
2883/* Generate RTL to return from the current function, with no value.
2884 (That is, we do not do anything about returning any value.) */
2885
2886void
2887expand_null_return ()
2888{
2889 rtx last_insn;
2890
2891 last_insn = get_last_insn ();
bd695e1e 2892
4381f7c2 2893 /* If this function was declared to return a value, but we
bd695e1e 2894 didn't, clobber the return registers so that they are not
a1f300c0 2895 propagated live to the rest of the function. */
c13fde05 2896 clobber_return_register ();
28d81abb 2897
396ad517 2898 expand_null_return_1 (last_insn);
2899}
2900
2901/* Try to guess whether the returned value indicates an error code.  */
2902static enum br_predictor
2903return_prediction (val)
2904 rtx val;
2905{
2906 /* Different heuristics for pointers and scalars. */
2907 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2908 {
2909 /* NULL is usually not returned. */
2910 if (val == const0_rtx)
2911 return PRED_NULL_RETURN;
2912 }
2913 else
2914 {
2915 /* Negative return values are often used to indicate
2916 errors. */
2917 if (GET_CODE (val) == CONST_INT
2918 && INTVAL (val) < 0)
2919 return PRED_NEGATIVE_RETURN;
2920 /* Constant return values are also usually errors;
2921 zero/one often mean booleans, so exclude them from the
2922 heuristics. */
2923 if (CONSTANT_P (val)
2924 && (val != const0_rtx && val != const1_rtx))
2925 return PRED_CONST_RETURN;
2926 }
2927 return PRED_NO_PREDICTION;
2928}
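/* Concretely (sketch): `return NULL;' from a pointer-valued function
   yields PRED_NULL_RETURN, `return -1;' yields PRED_NEGATIVE_RETURN,
   `return 2;' yields PRED_CONST_RETURN, and `return 0;' from an int
   function gets no prediction since 0/1 look like booleans.  */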
2929
2930/* Generate RTL to return from the current function, with value VAL. */
2931
8d800403 2932static void
2933expand_value_return (val)
2934 rtx val;
2935{
2936 rtx last_insn;
2937 rtx return_reg;
2938 enum br_predictor pred;
2939
2940 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
2941 {
2942 /* Emit information for branch prediction. */
2943 rtx note;
2944
2945 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2946
2947 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2948
2949 }
2950
2951 last_insn = get_last_insn ();
2952 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2953
2954 /* Copy the value to the return location
2955 unless it's already there. */
2956
2957 if (return_reg != val)
77636079 2958 {
77636079 2959 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
37877eb0 2960#ifdef PROMOTE_FUNCTION_RETURN
77636079 2961 int unsignedp = TREE_UNSIGNED (type);
2962 enum machine_mode old_mode
2963 = DECL_MODE (DECL_RESULT (current_function_decl));
28612f9e 2964 enum machine_mode mode
14a774a9 2965 = promote_mode (type, old_mode, &unsignedp, 1);
77636079 2966
2967 if (mode != old_mode)
2968 val = convert_modes (mode, old_mode, val, unsignedp);
77636079 2969#endif
14a774a9 2970 if (GET_CODE (return_reg) == PARALLEL)
04050c69 2971 emit_group_load (return_reg, val, int_size_in_bytes (type));
14a774a9 2972 else
2973 emit_move_insn (return_reg, val);
2974 }
14a774a9 2975
396ad517 2976 expand_null_return_1 (last_insn);
2977}
2978
2979/* Output a return with no value. If LAST_INSN is nonzero,
396ad517 2980 pretend that the return takes place after LAST_INSN. */
2981
2982static void
396ad517 2983expand_null_return_1 (last_insn)
28d81abb 2984 rtx last_insn;
2985{
2986 rtx end_label = cleanup_label ? cleanup_label : return_label;
2987
2988 clear_pending_stack_adjust ();
2989 do_pending_stack_adjust ();
e2500fed 2990 clear_last_expr ();
28d81abb 2991
2992 if (end_label == 0)
2993 end_label = return_label = gen_label_rtx ();
37366632 2994 expand_goto_internal (NULL_TREE, end_label, last_insn);
2995}
2996\f
2997/* Generate RTL to evaluate the expression RETVAL and return it
2998 from the current function. */
2999
3000void
3001expand_return (retval)
3002 tree retval;
3003{
3004 /* If there are any cleanups to be performed, then they will
3005 be inserted following LAST_INSN. It is desirable
3006 that the last_insn, for such purposes, should be the
3007 last insn before computing the return value. Otherwise, cleanups
3008 which call functions can clobber the return value. */
3009 /* ??? rms: I think that is erroneous, because in C++ it would
3010 run destructors on variables that might be used in the subsequent
3011 computation of the return value. */
3012 rtx last_insn = 0;
19e7881c 3013 rtx result_rtl;
b3694847 3014 rtx val = 0;
28d81abb 3015 tree retval_rhs;
3016
3017 /* If function wants no value, give it none. */
3018 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3019 {
37366632 3020 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 3021 emit_queue ();
3022 expand_null_return ();
3023 return;
3024 }
3025
ea11ca7e 3026 if (retval == error_mark_node)
3027 {
3028 /* Treat this like a return of no value from a function that
3029 returns a value. */
3030 expand_null_return ();
786de7eb 3031 return;
c9407e4c 3032 }
ea11ca7e 3033 else if (TREE_CODE (retval) == RESULT_DECL)
3034 retval_rhs = retval;
3035 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3036 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3037 retval_rhs = TREE_OPERAND (retval, 1);
71653180 3038 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3039 /* Recognize tail-recursive call to void function. */
3040 retval_rhs = retval;
3041 else
3042 retval_rhs = NULL_TREE;
3043
7cc8342c 3044 last_insn = get_last_insn ();
3045
3046 /* Distribute return down conditional expr if either of the sides
3047 may involve tail recursion (see test below). This enhances the number
3048 of tail recursions we see. Don't do this always since it can produce
3049 sub-optimal code in some cases and we distribute assignments into
3050 conditional expressions when it would help. */
3051
3052 if (optimize && retval_rhs != 0
3053 && frame_offset == 0
3054 && TREE_CODE (retval_rhs) == COND_EXPR
3055 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3056 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3057 {
3058 rtx label = gen_label_rtx ();
3059 tree expr;
3060
37366632 3061 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
1483bddb 3062 start_cleanup_deferral ();
dd98f85c 3063 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3064 DECL_RESULT (current_function_decl),
3065 TREE_OPERAND (retval_rhs, 1));
3066 TREE_SIDE_EFFECTS (expr) = 1;
3067 expand_return (expr);
28d81abb 3068 emit_label (label);
a0a34f94 3069
dd98f85c 3070 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3071 DECL_RESULT (current_function_decl),
3072 TREE_OPERAND (retval_rhs, 2));
3073 TREE_SIDE_EFFECTS (expr) = 1;
3074 expand_return (expr);
1483bddb 3075 end_cleanup_deferral ();
3076 return;
3077 }
3078
3079 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3080
3081 /* If the result is an aggregate that is being returned in one (or more)
3082 registers, load the registers here. The compiler currently can't handle
3083 copying a BLKmode value into registers. We could put this code in a
3084 more general area (for use by everyone instead of just function
3085 call/return), but until this feature is generally usable it is kept here
3086 (and in expand_call). The value must go into a pseudo in case there
3087 are cleanups that will clobber the real return register. */
3088
3089 if (retval_rhs != 0
3090 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
14a774a9 3091 && GET_CODE (result_rtl) == REG)
4c485b63 3092 {
3093 int i;
3094 unsigned HOST_WIDE_INT bitpos, xbitpos;
3095 unsigned HOST_WIDE_INT big_endian_correction = 0;
3096 unsigned HOST_WIDE_INT bytes
3097 = int_size_in_bytes (TREE_TYPE (retval_rhs));
4c485b63 3098 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3099 unsigned int bitsize
3100 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
4c485b63 3101 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
c16ddde3 3102 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
4c485b63 3103 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 3104 enum machine_mode tmpmode, result_reg_mode;
4c485b63 3105
3106 if (bytes == 0)
3107 {
3108 expand_null_return ();
3109 return;
3110 }
3111
3112 /* Structures whose size is not a multiple of a word are aligned
3113 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3114 machine, this means we must skip the empty high order bytes when
3115 calculating the bit offset. */
0d7839da 3116 if (BYTES_BIG_ENDIAN
0d7839da 3117 && bytes % UNITS_PER_WORD)
3118 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3119 * BITS_PER_UNIT));
3120
4381f7c2 3121 /* Copy the structure BITSIZE bits at a time. */
3122 for (bitpos = 0, xbitpos = big_endian_correction;
3123 bitpos < bytes * BITS_PER_UNIT;
3124 bitpos += bitsize, xbitpos += bitsize)
4c485b63 3125 {
a7f875d7 3126 /* We need a new destination pseudo each time xbitpos is
abc95ed3 3127 on a word boundary and when xbitpos == big_endian_correction
3128 (the first time through). */
3129 if (xbitpos % BITS_PER_WORD == 0
3130 || xbitpos == big_endian_correction)
4c485b63 3131 {
3132 /* Generate an appropriate register. */
3133 dst = gen_reg_rtx (word_mode);
3134 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3135
3136 /* Clear the destination before we move anything into it. */
3137 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
4c485b63 3138 }
3139
3140 /* We need a new source operand each time bitpos is on a word
3141 boundary. */
3142 if (bitpos % BITS_PER_WORD == 0)
3143 src = operand_subword_force (result_val,
3144 bitpos / BITS_PER_WORD,
3145 BLKmode);
3146
3147 /* Use bitpos for the source extraction (left justified) and
3148 xbitpos for the destination store (right justified). */
3149 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3150 extract_bit_field (src, bitsize,
3151 bitpos % BITS_PER_WORD, 1,
19caa751 3152 NULL_RTX, word_mode, word_mode,
3153 BITS_PER_WORD),
3154 BITS_PER_WORD);
3155 }
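      /* Worked example (sketch): a 6-byte struct on a 32-bit big-endian
	 target gives bytes = 6, n_regs = 2, and big_endian_correction =
	 32 - 16 = 16, so copying starts at xbitpos 16 and the odd 2 bytes
	 land right-justified in the first destination word.  */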
3156
3157 /* Find the smallest integer mode large enough to hold the
3158 entire structure and use that mode instead of BLKmode
6d2f8887 3159 on the USE insn for the return register. */
4c485b63 3160 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
0c61f541 3161 tmpmode != VOIDmode;
4c485b63 3162 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3163 /* Have we found a large enough mode? */
3164 if (GET_MODE_SIZE (tmpmode) >= bytes)
3165 break;
3166
3167 /* No suitable mode found. */
0c61f541 3168 if (tmpmode == VOIDmode)
3ffeb8f1 3169 abort ();
4c485b63 3170
14a774a9 3171 PUT_MODE (result_rtl, tmpmode);
3ffeb8f1 3172
3173 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3174 result_reg_mode = word_mode;
3175 else
3176 result_reg_mode = tmpmode;
3177 result_reg = gen_reg_rtx (result_reg_mode);
3178
3ffeb8f1 3179 emit_queue ();
3ffeb8f1 3180 for (i = 0; i < n_regs; i++)
af55da56 3181 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 3182 result_pseudos[i]);
4c485b63 3183
af55da56
JW
3184 if (tmpmode != result_reg_mode)
3185 result_reg = gen_lowpart (tmpmode, result_reg);
3186
4c485b63
JL
3187 expand_value_return (result_reg);
3188 }
7cc8342c
RH
3189 else if (retval_rhs != 0
3190 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3191 && (GET_CODE (result_rtl) == REG
3192 || (GET_CODE (result_rtl) == PARALLEL)))
28d81abb 3193 {
14a774a9
RK
3194 /* Calculate the return value into a temporary (usually a pseudo
3195 reg). */
1da68f56
RK
3196 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3197 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3198
3199 val = assign_temp (nt, 0, 0, 1);
dd98f85c
JM
3200 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3201 val = force_not_mem (val);
28d81abb 3202 emit_queue ();
28d81abb
RK
3203 /* Return the calculated value, doing cleanups first. */
3204 expand_value_return (val);
3205 }
3206 else
3207 {
3208 /* No cleanups or no hard reg used;
3209 calculate value into hard return reg. */
cba389cd 3210 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb 3211 emit_queue ();
14a774a9 3212 expand_value_return (result_rtl);
28d81abb
RK
3213 }
3214}
28d81abb 3215\f
b06775f9
RH
3216/* Attempt to optimize a potential tail recursion call into a goto.
3217 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
4381f7c2
KH
3218 where to place the jump to the tail recursion label.
3219
b06775f9 3220 Return TRUE if the call was optimized into a goto. */
642cac7b 3221
564ea051 3222int
b06775f9
RH
3223optimize_tail_recursion (arguments, last_insn)
3224 tree arguments;
642cac7b
JL
3225 rtx last_insn;
3226{
b06775f9
RH
3227 /* Finish checking validity, and if valid emit code to set the
3228 argument variables for the new call. */
3229 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
642cac7b
JL
3230 {
3231 if (tail_recursion_label == 0)
3232 {
3233 tail_recursion_label = gen_label_rtx ();
3234 emit_label_after (tail_recursion_label,
3235 tail_recursion_reentry);
3236 }
3237 emit_queue ();
3238 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3239 emit_barrier ();
564ea051 3240 return 1;
642cac7b 3241 }
564ea051 3242 return 0;
642cac7b
JL
3243}
3244
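/* Illustration (hypothetical user code, not part of this file): in

     int sum (int n, int acc) { return n ? sum (n - 1, acc + n) : acc; }

   the self-call is in tail position, so tail_recursion_args stores the
   new values into the formals N and ACC and the code above replaces the
   call with a jump back to tail_recursion_label, reusing the current
   stack frame.  */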
28d81abb
RK
3245/* Emit code to alter this function's formal parms for a tail-recursive call.
3246 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3247 FORMALS is the chain of decls of formals.
3248 Return 1 if this can be done;
3249 otherwise return 0 and do not emit any code. */
3250
3251static int
3252tail_recursion_args (actuals, formals)
3253 tree actuals, formals;
3254{
b3694847
SS
3255 tree a = actuals, f = formals;
3256 int i;
3257 rtx *argvec;
28d81abb
RK
3258
3259 /* Check that number and types of actuals are compatible
3260 with the formals. This is not always true in valid C code.
3261 Also check that no formal needs to be addressable
3262 and that all formals are scalars. */
3263
3264 /* Also count the args. */
3265
3266 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3267 {
5c7fe359
RK
3268 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3269 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
28d81abb
RK
3270 return 0;
3271 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3272 return 0;
3273 }
3274 if (a != 0 || f != 0)
3275 return 0;
3276
3277 /* Compute all the actuals. */
3278
3279 argvec = (rtx *) alloca (i * sizeof (rtx));
3280
3281 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
37366632 3282 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3283
3284 /* Find which actual values refer to current values of previous formals.
3285 Copy each of them now, before any formal is changed. */
3286
3287 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3288 {
3289 int copy = 0;
b3694847 3290 int j;
28d81abb
RK
3291 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3292 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
4381f7c2
KH
3293 {
3294 copy = 1;
3295 break;
3296 }
28d81abb
RK
3297 if (copy)
3298 argvec[i] = copy_to_reg (argvec[i]);
3299 }
3300
3301 /* Store the values of the actuals into the formals. */
3302
3303 for (f = formals, a = actuals, i = 0; f;
3304 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3305 {
98f3b471 3306 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
28d81abb
RK
3307 emit_move_insn (DECL_RTL (f), argvec[i]);
3308 else
303b90b0
FS
3309 {
3310 rtx tmp = argvec[i];
3311
3312 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3313 {
3314 tmp = gen_reg_rtx (DECL_MODE (f));
3315 convert_move (tmp, argvec[i],
3316 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3317 }
3318 convert_move (DECL_RTL (f), tmp,
3319 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3320 }
28d81abb
RK
3321 }
3322
3323 free_temp_slots ();
3324 return 1;
3325}
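/* The copy-before-store pass above matters for calls such as the
   hypothetical

     void swap (int a, int b) { ... swap (b, a); ... }

   where the actual for the second formal is the first formal itself:
   A's current value is copied to a fresh register before A is
   overwritten with B, so the later store into B still sees A's
   original value.  */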
3326\f
3327/* Generate the RTL code for entering a binding contour.
3328 The variables are declared one by one, by calls to `expand_decl'.
3329
8e91754e
MM
3330 FLAGS is a bitwise or of the following flags:
3331
3332 1 - Nonzero if this construct should be visible to
3333 `exit_something'.
3334
3335 2 - Nonzero if this contour does not require a
3336 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3337 language-independent code should set this flag because they
3338 will not create corresponding BLOCK nodes. (There should be
3339 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3340 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
4381f7c2 3341 when expand_end_bindings is called.
a97901e6
MM
3342
3343 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3344 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3345 note. */
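/* Most callers are expected to reach this through the
   expand_start_bindings macro, believed to be defined as
   expand_start_bindings_and_block (flags, NULL_TREE); see the
   expand_start_bindings (2) call in expand_start_target_temps below.  */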
28d81abb
RK
3346
3347void
a97901e6 3348expand_start_bindings_and_block (flags, block)
8e91754e 3349 int flags;
a97901e6 3350 tree block;
28d81abb
RK
3351{
3352 struct nesting *thisblock = ALLOC_NESTING ();
8e91754e
MM
3353 rtx note;
3354 int exit_flag = ((flags & 1) != 0);
3355 int block_flag = ((flags & 2) == 0);
4381f7c2 3356
a97901e6
MM
3357 /* If a BLOCK is supplied, then the caller should be requesting a
3358 NOTE_INSN_BLOCK_BEG note. */
3359 if (!block_flag && block)
3360 abort ();
8e91754e 3361
a97901e6
MM
3362 /* Create a note to mark the beginning of the block. */
3363 if (block_flag)
3364 {
6496a589 3365 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
a97901e6
MM
3366 NOTE_BLOCK (note) = block;
3367 }
3368 else
6496a589 3369 note = emit_note (NULL, NOTE_INSN_DELETED);
4381f7c2 3370
28d81abb
RK
3371 /* Make an entry on block_stack for the block we are entering. */
3372
e2500fed 3373 thisblock->desc = BLOCK_NESTING;
28d81abb
RK
3374 thisblock->next = block_stack;
3375 thisblock->all = nesting_stack;
3376 thisblock->depth = ++nesting_depth;
3377 thisblock->data.block.stack_level = 0;
3378 thisblock->data.block.cleanups = 0;
3f1d071b 3379 thisblock->data.block.n_function_calls = 0;
e976b8b2 3380 thisblock->data.block.exception_region = 0;
3f1d071b 3381 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
e976b8b2
MS
3382
3383 thisblock->data.block.conditional_code = 0;
3384 thisblock->data.block.last_unconditional_cleanup = note;
a571f7a0
MM
3385 /* When we insert instructions after the last unconditional cleanup,
3386 we don't adjust last_insn. That means that a later add_insn will
3387 clobber the instructions we've just added. The easiest way to
3388 fix this is to just insert another instruction here, so that the
3389 instructions inserted after the last unconditional cleanup are
3390 never the last instruction. */
6496a589 3391 emit_note (NULL, NOTE_INSN_DELETED);
e976b8b2 3392
28d81abb
RK
3393 if (block_stack
3394 && !(block_stack->data.block.cleanups == NULL_TREE
3395 && block_stack->data.block.outer_cleanups == NULL_TREE))
3396 thisblock->data.block.outer_cleanups
3397 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3398 block_stack->data.block.outer_cleanups);
3399 else
3400 thisblock->data.block.outer_cleanups = 0;
28d81abb
RK
3401 thisblock->data.block.label_chain = 0;
3402 thisblock->data.block.innermost_stack_block = stack_block_stack;
3403 thisblock->data.block.first_insn = note;
3f1d071b 3404 thisblock->data.block.block_start_count = ++current_block_start_count;
28d81abb
RK
3405 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3406 block_stack = thisblock;
3407 nesting_stack = thisblock;
3408
b93a436e
JL
3409 /* Make a new level for allocating stack slots. */
3410 push_temp_slots ();
28d81abb
RK
3411}
3412
e976b8b2
MS
3413/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3414 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3415 expand_expr are made. After we end the region, we know that all
3416 space for all temporaries that were created by TARGET_EXPRs will be
3417 destroyed and their space freed for reuse. */
3418
3419void
3420expand_start_target_temps ()
3421{
3422 /* This is so that even if the result is preserved, the space
3423 allocated will be freed, as we know that it is no longer in use. */
3424 push_temp_slots ();
3425
3426 /* Start a new binding layer that will keep track of all cleanup
3427 actions to be performed. */
8e91754e 3428 expand_start_bindings (2);
e976b8b2
MS
3429
3430 target_temp_slot_level = temp_slot_level;
3431}
3432
3433void
3434expand_end_target_temps ()
3435{
3436 expand_end_bindings (NULL_TREE, 0, 0);
4381f7c2 3437
e976b8b2
MS
3438 /* This is so that even if the result is preserved, the space
3439 allocated will be freed, as we know that it is no longer in use. */
3440 pop_temp_slots ();
3441}
3442
0e9e1e0a 3443/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
deb5e280
JM
3444 in question represents the outermost pair of curly braces (i.e. the "body
3445 block") of a function or method.
3446
3447 For any BLOCK node representing a "body block" of a function or method, the
3448 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3449 represents the outermost (function) scope for the function or method (i.e.
3450 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 3451 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
3452
3453int
3454is_body_block (stmt)
b3694847 3455 tree stmt;
deb5e280
JM
3456{
3457 if (TREE_CODE (stmt) == BLOCK)
3458 {
3459 tree parent = BLOCK_SUPERCONTEXT (stmt);
3460
3461 if (parent && TREE_CODE (parent) == BLOCK)
3462 {
3463 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3464
3465 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3466 return 1;
3467 }
3468 }
3469
3470 return 0;
3471}
3472
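/* Sketch of the shape being tested (as described above):

       FUNCTION_DECL
         `-- BLOCK      outermost (function) scope, with the formals
               `-- BLOCK  the "body block"; is_body_block returns 1.  */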
e976b8b2
MS
3473/* True if we are currently emitting insns in an area of output code
3474 that is controlled by a conditional expression. This is used by
3475 the cleanup handling code to generate conditional cleanup actions. */
3476
3477int
3478conditional_context ()
3479{
3480 return block_stack && block_stack->data.block.conditional_code;
3481}
3482
91088ddb
JM
3483/* Return an opaque pointer to the current nesting level, so frontend code
3484 can check its own sanity. */
3485
3486struct nesting *
3487current_nesting_level ()
3488{
3489 return cfun ? block_stack : 0;
3490}
3491
ba716ac9
BS
3492/* Emit a handler label for a nonlocal goto handler.
3493 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3494
e881bb1b 3495static rtx
ba716ac9
BS
3496expand_nl_handler_label (slot, before_insn)
3497 rtx slot, before_insn;
3498{
3499 rtx insns;
3500 rtx handler_label = gen_label_rtx ();
3501
0045d504 3502 /* Don't let cleanup_cfg delete the handler. */
ba716ac9
BS
3503 LABEL_PRESERVE_P (handler_label) = 1;
3504
3505 start_sequence ();
3506 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3507 insns = get_insns ();
3508 end_sequence ();
2f937369 3509 emit_insn_before (insns, before_insn);
ba716ac9
BS
3510
3511 emit_label (handler_label);
e881bb1b
RH
3512
3513 return handler_label;
ba716ac9
BS
3514}
3515
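/* Illustration (hypothetical GNU C, not from this file): in

     void f (void) { void g (void) { goto out; } g (); out: ; }

   the nonlocal label OUT gets a handler label emitted by the function
   above, and SLOT is the stack slot through which the nonlocal goto in
   G finds that handler at run time.  */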
3516/* Emit code to restore vital registers at the beginning of a nonlocal goto
3517 handler. */
3518static void
3519expand_nl_goto_receiver ()
3520{
3521#ifdef HAVE_nonlocal_goto
3522 if (! HAVE_nonlocal_goto)
3523#endif
3524 /* First adjust our frame pointer to its actual value. It was
3525 previously set to the start of the virtual area corresponding to
3526 the stacked variables when we branched here and now needs to be
3527 adjusted to the actual hardware fp value.
3528
 3529     Assignments to virtual registers are converted by
3530 instantiate_virtual_regs into the corresponding assignment
3531 to the underlying register (fp in this case) that makes
3532 the original assignment true.
3533 So the following insn will actually be
3534 decrementing fp by STARTING_FRAME_OFFSET. */
3535 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3536
3537#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3538 if (fixed_regs[ARG_POINTER_REGNUM])
3539 {
3540#ifdef ELIMINABLE_REGS
3541 /* If the argument pointer can be eliminated in favor of the
3542 frame pointer, we don't need to restore it. We assume here
3543 that if such an elimination is present, it can always be used.
3544 This is the case on all known machines; if we don't make this
3545 assumption, we do unnecessary saving on many machines. */
8b60264b 3546 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
ba716ac9
BS
3547 size_t i;
3548
b6a1cbae 3549 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
ba716ac9
BS
3550 if (elim_regs[i].from == ARG_POINTER_REGNUM
3551 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3552 break;
3553
b6a1cbae 3554 if (i == ARRAY_SIZE (elim_regs))
ba716ac9
BS
3555#endif
3556 {
3557 /* Now restore our arg pointer from the address at which it
278ed218 3558 was saved in our stack frame. */
ba716ac9 3559 emit_move_insn (virtual_incoming_args_rtx,
278ed218 3560 copy_to_reg (get_arg_pointer_save_area (cfun)));
ba716ac9
BS
3561 }
3562 }
3563#endif
3564
3565#ifdef HAVE_nonlocal_goto_receiver
3566 if (HAVE_nonlocal_goto_receiver)
3567 emit_insn (gen_nonlocal_goto_receiver ());
3568#endif
3569}
3570
3571/* Make handlers for nonlocal gotos taking place in the function calls in
3572 block THISBLOCK. */
3573
3574static void
3575expand_nl_goto_receivers (thisblock)
3576 struct nesting *thisblock;
3577{
3578 tree link;
3579 rtx afterward = gen_label_rtx ();
3580 rtx insns, slot;
e881bb1b 3581 rtx label_list;
ba716ac9
BS
3582 int any_invalid;
3583
3584 /* Record the handler address in the stack slot for that purpose,
3585 during this block, saving and restoring the outer value. */
3586 if (thisblock->next != 0)
3587 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3588 {
3589 rtx save_receiver = gen_reg_rtx (Pmode);
3590 emit_move_insn (XEXP (slot, 0), save_receiver);
3591
3592 start_sequence ();
3593 emit_move_insn (save_receiver, XEXP (slot, 0));
3594 insns = get_insns ();
3595 end_sequence ();
2f937369 3596 emit_insn_before (insns, thisblock->data.block.first_insn);
ba716ac9
BS
3597 }
3598
3599 /* Jump around the handlers; they run only when specially invoked. */
3600 emit_jump (afterward);
3601
3602 /* Make a separate handler for each label. */
3603 link = nonlocal_labels;
3604 slot = nonlocal_goto_handler_slots;
e881bb1b 3605 label_list = NULL_RTX;
ba716ac9
BS
3606 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
 3607    /* Skip any labels we shouldn't be able to jump to from here;
 3608       we generate one special handler for all of them below, which
 3609       just emits a trap.  */
3610 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3611 {
e881bb1b
RH
3612 rtx lab;
3613 lab = expand_nl_handler_label (XEXP (slot, 0),
3614 thisblock->data.block.first_insn);
3615 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3616
ba716ac9
BS
3617 expand_nl_goto_receiver ();
3618
3619 /* Jump to the "real" nonlocal label. */
3620 expand_goto (TREE_VALUE (link));
3621 }
3622
3623 /* A second pass over all nonlocal labels; this time we handle those
3624 we should not be able to jump to at this point. */
3625 link = nonlocal_labels;
3626 slot = nonlocal_goto_handler_slots;
3627 any_invalid = 0;
3628 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3629 if (DECL_TOO_LATE (TREE_VALUE (link)))
3630 {
e881bb1b
RH
3631 rtx lab;
3632 lab = expand_nl_handler_label (XEXP (slot, 0),
3633 thisblock->data.block.first_insn);
3634 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
ba716ac9
BS
3635 any_invalid = 1;
3636 }
3637
3638 if (any_invalid)
3639 {
3640 expand_nl_goto_receiver ();
9602f5a0 3641 expand_builtin_trap ();
ba716ac9
BS
3642 }
3643
e881bb1b 3644 nonlocal_goto_handler_labels = label_list;
ba716ac9
BS
3645 emit_label (afterward);
3646}
3647
ba716677
MM
3648/* Warn about any unused VARS (which may contain nodes other than
3649 VAR_DECLs, but such nodes are ignored). The nodes are connected
3650 via the TREE_CHAIN field. */
3651
3652void
3653warn_about_unused_variables (vars)
3654 tree vars;
3655{
3656 tree decl;
3657
078721e1 3658 if (warn_unused_variable)
ba716677 3659 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 3660 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
3661 && ! TREE_USED (decl)
3662 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 3663 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ba716677
MM
3664 warning_with_decl (decl, "unused variable `%s'");
3665}
3666
28d81abb 3667/* Generate RTL code to terminate a binding contour.
e97b5c12
MM
3668
3669 VARS is the chain of VAR_DECL nodes for the variables bound in this
3670 contour. There may actually be other nodes in this chain, but any
3671 nodes other than VAR_DECLS are ignored.
3672
28d81abb
RK
3673 MARK_ENDS is nonzero if we should put a note at the beginning
3674 and end of this binding contour.
3675
3676 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3677 (That is true automatically if the contour has a saved stack level.) */
3678
3679void
3680expand_end_bindings (vars, mark_ends, dont_jump_in)
3681 tree vars;
3682 int mark_ends;
3683 int dont_jump_in;
3684{
b3694847 3685 struct nesting *thisblock = block_stack;
e976b8b2 3686
ba716677
MM
3687 /* If any of the variables in this scope were not used, warn the
3688 user. */
3689 warn_about_unused_variables (vars);
28d81abb 3690
28d81abb
RK
3691 if (thisblock->exit_label)
3692 {
3693 do_pending_stack_adjust ();
3694 emit_label (thisblock->exit_label);
3695 }
3696
ba716ac9 3697 /* If necessary, make handlers for nonlocal gotos taking
28d81abb 3698 place in the function calls in this block. */
3f1d071b 3699 if (function_call_count != thisblock->data.block.n_function_calls
28d81abb
RK
3700 && nonlocal_labels
3701 /* Make handler for outermost block
3702 if there were any nonlocal gotos to this function. */
3703 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3704 /* Make handler for inner block if it has something
3705 special to do when you jump out of it. */
3706 : (thisblock->data.block.cleanups != 0
3707 || thisblock->data.block.stack_level != 0)))
ba716ac9 3708 expand_nl_goto_receivers (thisblock);
28d81abb 3709
72eb1038
BH
3710 /* Don't allow jumping into a block that has a stack level.
3711 Cleanups are allowed, though. */
28d81abb 3712 if (dont_jump_in
72eb1038 3713 || thisblock->data.block.stack_level != 0)
28d81abb
RK
3714 {
3715 struct label_chain *chain;
3716
3717 /* Any labels in this block are no longer valid to go to.
3718 Mark them to cause an error message. */
3719 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3720 {
3721 DECL_TOO_LATE (chain->label) = 1;
3722 /* If any goto without a fixup came to this label,
3723 that must be an error, because gotos without fixups
72eb1038 3724 come from outside all saved stack-levels. */
28d81abb
RK
3725 if (TREE_ADDRESSABLE (chain->label))
3726 error_with_decl (chain->label,
3727 "label `%s' used before containing binding contour");
3728 }
3729 }
3730
3731 /* Restore stack level in effect before the block
3732 (only if variable-size objects allocated). */
3733 /* Perform any cleanups associated with the block. */
3734
3735 if (thisblock->data.block.stack_level != 0
3736 || thisblock->data.block.cleanups != 0)
3737 {
04da69d3
JM
3738 int reachable;
3739 rtx insn;
28d81abb 3740
50d1b7a1
MS
3741 /* Don't let cleanups affect ({...}) constructs. */
3742 int old_expr_stmts_for_value = expr_stmts_for_value;
3743 rtx old_last_expr_value = last_expr_value;
3744 tree old_last_expr_type = last_expr_type;
3745 expr_stmts_for_value = 0;
28d81abb 3746
04da69d3
JM
3747 /* Only clean up here if this point can actually be reached. */
3748 insn = get_last_insn ();
3749 if (GET_CODE (insn) == NOTE)
3750 insn = prev_nonnote_insn (insn);
d1ee23e5 3751 reachable = (! insn || GET_CODE (insn) != BARRIER);
4381f7c2 3752
50d1b7a1
MS
3753 /* Do the cleanups. */
3754 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3755 if (reachable)
3756 do_pending_stack_adjust ();
28d81abb 3757
50d1b7a1
MS
3758 expr_stmts_for_value = old_expr_stmts_for_value;
3759 last_expr_value = old_last_expr_value;
3760 last_expr_type = old_last_expr_type;
3761
3762 /* Restore the stack level. */
3763
3764 if (reachable && thisblock->data.block.stack_level != 0)
3765 {
3766 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3767 thisblock->data.block.stack_level, NULL_RTX);
ba716ac9 3768 if (nonlocal_goto_handler_slots != 0)
50d1b7a1
MS
3769 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3770 NULL_RTX);
28d81abb
RK
3771 }
3772
3773 /* Any gotos out of this block must also do these things.
59257ff7
RK
3774 Also report any gotos with fixups that came to labels in this
3775 level. */
28d81abb
RK
3776 fixup_gotos (thisblock,
3777 thisblock->data.block.stack_level,
3778 thisblock->data.block.cleanups,
3779 thisblock->data.block.first_insn,
3780 dont_jump_in);
3781 }
3782
c7d2d61d
RS
3783 /* Mark the beginning and end of the scope if requested.
3784 We do this now, after running cleanups on the variables
3785 just going out of scope, so they are in scope for their cleanups. */
3786
3787 if (mark_ends)
a97901e6 3788 {
6496a589 3789 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
a97901e6
MM
3790 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3791 }
c7d2d61d
RS
3792 else
3793 /* Get rid of the beginning-mark if we don't make an end-mark. */
3794 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3795
e976b8b2 3796 /* Restore the temporary level of TARGET_EXPRs. */
3f1d071b 3797 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
e976b8b2 3798
28d81abb
RK
3799 /* Restore block_stack level for containing block. */
3800
3801 stack_block_stack = thisblock->data.block.innermost_stack_block;
3802 POPSTACK (block_stack);
3803
3804 /* Pop the stack slot nesting and free any slots at this level. */
3805 pop_temp_slots ();
3806}
3807\f
7393c642
RK
3808/* Generate code to save the stack pointer at the start of the current block
3809 and set up to restore it on exit. */
3810
3811void
3812save_stack_pointer ()
3813{
3814 struct nesting *thisblock = block_stack;
3815
3816 if (thisblock->data.block.stack_level == 0)
3817 {
3818 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3819 &thisblock->data.block.stack_level,
3820 thisblock->data.block.first_insn);
3821 stack_block_stack = thisblock;
3822 }
3823}
3824\f
28d81abb 3825/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3826 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3827
3828void
3829expand_decl (decl)
b3694847 3830 tree decl;
28d81abb 3831{
ca695ac9
JB
3832 tree type;
3833
ca695ac9 3834 type = TREE_TYPE (decl);
28d81abb 3835
eabb9ed0
RK
3836 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3837 type in case this node is used in a reference. */
3838 if (TREE_CODE (decl) == CONST_DECL)
3839 {
3840 DECL_MODE (decl) = TYPE_MODE (type);
3841 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3842 DECL_SIZE (decl) = TYPE_SIZE (type);
3843 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3844 return;
3845 }
28d81abb 3846
eabb9ed0
RK
3847 /* Otherwise, only automatic variables need any expansion done. Static and
3848 external variables, and external functions, will be handled by
3849 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3850 nothing. PARM_DECLs are handled in `assign_parms'. */
28d81abb
RK
3851 if (TREE_CODE (decl) != VAR_DECL)
3852 return;
eabb9ed0 3853
44fe2e80 3854 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3855 return;
3856
3857 /* Create the RTL representation for the variable. */
3858
3859 if (type == error_mark_node)
19e7881c 3860 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1da68f56 3861
28d81abb
RK
3862 else if (DECL_SIZE (decl) == 0)
3863 /* Variable with incomplete type. */
3864 {
abde42f7 3865 rtx x;
28d81abb
RK
3866 if (DECL_INITIAL (decl) == 0)
3867 /* Error message was already done; now avoid a crash. */
abde42f7 3868 x = gen_rtx_MEM (BLKmode, const0_rtx);
28d81abb
RK
3869 else
3870 /* An initializer is going to decide the size of this array.
3871 Until we know the size, represent its address with a reg. */
abde42f7 3872 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3bdf5ad1 3873
abde42f7
JH
3874 set_mem_attributes (x, decl, 1);
3875 SET_DECL_RTL (decl, x);
28d81abb
RK
3876 }
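/* For example (illustrative C, not from this file), for

     int a[] = { 1, 2, 3 };

   the size is still unknown at this point, so the address lives in a
   pseudo register; once the initializer has determined the size, the
   fixed-size branch below patches that register via the OLDADDR
   code.  */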
3877 else if (DECL_MODE (decl) != BLKmode
3878 /* If -ffloat-store, don't put explicit float vars
3879 into regs. */
3880 && !(flag_float_store
3881 && TREE_CODE (type) == REAL_TYPE)
3882 && ! TREE_THIS_VOLATILE (decl)
37a08a29 3883 && (DECL_REGISTER (decl) || optimize))
28d81abb
RK
3884 {
3885 /* Automatic variable that can go in a register. */
98f3b471 3886 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
3887 enum machine_mode reg_mode
3888 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3889
19e7881c 3890 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
0d4903b8
RK
3891
3892 if (GET_CODE (DECL_RTL (decl)) == REG)
3893 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3894 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3895 {
3896 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3897 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3898 }
3899
7f070d5e
RK
3900 mark_user_reg (DECL_RTL (decl));
3901
e5e809f4 3902 if (POINTER_TYPE_P (type))
7f070d5e 3903 mark_reg_pointer (DECL_RTL (decl),
bdb429a5 3904 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
258a120b
JM
3905
3906 maybe_set_unchanging (DECL_RTL (decl), decl);
d96a6d1a
JM
3907
3908 /* If something wants our address, try to use ADDRESSOF. */
3909 if (TREE_ADDRESSABLE (decl))
3910 put_var_into_stack (decl);
28d81abb 3911 }
0df15c2c 3912
4559fd9e 3913 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
5e4ef18a 3914 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
3915 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3916 STACK_CHECK_MAX_VAR_SIZE)))
28d81abb
RK
3917 {
3918 /* Variable of fixed size that goes on the stack. */
3919 rtx oldaddr = 0;
3920 rtx addr;
0d4903b8 3921 rtx x;
28d81abb
RK
3922
3923 /* If we previously made RTL for this decl, it must be an array
3924 whose size was determined by the initializer.
3925 The old address was a register; set that register now
3926 to the proper address. */
19e7881c 3927 if (DECL_RTL_SET_P (decl))
28d81abb
RK
3928 {
3929 if (GET_CODE (DECL_RTL (decl)) != MEM
3930 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3931 abort ();
3932 oldaddr = XEXP (DECL_RTL (decl), 0);
3933 }
3934
28d81abb
RK
3935 /* Set alignment we actually gave this decl. */
3936 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3937 : GET_MODE_BITSIZE (DECL_MODE (decl)));
11cf4d18 3938 DECL_USER_ALIGN (decl) = 0;
28d81abb 3939
9432c136 3940 x = assign_temp (decl, 1, 1, 1);
0d4903b8
RK
3941 set_mem_attributes (x, decl, 1);
3942 SET_DECL_RTL (decl, x);
3943
28d81abb
RK
3944 if (oldaddr)
3945 {
3946 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3947 if (addr != oldaddr)
3948 emit_move_insn (oldaddr, addr);
3949 }
28d81abb
RK
3950 }
3951 else
3952 /* Dynamic-size object: must push space on the stack. */
3953 {
abde42f7 3954 rtx address, size, x;
28d81abb
RK
3955
3956 /* Record the stack pointer on entry to block, if have
3957 not already done so. */
7393c642
RK
3958 do_pending_stack_adjust ();
3959 save_stack_pointer ();
28d81abb 3960
4d9af632
JM
3961 /* In function-at-a-time mode, variable_size doesn't expand this,
3962 so do it now. */
3963 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3964 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3965 const0_rtx, VOIDmode, 0);
3966
28d81abb 3967 /* Compute the variable's size, in bytes. */
4559fd9e 3968 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3969 free_temp_slots ();
3970
ff91ad08 3971 /* Allocate space on the stack for the variable. Note that
4381f7c2 3972 DECL_ALIGN says how the variable is to be aligned and we
ff91ad08
RK
3973 cannot use it to conclude anything about the alignment of
3974 the size. */
37366632 3975 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3976 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3977
28d81abb 3978 /* Reference the variable indirect through that rtx. */
abde42f7
JH
3979 x = gen_rtx_MEM (DECL_MODE (decl), address);
3980 set_mem_attributes (x, decl, 1);
3981 SET_DECL_RTL (decl, x);
28d81abb 3982
2207e295 3983
28d81abb
RK
3984 /* Indicate the alignment we actually gave this variable. */
3985#ifdef STACK_BOUNDARY
3986 DECL_ALIGN (decl) = STACK_BOUNDARY;
3987#else
3988 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3989#endif
11cf4d18 3990 DECL_USER_ALIGN (decl) = 0;
28d81abb 3991 }
28d81abb
RK
3992}
3993\f
3994/* Emit code to perform the initialization of a declaration DECL. */
3995
3996void
3997expand_decl_init (decl)
3998 tree decl;
3999{
b4ac57ab
RS
4000 int was_used = TREE_USED (decl);
4001
ac79cd5a
RK
4002 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4003 for static decls. */
4004 if (TREE_CODE (decl) == CONST_DECL
4005 || TREE_STATIC (decl))
28d81abb
RK
4006 return;
4007
4008 /* Compute and store the initial value now. */
4009
4010 if (DECL_INITIAL (decl) == error_mark_node)
4011 {
4012 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
e5e809f4 4013
28d81abb 4014 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
e5e809f4 4015 || code == POINTER_TYPE || code == REFERENCE_TYPE)
28d81abb
RK
4016 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4017 0, 0);
4018 emit_queue ();
4019 }
4020 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4021 {
4022 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4023 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4024 emit_queue ();
4025 }
4026
b4ac57ab
RS
4027 /* Don't let the initialization count as "using" the variable. */
4028 TREE_USED (decl) = was_used;
4029
28d81abb 4030 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 4031 preserve_temp_slots (NULL_RTX);
28d81abb
RK
4032 free_temp_slots ();
4033}
4034
4035/* CLEANUP is an expression to be executed at exit from this binding contour;
4036 for example, in C++, it might call the destructor for this variable.
4037
4847c938
MS
4038 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4039 CLEANUP multiple times, and have the correct semantics. This
e976b8b2
MS
 4040   happens in exception handling, and for gotos, returns, and breaks
 4041   that leave the current scope.
28d81abb
RK
4042
4043 If CLEANUP is nonzero and DECL is zero, we record a cleanup
6d2f8887 4044 that is not associated with any particular variable. */
28d81abb
RK
4045
4046int
4047expand_decl_cleanup (decl, cleanup)
4048 tree decl, cleanup;
4049{
3f1d071b 4050 struct nesting *thisblock;
28d81abb
RK
4051
4052 /* Error if we are not in any block. */
01d939e8 4053 if (cfun == 0 || block_stack == 0)
28d81abb
RK
4054 return 0;
4055
3f1d071b
BS
4056 thisblock = block_stack;
4057
28d81abb
RK
4058 /* Record the cleanup if there is one. */
4059
4060 if (cleanup != 0)
4061 {
e976b8b2
MS
4062 tree t;
4063 rtx seq;
4064 tree *cleanups = &thisblock->data.block.cleanups;
4065 int cond_context = conditional_context ();
4066
4067 if (cond_context)
4068 {
4069 rtx flag = gen_reg_rtx (word_mode);
4070 rtx set_flag_0;
4071 tree cond;
4072
4073 start_sequence ();
4074 emit_move_insn (flag, const0_rtx);
4075 set_flag_0 = get_insns ();
4076 end_sequence ();
4077
4078 thisblock->data.block.last_unconditional_cleanup
2f937369 4079 = emit_insn_after (set_flag_0,
e976b8b2
MS
4080 thisblock->data.block.last_unconditional_cleanup);
4081
4082 emit_move_insn (flag, const1_rtx);
4083
b0c48229
NB
4084 cond = build_decl (VAR_DECL, NULL_TREE,
4085 (*lang_hooks.types.type_for_mode) (word_mode, 1));
19e7881c 4086 SET_DECL_RTL (cond, flag);
e976b8b2
MS
4087
4088 /* Conditionalize the cleanup. */
4089 cleanup = build (COND_EXPR, void_type_node,
78ef5b89 4090 (*lang_hooks.truthvalue_conversion) (cond),
e976b8b2
MS
4091 cleanup, integer_zero_node);
4092 cleanup = fold (cleanup);
4093
e2500fed 4094 cleanups = &thisblock->data.block.cleanups;
e976b8b2
MS
4095 }
4096
4847c938 4097 cleanup = unsave_expr (cleanup);
e976b8b2 4098
1f8f4a0b 4099 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
e976b8b2
MS
4100
4101 if (! cond_context)
4102 /* If this block has a cleanup, it belongs in stack_block_stack. */
4103 stack_block_stack = thisblock;
4104
4105 if (cond_context)
4106 {
4107 start_sequence ();
4108 }
4847c938 4109
52a11cbf 4110 if (! using_eh_for_cleanups_p)
e976b8b2 4111 TREE_ADDRESSABLE (t) = 1;
52a11cbf
RH
4112 else
4113 expand_eh_region_start ();
4114
e976b8b2
MS
4115 if (cond_context)
4116 {
4117 seq = get_insns ();
4118 end_sequence ();
7e82801f
MS
4119 if (seq)
4120 thisblock->data.block.last_unconditional_cleanup
2f937369
DM
4121 = emit_insn_after (seq,
4122 thisblock->data.block.last_unconditional_cleanup);
e976b8b2
MS
4123 }
4124 else
4125 {
4126 thisblock->data.block.last_unconditional_cleanup
4127 = get_last_insn ();
ef97beff
JJ
4128 /* When we insert instructions after the last unconditional cleanup,
4129 we don't adjust last_insn. That means that a later add_insn will
4130 clobber the instructions we've just added. The easiest way to
4131 fix this is to just insert another instruction here, so that the
4132 instructions inserted after the last unconditional cleanup are
4133 never the last instruction. */
6496a589 4134 emit_note (NULL, NOTE_INSN_DELETED);
e976b8b2 4135 }
28d81abb
RK
4136 }
4137 return 1;
4138}
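/* Illustration (hypothetical C++, not from this file): in

     int i = cond ? T ().f () : 0;

   the temporary's destructor must run only when COND was true.  The
   FLAG register set up above is cleared at the start of the construct,
   set to 1 when the conditional arm is actually entered, and the
   registered cleanup is wrapped in a COND_EXPR that tests it.  */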
659e5a7a
JM
4139
4140/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4141 is thrown. */
4142
4143int
4144expand_decl_cleanup_eh (decl, cleanup, eh_only)
4145 tree decl, cleanup;
4146 int eh_only;
4147{
4148 int ret = expand_decl_cleanup (decl, cleanup);
4149 if (cleanup && ret)
4150 {
4151 tree node = block_stack->data.block.cleanups;
4152 CLEANUP_EH_ONLY (node) = eh_only;
4153 }
4154 return ret;
4155}
28d81abb
RK
4156\f
4157/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4158 DECL_ELTS is the list of elements that belong to DECL's type.
4159 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
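/* E.g. (illustrative C++, not from this file): for a member

     union { int i; double d; };

   DECL is the anonymous union object and DECL_ELTS lists I and D; the
   loop below gives each element RTL aliasing the union's own MEM or
   REG, adjusted only in machine mode.  */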
4160
4161void
4162expand_anon_union_decl (decl, cleanup, decl_elts)
4163 tree decl, cleanup, decl_elts;
4164{
01d939e8 4165 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
28d81abb 4166 rtx x;
8a693bd0 4167 tree t;
28d81abb 4168
8a693bd0
MM
4169 /* If any of the elements are addressable, so is the entire union. */
4170 for (t = decl_elts; t; t = TREE_CHAIN (t))
4171 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4172 {
4173 TREE_ADDRESSABLE (decl) = 1;
4174 break;
4175 }
4381f7c2 4176
ec5cd386
RK
4177 expand_decl (decl);
4178 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
4179 x = DECL_RTL (decl);
4180
8a693bd0
MM
4181 /* Go through the elements, assigning RTL to each. */
4182 for (t = decl_elts; t; t = TREE_CHAIN (t))
28d81abb 4183 {
8a693bd0
MM
4184 tree decl_elt = TREE_VALUE (t);
4185 tree cleanup_elt = TREE_PURPOSE (t);
28d81abb
RK
4186 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4187
3256b817
JJ
 4188      /* If any of the elements are used, so is the entire
 4189	 union.  */
4190 if (TREE_USED (decl_elt))
4191 TREE_USED (decl) = 1;
4192
7b9032dd
JM
4193 /* Propagate the union's alignment to the elements. */
4194 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
11cf4d18 4195 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
7b9032dd
JM
4196
4197 /* If the element has BLKmode and the union doesn't, the union is
4198 aligned such that the element doesn't need to have BLKmode, so
4199 change the element's mode to the appropriate one for its size. */
4200 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4201 DECL_MODE (decl_elt) = mode
05bccae2 4202 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
7b9032dd 4203
28d81abb
RK
4204 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4205 instead create a new MEM rtx with the proper mode. */
4206 if (GET_CODE (x) == MEM)
4207 {
4208 if (mode == GET_MODE (x))
19e7881c 4209 SET_DECL_RTL (decl_elt, x);
28d81abb 4210 else
f1ec5147 4211 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
28d81abb
RK
4212 }
4213 else if (GET_CODE (x) == REG)
4214 {
4215 if (mode == GET_MODE (x))
19e7881c 4216 SET_DECL_RTL (decl_elt, x);
28d81abb 4217 else
ddef6bc7 4218 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
28d81abb
RK
4219 }
4220 else
4221 abort ();
4222
4223 /* Record the cleanup if there is one. */
4224
4225 if (cleanup != 0)
4226 thisblock->data.block.cleanups
1f8f4a0b
MM
4227 = tree_cons (decl_elt, cleanup_elt,
4228 thisblock->data.block.cleanups);
28d81abb
RK
4229 }
4230}
4231\f
4232/* Expand a list of cleanups LIST.
4233 Elements may be expressions or may be nested lists.
4234
4235 If DONT_DO is nonnull, then any list-element
4236 whose TREE_PURPOSE matches DONT_DO is omitted.
4237 This is sometimes used to avoid a cleanup associated with
4e44807b
MS
4238 a value that is being returned out of the scope.
4239
0e9e1e0a 4240 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
50d1b7a1
MS
4241 goto and handle protection regions specially in that case.
4242
4243 If REACHABLE, we emit code, otherwise just inform the exception handling
4244 code about this finalization. */
28d81abb
RK
4245
4246static void
50d1b7a1 4247expand_cleanups (list, dont_do, in_fixup, reachable)
28d81abb
RK
4248 tree list;
4249 tree dont_do;
4e44807b 4250 int in_fixup;
50d1b7a1 4251 int reachable;
28d81abb
RK
4252{
4253 tree tail;
4254 for (tail = list; tail; tail = TREE_CHAIN (tail))
4255 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4256 {
4257 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
50d1b7a1 4258 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
28d81abb
RK
4259 else
4260 {
52a11cbf
RH
4261 if (! in_fixup && using_eh_for_cleanups_p)
4262 expand_eh_region_end_cleanup (TREE_VALUE (tail));
61d6b1cc 4263
659e5a7a 4264 if (reachable && !CLEANUP_EH_ONLY (tail))
50d1b7a1
MS
4265 {
4266 /* Cleanups may be run multiple times. For example,
4267 when exiting a binding contour, we expand the
4268 cleanups associated with that contour. When a goto
4269 within that binding contour has a target outside that
4270 contour, it will expand all cleanups from its scope to
4271 the target. Though the cleanups are expanded multiple
4272 times, the control paths are non-overlapping so the
4273 cleanups will not be executed twice. */
9762d48d 4274
52a11cbf
RH
4275 /* We may need to protect from outer cleanups. */
4276 if (in_fixup && using_eh_for_cleanups_p)
4277 {
4278 expand_eh_region_start ();
e5e809f4 4279
52a11cbf
RH
4280 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4281
4282 expand_eh_region_end_fixup (TREE_VALUE (tail));
4283 }
4284 else
4285 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
e5e809f4 4286
50d1b7a1
MS
4287 free_temp_slots ();
4288 }
28d81abb
RK
4289 }
4290 }
4291}
4292
e976b8b2
MS
 4293/* Mark the context we are emitting RTL for as a conditional
4294 context, so that any cleanup actions we register with
4295 expand_decl_init will be properly conditionalized when those
4296 cleanup actions are later performed. Must be called before any
956d6950 4297 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
4298
4299void
956d6950 4300start_cleanup_deferral ()
e976b8b2 4301{
e3eef942
JW
4302 /* block_stack can be NULL if we are inside the parameter list. It is
4303 OK to do nothing, because cleanups aren't possible here. */
4304 if (block_stack)
4305 ++block_stack->data.block.conditional_code;
e976b8b2
MS
4306}
4307
4308/* Mark the end of a conditional region of code. Because cleanup
956d6950 4309 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
 4310   after we end the currently deferred cleanups; only after we end all
 4311   deferred cleanups are we back in unconditional code.  */
4312
4313void
956d6950 4314end_cleanup_deferral ()
e976b8b2 4315{
e3eef942
JW
4316 /* block_stack can be NULL if we are inside the parameter list. It is
4317 OK to do nothing, because cleanups aren't possible here. */
4318 if (block_stack)
4319 --block_stack->data.block.conditional_code;
e976b8b2
MS
4320}
4321
28d81abb
RK
4322tree
4323last_cleanup_this_contour ()
4324{
4325 if (block_stack == 0)
4326 return 0;
4327
4328 return block_stack->data.block.cleanups;
4329}
4330
4331/* Return 1 if there are any pending cleanups at this point.
4332 If THIS_CONTOUR is nonzero, check the current contour as well.
4333 Otherwise, look only at the contours that enclose this one. */
4334
4335int
4336any_pending_cleanups (this_contour)
4337 int this_contour;
4338{
4339 struct nesting *block;
4340
01d939e8 4341 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
28d81abb
RK
4342 return 0;
4343
4344 if (this_contour && block_stack->data.block.cleanups != NULL)
4345 return 1;
4346 if (block_stack->data.block.cleanups == 0
e976b8b2 4347 && block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
4348 return 0;
4349
4350 for (block = block_stack->next; block; block = block->next)
4351 if (block->data.block.cleanups != 0)
4352 return 1;
4353
4354 return 0;
4355}
4356\f
4357/* Enter a case (Pascal) or switch (C) statement.
4358 Push a block onto case_stack and nesting_stack
4359 to accumulate the case-labels that are seen
4360 and to record the labels generated for the statement.
4361
4362 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4363 Otherwise, this construct is transparent for `exit_something'.
4364
4365 EXPR is the index-expression to be dispatched on.
4366 TYPE is its nominal type. We could simply convert EXPR to this type,
4367 but instead we take short cuts. */
4368
4369void
4370expand_start_case (exit_flag, expr, type, printname)
4371 int exit_flag;
4372 tree expr;
4373 tree type;
dff01034 4374 const char *printname;
28d81abb 4375{
b3694847 4376 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
4377
4378 /* Make an entry on case_stack for the case we are entering. */
4379
e2500fed 4380 thiscase->desc = CASE_NESTING;
28d81abb
RK
4381 thiscase->next = case_stack;
4382 thiscase->all = nesting_stack;
4383 thiscase->depth = ++nesting_depth;
4384 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4385 thiscase->data.case_stmt.case_list = 0;
4386 thiscase->data.case_stmt.index_expr = expr;
4387 thiscase->data.case_stmt.nominal_type = type;
4388 thiscase->data.case_stmt.default_label = 0;
28d81abb 4389 thiscase->data.case_stmt.printname = printname;
a11759a3 4390 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
4391 case_stack = thiscase;
4392 nesting_stack = thiscase;
4393
4394 do_pending_stack_adjust ();
4395
4396 /* Make sure case_stmt.start points to something that won't
4397 need any transformation before expand_end_case. */
4398 if (GET_CODE (get_last_insn ()) != NOTE)
6496a589 4399 emit_note (NULL, NOTE_INSN_DELETED);
28d81abb
RK
4400
4401 thiscase->data.case_stmt.start = get_last_insn ();
4c581243 4402
956d6950 4403 start_cleanup_deferral ();
28d81abb
RK
4404}
4405
4406/* Start a "dummy case statement" within which case labels are invalid
4407 and are not connected to any larger real case statement.
4408 This can be used if you don't want to let a case statement jump
4409 into the middle of certain kinds of constructs. */
4410
4411void
4412expand_start_case_dummy ()
4413{
b3694847 4414 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
4415
4416 /* Make an entry on case_stack for the dummy. */
4417
e2500fed 4418 thiscase->desc = CASE_NESTING;
28d81abb
RK
4419 thiscase->next = case_stack;
4420 thiscase->all = nesting_stack;
4421 thiscase->depth = ++nesting_depth;
4422 thiscase->exit_label = 0;
4423 thiscase->data.case_stmt.case_list = 0;
4424 thiscase->data.case_stmt.start = 0;
4425 thiscase->data.case_stmt.nominal_type = 0;
4426 thiscase->data.case_stmt.default_label = 0;
28d81abb
RK
4427 case_stack = thiscase;
4428 nesting_stack = thiscase;
956d6950 4429 start_cleanup_deferral ();
28d81abb 4430}
28d81abb 4431\f
a11759a3
JR
4432static void
4433check_seenlabel ()
4434{
4435 /* If this is the first label, warn if any insns have been emitted. */
4436 if (case_stack->data.case_stmt.line_number_status >= 0)
4437 {
4438 rtx insn;
4439
4440 restore_line_number_status
4441 (case_stack->data.case_stmt.line_number_status);
4442 case_stack->data.case_stmt.line_number_status = -1;
4443
4444 for (insn = case_stack->data.case_stmt.start;
4445 insn;
4446 insn = NEXT_INSN (insn))
4447 {
4448 if (GET_CODE (insn) == CODE_LABEL)
4449 break;
4450 if (GET_CODE (insn) != NOTE
4451 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4452 {
4453 do
4454 insn = PREV_INSN (insn);
0dacbd0e
JW
4455 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4456
4457 /* If insn is zero, then there must have been a syntax error. */
4458 if (insn)
4381f7c2
KH
4459 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4460 NOTE_LINE_NUMBER (insn),
0dacbd0e
JW
4461 "unreachable code at beginning of %s",
4462 case_stack->data.case_stmt.printname);
a11759a3
JR
4463 break;
4464 }
4465 }
4466 }
4467}
4468
28d81abb
RK
4469/* Accumulate one case or default label inside a case or switch statement.
4470 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4471 The function CONVERTER, when applied to arguments T and V,
4472 converts the value V to the type T.
28d81abb
RK
4473
4474 If not currently inside a case or switch statement, return 1 and do
4475 nothing. The caller will print a language-specific error message.
4476 If VALUE is a duplicate or overlaps, return 2 and do nothing
4477 except store the (first) duplicate node in *DUPLICATE.
4478 If VALUE is out of range, return 3 and do nothing.
e976b8b2 4479 If we are jumping into the scope of a cleanup or var-sized array, return 5.
28d81abb
RK
4480 Return 0 on success.
4481
4482 Extended to handle range statements. */
4483
4484int
f52fba84 4485pushcase (value, converter, label, duplicate)
b3694847 4486 tree value;
cdadb1dd 4487 tree (*converter) PARAMS ((tree, tree));
b3694847 4488 tree label;
28d81abb
RK
4489 tree *duplicate;
4490{
28d81abb
RK
4491 tree index_type;
4492 tree nominal_type;
4493
4494 /* Fail if not inside a real case statement. */
4495 if (! (case_stack && case_stack->data.case_stmt.start))
4496 return 1;
4497
4498 if (stack_block_stack
4499 && stack_block_stack->depth > case_stack->depth)
4500 return 5;
4501
4502 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4503 nominal_type = case_stack->data.case_stmt.nominal_type;
4504
4505 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4506 if (index_type == error_mark_node)
4507 return 0;
4508
2f985ca6
JW
4509 /* Convert VALUE to the type in which the comparisons are nominally done. */
4510 if (value != 0)
4511 value = (*converter) (nominal_type, value);
4512
feb60352
R
4513 check_seenlabel ();
4514
28d81abb
RK
4515 /* Fail if this value is out of range for the actual type of the index
4516 (which may be narrower than NOMINAL_TYPE). */
14a774a9
RK
4517 if (value != 0
4518 && (TREE_CONSTANT_OVERFLOW (value)
4519 || ! int_fits_type_p (value, index_type)))
28d81abb
RK
4520 return 3;
4521
56cb9733 4522 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4523}
4524
956d6950
JL
4525/* Like pushcase but this case applies to all values between VALUE1 and
4526 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4527 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4528 starts at VALUE1 and ends at the highest value of the index type.
4529 If both are NULL, this case applies to all values.
4530
4531 The return value is the same as that of pushcase but there is one
4532 additional error code: 4 means the specified range was empty. */
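/* E.g. a front end handling the GNU case-range extension (illustrative,
   not from this file) can expand

     case 1 ... 5:

   by calling this function with VALUE1 == 1 and VALUE2 == 5, while a
   plain `case 1:' goes through pushcase above with a single VALUE.  */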
28d81abb
RK
4533
4534int
f52fba84 4535pushcase_range (value1, value2, converter, label, duplicate)
b3694847 4536 tree value1, value2;
cdadb1dd 4537 tree (*converter) PARAMS ((tree, tree));
b3694847 4538 tree label;
28d81abb
RK
4539 tree *duplicate;
4540{
28d81abb
RK
4541 tree index_type;
4542 tree nominal_type;
4543
4544 /* Fail if not inside a real case statement. */
4545 if (! (case_stack && case_stack->data.case_stmt.start))
4546 return 1;
4547
4548 if (stack_block_stack
4549 && stack_block_stack->depth > case_stack->depth)
4550 return 5;
4551
4552 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4553 nominal_type = case_stack->data.case_stmt.nominal_type;
4554
4555 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4556 if (index_type == error_mark_node)
4557 return 0;
4558
a11759a3 4559 check_seenlabel ();
28d81abb 4560
956d6950
JL
4561 /* Convert VALUEs to type in which the comparisons are nominally done
4562 and replace any unspecified value with the corresponding bound. */
4563 if (value1 == 0)
1974bfb1 4564 value1 = TYPE_MIN_VALUE (index_type);
956d6950 4565 if (value2 == 0)
1974bfb1 4566 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
4567
4568 /* Fail if the range is empty. Do this before any conversion since
4569 we want to allow out-of-range empty ranges. */
14a774a9 4570 if (value2 != 0 && tree_int_cst_lt (value2, value1))
956d6950
JL
4571 return 4;
4572
4381f7c2 4573 /* If the max was unbounded, use the max of the nominal_type we are
e1ee5cdc
RH
4574 converting to. Do this after the < check above to suppress false
4575 positives. */
14a774a9 4576 if (value2 == 0)
e1ee5cdc 4577 value2 = TYPE_MAX_VALUE (nominal_type);
28d81abb 4578
2f985ca6
JW
4579 value1 = (*converter) (nominal_type, value1);
4580 value2 = (*converter) (nominal_type, value2);
4581
28d81abb 4582 /* Fail if these values are out of range. */
956d6950
JL
4583 if (TREE_CONSTANT_OVERFLOW (value1)
4584 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
4585 return 3;
4586
956d6950
JL
4587 if (TREE_CONSTANT_OVERFLOW (value2)
4588 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
4589 return 3;
4590
57641239
RK
4591 return add_case_node (value1, value2, label, duplicate);
4592}
4593
4594/* Do the actual insertion of a case label for pushcase and pushcase_range
4595 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4596 slowdown for large switch statements. */
4597
56cb9733 4598int
57641239
RK
4599add_case_node (low, high, label, duplicate)
4600 tree low, high;
4601 tree label;
4602 tree *duplicate;
4603{
4604 struct case_node *p, **q, *r;
4605
56cb9733
MM
4606 /* If there's no HIGH value, then this is not a case range; it's
4607 just a simple case label. But that's just a degenerate case
4608 range. */
4609 if (!high)
4610 high = low;
4611
4612 /* Handle default labels specially. */
4613 if (!high && !low)
4614 {
4615 if (case_stack->data.case_stmt.default_label != 0)
4616 {
4617 *duplicate = case_stack->data.case_stmt.default_label;
4618 return 2;
4619 }
4620 case_stack->data.case_stmt.default_label = label;
4621 expand_label (label);
4622 return 0;
4623 }
4624
57641239
RK
4625 q = &case_stack->data.case_stmt.case_list;
4626 p = *q;
4627
69d4ca36 4628 while ((r = *q))
28d81abb 4629 {
57641239
RK
4630 p = r;
4631
4632 /* Keep going past elements distinctly greater than HIGH. */
4633 if (tree_int_cst_lt (high, p->low))
4634 q = &p->left;
4635
4636 /* or distinctly less than LOW. */
4637 else if (tree_int_cst_lt (p->high, low))
4638 q = &p->right;
4639
4640 else
28d81abb 4641 {
57641239
RK
4642 /* We have an overlap; this is an error. */
4643 *duplicate = p->code_label;
28d81abb
RK
4644 return 2;
4645 }
4646 }
4647
1da68f56 4648 /* Add this label to the chain, and succeed. */
28d81abb 4649
e2500fed 4650 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
1da68f56 4651 r->low = low;
28d81abb 4652
57641239 4653 /* If the bounds are equal, turn this into the one-value case. */
57641239
RK
4654 if (tree_int_cst_equal (low, high))
4655 r->high = r->low;
4656 else
1da68f56 4657 r->high = high;
57641239
RK
4658
4659 r->code_label = label;
28d81abb
RK
4660 expand_label (label);
4661
57641239
RK
4662 *q = r;
4663 r->parent = p;
4664 r->left = 0;
4665 r->right = 0;
4666 r->balance = 0;
4667
4668 while (p)
4669 {
4670 struct case_node *s;
4671
4672 if (r == p->left)
4673 {
4674 int b;
4675
4676 if (! (b = p->balance))
4677 /* Growth propagation from left side. */
4678 p->balance = -1;
4679 else if (b < 0)
4680 {
4681 if (r->balance < 0)
4682 {
4683 /* R-Rotation */
51723711 4684 if ((p->left = s = r->right))
57641239
RK
4685 s->parent = p;
4686
4687 r->right = p;
4688 p->balance = 0;
4689 r->balance = 0;
4690 s = p->parent;
4691 p->parent = r;
4692
51723711 4693 if ((r->parent = s))
57641239
RK
4694 {
4695 if (s->left == p)
4696 s->left = r;
4697 else
4698 s->right = r;
4699 }
4700 else
4701 case_stack->data.case_stmt.case_list = r;
4702 }
4703 else
4704 /* r->balance == +1 */
4705 {
5720c7e7
RK
4706 /* LR-Rotation */
4707
57641239
RK
4708 int b2;
4709 struct case_node *t = r->right;
4710
51723711 4711 if ((p->left = s = t->right))
57641239
RK
4712 s->parent = p;
4713
4714 t->right = p;
51723711 4715 if ((r->right = s = t->left))
57641239
RK
4716 s->parent = r;
4717
4718 t->left = r;
4719 b = t->balance;
4720 b2 = b < 0;
4721 p->balance = b2;
4722 b2 = -b2 - b;
4723 r->balance = b2;
4724 t->balance = 0;
4725 s = p->parent;
4726 p->parent = t;
4727 r->parent = t;
4728
51723711 4729 if ((t->parent = s))
57641239
RK
4730 {
4731 if (s->left == p)
4732 s->left = t;
4733 else
4734 s->right = t;
4735 }
4736 else
4737 case_stack->data.case_stmt.case_list = t;
4738 }
4739 break;
4740 }
4741
4742 else
4743 {
4744 /* p->balance == +1; growth of left side balances the node. */
4745 p->balance = 0;
4746 break;
4747 }
4748 }
4749 else
4750 /* r == p->right */
4751 {
4752 int b;
4753
4754 if (! (b = p->balance))
4755 /* Growth propagation from right side. */
4756 p->balance++;
4757 else if (b > 0)
4758 {
4759 if (r->balance > 0)
4760 {
4761 /* L-Rotation */
4762
51723711 4763 if ((p->right = s = r->left))
57641239
RK
4764 s->parent = p;
4765
4766 r->left = p;
4767 p->balance = 0;
4768 r->balance = 0;
4769 s = p->parent;
4770 p->parent = r;
51723711 4771 if ((r->parent = s))
57641239
RK
4772 {
4773 if (s->left == p)
4774 s->left = r;
4775 else
4776 s->right = r;
4777 }
4778
4779 else
4780 case_stack->data.case_stmt.case_list = r;
4781 }
4782
4783 else
4784 /* r->balance == -1 */
4785 {
4786 /* RL-Rotation */
4787 int b2;
4788 struct case_node *t = r->left;
4789
51723711 4790 if ((p->right = s = t->left))
57641239
RK
4791 s->parent = p;
4792
4793 t->left = p;
4794
51723711 4795 if ((r->left = s = t->right))
57641239
RK
4796 s->parent = r;
4797
4798 t->right = r;
4799 b = t->balance;
4800 b2 = b < 0;
4801 r->balance = b2;
4802 b2 = -b2 - b;
4803 p->balance = b2;
4804 t->balance = 0;
4805 s = p->parent;
4806 p->parent = t;
4807 r->parent = t;
4808
51723711 4809 if ((t->parent = s))
57641239
RK
4810 {
4811 if (s->left == p)
4812 s->left = t;
4813 else
4814 s->right = t;
4815 }
4816
4817 else
4818 case_stack->data.case_stmt.case_list = t;
4819 }
4820 break;
4821 }
4822 else
4823 {
4824 /* p->balance == -1; growth of right side balances the node. */
4825 p->balance = 0;
4826 break;
4827 }
4828 }
4829
4830 r = p;
4831 p = p->parent;
4832 }
28d81abb
RK
4833
4834 return 0;
4835}
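/* Example of the rebalancing above (hypothetical insertion order):
   adding the cases 1, 2 and 3 in that order leaves node 1 doubly
   right-heavy; the L-rotation then promotes 2 to the root with 1 and 3
   as children, keeping searches in the case list logarithmic.  */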
4836\f
94d6511c 4837/* Returns the number of possible values of TYPE.
770ae6cc
RK
4838 Returns -1 if the number is unknown, variable, or if the number does not
4839 fit in a HOST_WIDE_INT.
6f9fdf4d 4840 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
94d6511c
PB
4841 do not increase monotonically (there may be duplicates);
4842 to 1 if the values increase monotonically, but not always by 1;
4843 otherwise sets it to 0. */
4844
4845HOST_WIDE_INT
6f9fdf4d 4846all_cases_count (type, sparseness)
94d6511c 4847 tree type;
6f9fdf4d 4848 int *sparseness;
94d6511c 4849{
770ae6cc
RK
4850 tree t;
4851 HOST_WIDE_INT count, minval, lastval;
4852
6f9fdf4d 4853 *sparseness = 0;
94d6511c
PB
4854
4855 switch (TREE_CODE (type))
4856 {
94d6511c
PB
4857 case BOOLEAN_TYPE:
4858 count = 2;
4859 break;
770ae6cc 4860
94d6511c
PB
4861 case CHAR_TYPE:
4862 count = 1 << BITS_PER_UNIT;
4863 break;
770ae6cc 4864
94d6511c
PB
4865 default:
4866 case INTEGER_TYPE:
770ae6cc
RK
4867 if (TYPE_MAX_VALUE (type) != 0
4868 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4869 TYPE_MIN_VALUE (type))))
4870 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4871 convert (type, integer_zero_node))))
4872 && host_integerp (t, 1))
4873 count = tree_low_cst (t, 1);
94d6511c 4874 else
770ae6cc 4875 return -1;
94d6511c 4876 break;
770ae6cc 4877
94d6511c 4878 case ENUMERAL_TYPE:
770ae6cc
RK
4879 /* Don't waste time with enumeral types with huge values. */
4880 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4881 || TYPE_MAX_VALUE (type) == 0
4882 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4883 return -1;
4884
4885 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
94d6511c 4886 count = 0;
770ae6cc 4887
94d6511c
PB
4888 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4889 {
770ae6cc
RK
4890 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4891
6f9fdf4d
JJ
4892 if (*sparseness == 2 || thisval <= lastval)
4893 *sparseness = 2;
770ae6cc 4894 else if (thisval != minval + count)
6f9fdf4d 4895 *sparseness = 1;
770ae6cc 4896
6f9fdf4d 4897 lastval = thisval;
94d6511c
PB
4898 count++;
4899 }
94d6511c 4900 }
770ae6cc 4901
94d6511c
PB
4902 return count;
4903}
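/* Editorial illustration (hypothetical types, not part of stmt.c)
   of the *SPARSENESS classification computed above.  */
enum dense  { D0, D1, D2 };        /* count 3, *sparseness == 0 */
enum gapped { G0 = 0, G2 = 2 };    /* count 2, *sparseness == 1 */
enum dupes  { U0 = 0, U0B = 0 };   /* count 2, *sparseness == 2 */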
4904
94d6511c 4905#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4906 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4907 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4908#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4909 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4910 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
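/* Editorial usage sketch of the macros above (hypothetical size,
   not part of stmt.c).  A bitstring covering COUNT case offsets
   needs (COUNT + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR chars.  */
static int
bitarray_demo (void)
{
  unsigned char seen[8];                 /* covers 64 offsets */
  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 10);               /* offset 10 handled */
  return BITARRAY_TEST (seen, 10) != 0;  /* yields 1 */
}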
94d6511c
PB
4911
4912/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4913 with the case values we have seen, assuming the case expression
4914 has the given TYPE.
4915 SPARSENESS is as determined by all_cases_count.
4916
9faa82d8 4917 The time needed is proportional to COUNT, unless
94d6511c
PB
4918 SPARSENESS is 2, in which case quadratic time is needed. */
4919
df03cab5 4920void
94d6511c
PB
4921mark_seen_cases (type, cases_seen, count, sparseness)
4922 tree type;
4923 unsigned char *cases_seen;
770ae6cc 4924 HOST_WIDE_INT count;
94d6511c
PB
4925 int sparseness;
4926{
94d6511c 4927 tree next_node_to_try = NULL_TREE;
770ae6cc 4928 HOST_WIDE_INT next_node_offset = 0;
94d6511c 4929
b3694847 4930 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c 4931 tree val = make_node (INTEGER_CST);
770ae6cc 4932
94d6511c 4933 TREE_TYPE (val) = type;
5720c7e7 4934 if (! root)
4381f7c2
KH
4935 /* Do nothing. */
4936 ;
5720c7e7 4937 else if (sparseness == 2)
94d6511c 4938 {
5720c7e7 4939 tree t;
770ae6cc 4940 unsigned HOST_WIDE_INT xlo;
5720c7e7
RK
4941
4942 /* This less efficient loop is only needed to handle
4943 duplicate case values (multiple enum constants
4944 with the same value). */
4945 TREE_TYPE (val) = TREE_TYPE (root->low);
4381f7c2 4946 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5720c7e7 4947 t = TREE_CHAIN (t), xlo++)
94d6511c 4948 {
5720c7e7
RK
4949 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4950 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4951 n = root;
4952 do
94d6511c 4953 {
5720c7e7
RK
4954 /* Keep going past elements distinctly greater than VAL. */
4955 if (tree_int_cst_lt (val, n->low))
4956 n = n->left;
4381f7c2 4957
5720c7e7
RK
4958 /* or distinctly less than VAL. */
4959 else if (tree_int_cst_lt (n->high, val))
4960 n = n->right;
4381f7c2 4961
5720c7e7 4962 else
94d6511c 4963 {
5720c7e7
RK
4964 /* We have found a matching range. */
4965 BITARRAY_SET (cases_seen, xlo);
4966 break;
94d6511c
PB
4967 }
4968 }
5720c7e7
RK
4969 while (n);
4970 }
4971 }
4972 else
4973 {
4974 if (root->left)
4975 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
770ae6cc 4976
5720c7e7
RK
4977 for (n = root; n; n = n->right)
4978 {
4979 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4980 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4381f7c2 4981 while (! tree_int_cst_lt (n->high, val))
94d6511c 4982 {
5720c7e7
RK
4983 /* Calculate (into xlo) the "offset" of the integer (val).
4984 The element with lowest value has offset 0, the next smallest
4985 element has offset 1, etc. */
4986
770ae6cc
RK
4987 unsigned HOST_WIDE_INT xlo;
4988 HOST_WIDE_INT xhi;
5720c7e7 4989 tree t;
770ae6cc 4990
94d6511c
PB
4991 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4992 {
4993 /* The TYPE_VALUES will be in increasing order, so
4994 start searching where we last ended. */
4995 t = next_node_to_try;
4996 xlo = next_node_offset;
4997 xhi = 0;
4998 for (;;)
4999 {
5000 if (t == NULL_TREE)
5001 {
5002 t = TYPE_VALUES (type);
5003 xlo = 0;
5004 }
5005 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5006 {
5007 next_node_to_try = TREE_CHAIN (t);
5008 next_node_offset = xlo + 1;
5009 break;
5010 }
5011 xlo++;
5012 t = TREE_CHAIN (t);
5013 if (t == next_node_to_try)
5720c7e7
RK
5014 {
5015 xlo = -1;
5016 break;
5017 }
94d6511c
PB
5018 }
5019 }
5020 else
5021 {
5022 t = TYPE_MIN_VALUE (type);
5023 if (t)
5024 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5025 &xlo, &xhi);
5026 else
5027 xlo = xhi = 0;
5028 add_double (xlo, xhi,
5029 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5030 &xlo, &xhi);
5031 }
4381f7c2 5032
770ae6cc 5033 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
94d6511c 5034 BITARRAY_SET (cases_seen, xlo);
770ae6cc 5035
5720c7e7
RK
5036 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5037 1, 0,
5038 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 5039 }
94d6511c
PB
5040 }
5041 }
5042}
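/* Editorial sketch (not part of stmt.c): for types without
   TYPE_VALUES, the neg_double/add_double pair above just forms the
   zero-based offset  val - TYPE_MIN_VALUE (type)  in two-word
   arithmetic.  A single-word equivalent, assuming both values fit
   in a HOST_WIDE_INT:  */
static unsigned HOST_WIDE_INT
case_bit_offset (HOST_WIDE_INT val, HOST_WIDE_INT type_min)
{
  return (unsigned HOST_WIDE_INT) val - (unsigned HOST_WIDE_INT) type_min;
}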
5043
740b77b6
AC
5044/* Given a switch statement with an expression that is an enumeration
5045 type, warn if any of the enumeration type's literals are not
5046 covered by the case expressions of the switch. Also, warn if there
5047 are any extra switch cases that are *not* elements of the
5048 enumerated type.
28d81abb 5049
740b77b6 5050 Historical note:
28d81abb 5051
740b77b6
AC
5052 At one stage this function would: ``If all enumeration literals
5053 were covered by the case expressions, turn one of the expressions
5054 into the default expression since it should not be possible to fall
5055 through such a switch.''
5056
5057 That code has since been removed as: ``This optimization is
5058 disabled because it causes valid programs to fail. ANSI C does not
5059 guarantee that an expression with enum type will have a value that
5060 is the same as one of the enumeration literals.'' */
28d81abb
RK
5061
5062void
5063check_for_full_enumeration_handling (type)
5064 tree type;
5065{
b3694847
SS
5066 struct case_node *n;
5067 tree chain;
28d81abb 5068
0f41302f 5069 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
5070 int sparseness = 0;
5071
0f41302f 5072 /* The number of possible selector values. */
94d6511c
PB
5073 HOST_WIDE_INT size;
5074
5075 /* For each possible selector value, a one iff it has been matched
0f41302f 5076 by a case value alternative. */
94d6511c
PB
5077 unsigned char *cases_seen;
5078
0f41302f 5079 /* The allocated size of cases_seen, in chars. */
770ae6cc 5080 HOST_WIDE_INT bytes_needed;
94d6511c 5081
94d6511c
PB
5082 size = all_cases_count (type, &sparseness);
5083 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 5084
94d6511c 5085 if (size > 0 && size < 600000
c5c76735 5086 /* We deliberately use calloc here, not xcalloc, so that we can suppress
4381f7c2 5087 this optimization if we don't have enough memory rather than
c5c76735 5088 aborting, as xcalloc would do. */
f4ce9d90
KG
5089 && (cases_seen =
5090 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
28d81abb 5091 {
770ae6cc 5092 HOST_WIDE_INT i;
94d6511c 5093 tree v = TYPE_VALUES (type);
28d81abb 5094
94d6511c
PB
5095 /* The time complexity of this code is normally O(N), where
5096 N is the number of members in the enumerated type.
e0a2f705 5097 However, if type is an ENUMERAL_TYPE whose values do not
0f41302f 5098 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
5099
5100 mark_seen_cases (type, cases_seen, size, sparseness);
5101
770ae6cc 5102 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4381f7c2 5103 if (BITARRAY_TEST (cases_seen, i) == 0)
770ae6cc
RK
5104 warning ("enumeration value `%s' not handled in switch",
5105 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
94d6511c
PB
5106
5107 free (cases_seen);
28d81abb
RK
5108 }
5109
5110 /* Now we go the other way around; we warn if there are case
ac2a9454 5111 expressions that don't correspond to enumerators. This can
28d81abb 5112 occur since C and C++ don't enforce type-checking of
0f41302f 5113 assignments to enumeration variables. */
28d81abb 5114
5720c7e7
RK
5115 if (case_stack->data.case_stmt.case_list
5116 && case_stack->data.case_stmt.case_list->left)
5117 case_stack->data.case_stmt.case_list
5118 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
740b77b6
AC
5119 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5120 {
5121 for (chain = TYPE_VALUES (type);
5122 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5123 chain = TREE_CHAIN (chain))
5124 ;
786de7eb 5125
740b77b6
AC
5126 if (!chain)
5127 {
5128 if (TYPE_NAME (type) == 0)
5129 warning ("case value `%ld' not in enumerated type",
5130 (long) TREE_INT_CST_LOW (n->low));
5131 else
5132 warning ("case value `%ld' not in enumerated type `%s'",
5133 (long) TREE_INT_CST_LOW (n->low),
5134 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5135 == IDENTIFIER_NODE)
5136 ? TYPE_NAME (type)
5137 : DECL_NAME (TYPE_NAME (type))));
5138 }
5139 if (!tree_int_cst_equal (n->low, n->high))
5140 {
5141 for (chain = TYPE_VALUES (type);
5142 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5143 chain = TREE_CHAIN (chain))
5144 ;
786de7eb 5145
740b77b6
AC
5146 if (!chain)
5147 {
5148 if (TYPE_NAME (type) == 0)
5149 warning ("case value `%ld' not in enumerated type",
5150 (long) TREE_INT_CST_LOW (n->high));
5151 else
5152 warning ("case value `%ld' not in enumerated type `%s'",
5153 (long) TREE_INT_CST_LOW (n->high),
5154 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5155 == IDENTIFIER_NODE)
5156 ? TYPE_NAME (type)
5157 : DECL_NAME (TYPE_NAME (type))));
5158 }
5159 }
5160 }
28d81abb 5161}
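/* Editorial example (hypothetical input, not part of stmt.c): with
   -Wswitch and no default label, the function above warns in both
   directions for a translation unit like this.  */
enum color { RED, GREEN, BLUE };

static int
classify (enum color c)
{
  switch (c)
    {
    case RED:
      return 0;
    case 99:       /* "case value `99' not in enumerated type `color'" */
      return 1;
    }              /* GREEN and BLUE: "not handled in switch" */
  return -1;
}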
ca695ac9 5162
28d81abb 5163\f
ad82abb8 5164
28d81abb 5165/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 5166 in which ORIG_INDEX is the expression to be tested.
6f9fdf4d
JJ
5167 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5168 type as given in the source before any compiler conversions.
28d81abb
RK
5169 Generate the code to test it and jump to the right place. */
5170
5171void
6f9fdf4d
JJ
5172expand_end_case_type (orig_index, orig_type)
5173 tree orig_index, orig_type;
28d81abb 5174{
9fb60a0d 5175 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
28d81abb 5176 rtx default_label = 0;
b3694847 5177 struct case_node *n;
85066503 5178 unsigned int count;
28d81abb 5179 rtx index;
ca695ac9 5180 rtx table_label;
28d81abb
RK
5181 int ncases;
5182 rtx *labelvec;
b3694847 5183 int i;
2270623a 5184 rtx before_case, end;
b3694847 5185 struct nesting *thiscase = case_stack;
1b0cb6fc 5186 tree index_expr, index_type;
ca695ac9
JB
5187 int unsignedp;
5188
03c03770
AS
5189 /* Don't crash due to previous errors. */
5190 if (thiscase == NULL)
5191 return;
5192
ca695ac9
JB
5193 table_label = gen_label_rtx ();
5194 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
5195 index_type = TREE_TYPE (index_expr);
5196 unsignedp = TREE_UNSIGNED (index_type);
6f9fdf4d
JJ
5197 if (orig_type == NULL)
5198 orig_type = TREE_TYPE (orig_index);
28d81abb
RK
5199
5200 do_pending_stack_adjust ();
5201
09da1532 5202 /* This might get a spurious warning in the presence of a syntax error;
feb60352
R
5203 it could be fixed by moving the call to check_seenlabel after the
5204 check for error_mark_node, and copying the code of check_seenlabel that
5205 deals with case_stack->data.case_stmt.line_number_status /
5206 restore_line_number_status in front of the call to end_cleanup_deferral.
5207 However, this might miss some useful warnings in the presence of
5208 non-syntax errors. */
a11759a3
JR
5209 check_seenlabel ();
5210
28d81abb 5211 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 5212 if (index_type != error_mark_node)
28d81abb 5213 {
740b77b6
AC
5214 /* If the switch expression was an enumerated type, check that
5215 exactly all enumeration literals are covered by the cases.
173028e5
AC
5216 The check is made when -Wswitch was specified and there is no
5217 default case, or when -Wswitch-enum was specified. */
5218 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5219 || warn_switch_enum)
6f9fdf4d 5220 && TREE_CODE (orig_type) == ENUMERAL_TYPE
28d81abb 5221 && TREE_CODE (index_expr) != INTEGER_CST)
6f9fdf4d 5222 check_for_full_enumeration_handling (orig_type);
28d81abb 5223
d6961341
AC
5224 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5225 warning ("switch missing default case");
5226
28d81abb
RK
5227 /* If we don't have a default-label, create one here,
5228 after the body of the switch. */
5229 if (thiscase->data.case_stmt.default_label == 0)
5230 {
5231 thiscase->data.case_stmt.default_label
5232 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5233 expand_label (thiscase->data.case_stmt.default_label);
5234 }
5235 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5236
5237 before_case = get_last_insn ();
5238
5720c7e7
RK
5239 if (thiscase->data.case_stmt.case_list
5240 && thiscase->data.case_stmt.case_list->left)
b059139c 5241 thiscase->data.case_stmt.case_list
4381f7c2 5242 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
b059139c 5243
28d81abb
RK
5244 /* Simplify the case-list before we count it. */
5245 group_case_nodes (thiscase->data.case_stmt.case_list);
5246
5247 /* Get upper and lower bounds of case values.
5248 Also convert all the case values to the index expr's data type. */
5249
5250 count = 0;
5251 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5252 {
5253 /* Check low and high label values are integers. */
5254 if (TREE_CODE (n->low) != INTEGER_CST)
5255 abort ();
5256 if (TREE_CODE (n->high) != INTEGER_CST)
5257 abort ();
5258
1b0cb6fc
RK
5259 n->low = convert (index_type, n->low);
5260 n->high = convert (index_type, n->high);
28d81abb
RK
5261
5262 /* Count the elements and track the largest and smallest
5263 of them (treating them as signed even if they are not). */
5264 if (count++ == 0)
5265 {
5266 minval = n->low;
5267 maxval = n->high;
5268 }
5269 else
5270 {
5271 if (INT_CST_LT (n->low, minval))
5272 minval = n->low;
5273 if (INT_CST_LT (maxval, n->high))
5274 maxval = n->high;
5275 }
5276 /* A range counts double, since it requires two compares. */
5277 if (! tree_int_cst_equal (n->low, n->high))
5278 count++;
5279 }
5280
5281 /* Compute span of values. */
5282 if (count != 0)
1b0cb6fc 5283 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5284
956d6950 5285 end_cleanup_deferral ();
4c581243 5286
1b0cb6fc 5287 if (count == 0)
28d81abb
RK
5288 {
5289 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5290 emit_queue ();
5291 emit_jump (default_label);
5292 }
3474db0e 5293
28d81abb
RK
5294 /* If range of values is much bigger than number of values,
5295 make a sequence of conditional branches instead of a dispatch.
5296 If the switch-index is a constant, do it this way
5297 because we can optimize it. */
4f73c5dd 5298
ad82abb8 5299 else if (count < case_values_threshold ()
05bccae2 5300 || compare_tree_int (range, 10 * count) > 0
f0c988c8
BS
5301 /* RANGE may be signed, and really large ranges will show up
5302 as negative numbers. */
5303 || compare_tree_int (range, 0) < 0
3f6fe18e
RK
5304#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5305 || flag_pic
5306#endif
28d81abb 5307 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab
RS
5308 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5309 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 5310 {
37366632 5311 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5312
5313 /* If the index is a short or char that we do not have
5314 an insn to handle comparisons directly, convert it to
5315 a full integer now, rather than letting each comparison
5316 generate the conversion. */
5317
5318 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
ef89d648 5319 && ! have_insn_for (COMPARE, GET_MODE (index)))
28d81abb
RK
5320 {
5321 enum machine_mode wider_mode;
5322 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5323 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
ef89d648 5324 if (have_insn_for (COMPARE, wider_mode))
28d81abb
RK
5325 {
5326 index = convert_to_mode (wider_mode, index, unsignedp);
5327 break;
5328 }
5329 }
5330
5331 emit_queue ();
5332 do_pending_stack_adjust ();
5333
5334 index = protect_from_queue (index, 0);
5335 if (GET_CODE (index) == MEM)
5336 index = copy_to_reg (index);
5337 if (GET_CODE (index) == CONST_INT
5338 || TREE_CODE (index_expr) == INTEGER_CST)
5339 {
5340 /* Make a tree node with the proper constant value
5341 if we don't already have one. */
5342 if (TREE_CODE (index_expr) != INTEGER_CST)
5343 {
5344 index_expr
5345 = build_int_2 (INTVAL (index),
e9a042b6 5346 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5347 index_expr = convert (index_type, index_expr);
28d81abb
RK
5348 }
5349
5350 /* For constant index expressions we need only
4fe9b91c 5351 issue an unconditional branch to the appropriate
28d81abb
RK
5352 target code. The job of removing any unreachable
5353 code is left to the optimization phase if the
5354 "-O" option is specified. */
1b0cb6fc
RK
5355 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5356 if (! tree_int_cst_lt (index_expr, n->low)
5357 && ! tree_int_cst_lt (n->high, index_expr))
5358 break;
5359
28d81abb
RK
5360 if (n)
5361 emit_jump (label_rtx (n->code_label));
5362 else
5363 emit_jump (default_label);
5364 }
5365 else
5366 {
5367 /* If the index expression is not constant we generate
5368 a binary decision tree to select the appropriate
5369 target code. This is done as follows:
5370
5371 The list of cases is rearranged into a binary tree,
5372 nearly optimal assuming equal probability for each case.
5373
5374 The tree is transformed into RTL, eliminating
5375 redundant test conditions at the same time.
5376
5377 If program flow could reach the end of the
5378 decision tree an unconditional jump to the
5379 default code is emitted. */
5380
5381 use_cost_table
6f9fdf4d 5382 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
28d81abb 5383 && estimate_case_costs (thiscase->data.case_stmt.case_list));
9714cf43 5384 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
28d81abb 5385 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5386 default_label, index_type);
28d81abb
RK
5387 emit_jump_if_reachable (default_label);
5388 }
5389 }
5390 else
5391 {
ad82abb8
ZW
5392 if (! try_casesi (index_type, index_expr, minval, range,
5393 table_label, default_label))
28d81abb 5394 {
ecc9dd93 5395 index_type = thiscase->data.case_stmt.nominal_type;
1ff37128 5396
786de7eb 5397 /* Index jumptables from zero for suitable values of
1ff37128 5398 minval to avoid a subtraction. */
786de7eb
KH
5399 if (! optimize_size
5400 && compare_tree_int (minval, 0) > 0
5401 && compare_tree_int (minval, 3) < 0)
5402 {
5403 minval = integer_zero_node;
5404 range = maxval;
5405 }
1ff37128 5406
ad82abb8
ZW
5407 if (! try_tablejump (index_type, index_expr, minval, range,
5408 table_label, default_label))
5409 abort ();
28d81abb 5410 }
786de7eb 5411
28d81abb
RK
5412 /* Get table of labels to jump to, in order of case index. */
5413
1ff37128 5414 ncases = tree_low_cst (range, 0) + 1;
28d81abb 5415 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
961192e1 5416 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
28d81abb
RK
5417
5418 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5419 {
2d9d49e4
OH
5420 /* Compute the low and high bounds relative to the minimum
5421 value since that should fit in a HOST_WIDE_INT while the
5422 actual values may not. */
5423 HOST_WIDE_INT i_low
786de7eb
KH
5424 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5425 n->low, minval)), 1);
2d9d49e4 5426 HOST_WIDE_INT i_high
786de7eb
KH
5427 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5428 n->high, minval)), 1);
2d9d49e4
OH
5429 HOST_WIDE_INT i;
5430
5431 for (i = i_low; i <= i_high; i ++)
5432 labelvec[i]
5433 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
28d81abb
RK
5434 }
5435
5436 /* Fill in the gaps with the default. */
5437 for (i = 0; i < ncases; i++)
5438 if (labelvec[i] == 0)
38a448ca 5439 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
28d81abb
RK
5440
5441 /* Output the table */
5442 emit_label (table_label);
5443
18543a22 5444 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5445 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5446 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353 5447 gen_rtvec_v (ncases, labelvec),
4381f7c2 5448 const0_rtx, const0_rtx));
28d81abb 5449 else
38a448ca
RH
5450 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5451 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5452
5453 /* If the case insn drops through the table,
5454 after the table we must jump to the default-label.
5455 Otherwise record no drop-through after the table. */
5456#ifdef CASE_DROPS_THROUGH
5457 emit_jump (default_label);
5458#else
5459 emit_barrier ();
5460#endif
5461 }
5462
2270623a
JM
5463 before_case = NEXT_INSN (before_case);
5464 end = get_last_insn ();
2b7d71b2
JJ
5465 if (squeeze_notes (&before_case, &end))
5466 abort ();
2270623a 5467 reorder_insns (before_case, end,
28d81abb
RK
5468 thiscase->data.case_stmt.start);
5469 }
4c581243 5470 else
956d6950 5471 end_cleanup_deferral ();
1b0cb6fc 5472
28d81abb
RK
5473 if (thiscase->exit_label)
5474 emit_label (thiscase->exit_label);
5475
5476 POPSTACK (case_stack);
5477
5478 free_temp_slots ();
5479}
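/* Editorial restatement (not part of stmt.c) of the dispatch choice
   made above, with the tree arithmetic collapsed to plain integers.
   The real code consults case_values_threshold () -- assumed here to
   be a small constant -- as well as ASM_OUTPUT_ADDR_DIFF_ELT and
   flag_pic.  E.g. 4 cases spanning 0..100 give range 100 > 40, so a
   branch tree is used instead of a jump table.  */
static int
use_branch_tree_p (int count, long range, int index_is_constant)
{
  return count < 5                /* assumed threshold */
         || range > 10L * count   /* table would be too sparse */
         || range < 0             /* range overflowed the sign bit */
         || index_is_constant;    /* constant index: jump directly */
}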
5480
57641239
RK
5481/* Convert the tree NODE into a list linked by the right field, with the left
5482 field zeroed. RIGHT is used for recursion; it is a list to be placed
5483 rightmost in the resulting list. */
5484
5485static struct case_node *
5486case_tree2list (node, right)
5487 struct case_node *node, *right;
5488{
5489 struct case_node *left;
5490
5491 if (node->right)
5492 right = case_tree2list (node->right, right);
5493
5494 node->right = right;
51723711 5495 if ((left = node->left))
57641239
RK
5496 {
5497 node->left = 0;
5498 return case_tree2list (left, node);
5499 }
5500
5501 return node;
5502}
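/* Editorial illustration (hypothetical values, not part of stmt.c)
   of the flattening performed above:

            4
           / \                      1 -> 2 -> 3 -> 4 -> 5 -> 8
          2   5          ==>        linked through the right fields
         / \   \                    in ascending order, all left
        1   3   8                   fields zeroed.  */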
ca695ac9 5503
28d81abb
RK
5504/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5505
5506static void
5507do_jump_if_equal (op1, op2, label, unsignedp)
5508 rtx op1, op2, label;
5509 int unsignedp;
5510{
d43e0b7d 5511 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
28d81abb
RK
5512 {
5513 if (INTVAL (op1) == INTVAL (op2))
5514 emit_jump (label);
5515 }
5516 else
d43e0b7d
RK
5517 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5518 (GET_MODE (op1) == VOIDmode
5519 ? GET_MODE (op2) : GET_MODE (op1)),
5520 unsignedp, label);
28d81abb
RK
5521}
5522\f
5523/* Not all case values are encountered equally. This function
5524 uses a heuristic to weight case labels, in cases where that
5525 looks like a reasonable thing to do.
5526
5527 Right now, all we try to guess is text, and we establish the
5528 following weights:
5529
5530 chars above space: 16
5531 digits: 16
5532 default: 12
5533 space, punct: 8
5534 tab: 4
5535 newline: 2
5536 other "\" chars: 1
5537 remaining chars: 0
5538
5539 If we find any cases in the switch that are not either -1 or in the range
5540 of valid ASCII characters, or are control characters other than those
5542 commonly used with "\", don't treat this switch as scanning text.
5542
5543 Return 1 if these nodes are suitable for cost estimation, otherwise
5544 return 0. */
5545
5546static int
5547estimate_case_costs (node)
5548 case_node_ptr node;
5549{
f2d1f0ba 5550 tree min_ascii = integer_minus_one_node;
28d81abb
RK
5551 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5552 case_node_ptr n;
5553 int i;
5554
5555 /* If we haven't already made the cost table, make it now. Note that the
5556 lower bound of the table is -1, not zero. */
5557
2a2137c4 5558 if (! cost_table_initialized)
28d81abb 5559 {
2a2137c4 5560 cost_table_initialized = 1;
28d81abb
RK
5561
5562 for (i = 0; i < 128; i++)
5563 {
e9a780ec 5564 if (ISALNUM (i))
2a2137c4 5565 COST_TABLE (i) = 16;
e9a780ec 5566 else if (ISPUNCT (i))
2a2137c4 5567 COST_TABLE (i) = 8;
e9a780ec 5568 else if (ISCNTRL (i))
2a2137c4 5569 COST_TABLE (i) = -1;
28d81abb
RK
5570 }
5571
2a2137c4
RH
5572 COST_TABLE (' ') = 8;
5573 COST_TABLE ('\t') = 4;
5574 COST_TABLE ('\0') = 4;
5575 COST_TABLE ('\n') = 2;
5576 COST_TABLE ('\f') = 1;
5577 COST_TABLE ('\v') = 1;
5578 COST_TABLE ('\b') = 1;
28d81abb
RK
5579 }
5580
5581 /* See if all the case expressions look like text. It is text if the
5582 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5583 as signed arithmetic since we don't want to ever access cost_table with a
5584 value less than -1. Also check that none of the constants in a range
5585 are strange control characters. */
5586
5587 for (n = node; n; n = n->right)
5588 {
5589 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5590 return 0;
5591
05bccae2
RK
5592 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5593 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2a2137c4 5594 if (COST_TABLE (i) < 0)
28d81abb
RK
5595 return 0;
5596 }
5597
5598 /* All interesting values are within the range of interesting
5599 ASCII characters. */
5600 return 1;
5601}
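/* Editorial note (not part of stmt.c): for a scanner-style switch
   over characters, the table built above weights

     alphanumerics ('a', '7', ...)   16   (ISALNUM)
     punctuation   ('.', ';', ...)    8   (ISPUNCT)
     '\t'                             4
     '\n'                             2
     '\f', '\v', '\b'                 1

   so balance_case_nodes, below, puts the likely labels nearer the
   root of the decision tree.  */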
5602
5603/* Scan an ordered list of case nodes
5604 combining those with consecutive values or ranges.
5605
5606 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5607
5608static void
5609group_case_nodes (head)
5610 case_node_ptr head;
5611{
5612 case_node_ptr node = head;
5613
5614 while (node)
5615 {
5616 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5617 rtx lb2;
28d81abb
RK
5618 case_node_ptr np = node;
5619
5620 /* Try to group the successors of NODE with NODE. */
5621 while (((np = np->right) != 0)
5622 /* Do they jump to the same place? */
ad7e369f
JL
5623 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5624 || (lb != 0 && lb2 != 0
5625 && simplejump_p (lb)
5626 && simplejump_p (lb2)
5627 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5628 SET_SRC (PATTERN (lb2)))))
28d81abb
RK
5629 /* Are their ranges consecutive? */
5630 && tree_int_cst_equal (np->low,
5631 fold (build (PLUS_EXPR,
5632 TREE_TYPE (node->high),
5633 node->high,
5634 integer_one_node)))
5635 /* An overflow is not consecutive. */
5636 && tree_int_cst_lt (node->high,
5637 fold (build (PLUS_EXPR,
5638 TREE_TYPE (node->high),
5639 node->high,
5640 integer_one_node))))
5641 {
5642 node->high = np->high;
5643 }
5644 /* NP is the first node after NODE which can't be grouped with it.
5645 Delete the nodes in between, and move on to that node. */
5646 node->right = np;
5647 node = np;
5648 }
5649}
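/* Editorial illustration (hypothetical labels, not part of stmt.c):
   if case 1, case 2 and case 3 all jump to the same label L, the
   loop above merges them into the single range entry 1..3 -> L.
   Cases 3 and 5 stay separate (values not consecutive), and a node
   whose high value overflows when incremented is never merged.  */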
5650
5651/* Take an ordered list of case nodes
5652 and transform them into a near optimal binary tree,
6dc42e49 5653 on the assumption that any target code selection value is as
28d81abb
RK
5654 likely as any other.
5655
5656 The transformation is performed by splitting the ordered
5657 list into two equal sections plus a pivot. The parts are
5658 then attached to the pivot as left and right branches. Each
38e01259 5659 branch is then transformed recursively. */
28d81abb
RK
5660
5661static void
5662balance_case_nodes (head, parent)
5663 case_node_ptr *head;
5664 case_node_ptr parent;
5665{
b3694847 5666 case_node_ptr np;
28d81abb
RK
5667
5668 np = *head;
5669 if (np)
5670 {
5671 int cost = 0;
5672 int i = 0;
5673 int ranges = 0;
b3694847 5674 case_node_ptr *npp;
28d81abb
RK
5675 case_node_ptr left;
5676
5677 /* Count the number of entries on branch. Also count the ranges. */
5678
5679 while (np)
5680 {
5681 if (!tree_int_cst_equal (np->low, np->high))
5682 {
5683 ranges++;
5684 if (use_cost_table)
2a2137c4 5685 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
28d81abb
RK
5686 }
5687
5688 if (use_cost_table)
2a2137c4 5689 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
28d81abb
RK
5690
5691 i++;
5692 np = np->right;
5693 }
5694
5695 if (i > 2)
5696 {
5697 /* Split this list if it is long enough for that to help. */
5698 npp = head;
5699 left = *npp;
5700 if (use_cost_table)
5701 {
5702 /* Find the place in the list that bisects the list's total cost.
5703 Here I gets half the total cost. */
5704 int n_moved = 0;
5705 i = (cost + 1) / 2;
5706 while (1)
5707 {
5708 /* Skip nodes while their cost does not reach that amount. */
5709 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2a2137c4
RH
5710 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5711 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
28d81abb
RK
5712 if (i <= 0)
5713 break;
5714 npp = &(*npp)->right;
5715 n_moved += 1;
5716 }
5717 if (n_moved == 0)
5718 {
5719 /* Leave this branch lopsided, but optimize left-hand
5720 side and fill in `parent' fields for right-hand side. */
5721 np = *head;
5722 np->parent = parent;
5723 balance_case_nodes (&np->left, np);
5724 for (; np->right; np = np->right)
5725 np->right->parent = np;
5726 return;
5727 }
5728 }
5729 /* If there are just three nodes, split at the middle one. */
5730 else if (i == 3)
5731 npp = &(*npp)->right;
5732 else
5733 {
5734 /* Find the place in the list that bisects the list's total cost,
5735 where ranges count as 2.
5736 Here I gets half the total cost. */
5737 i = (i + ranges + 1) / 2;
5738 while (1)
5739 {
5740 /* Skip nodes while their cost does not reach that amount. */
5741 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5742 i--;
5743 i--;
5744 if (i <= 0)
5745 break;
5746 npp = &(*npp)->right;
5747 }
5748 }
5749 *head = np = *npp;
5750 *npp = 0;
5751 np->parent = parent;
5752 np->left = left;
5753
5754 /* Optimize each of the two split parts. */
5755 balance_case_nodes (&np->left, np);
5756 balance_case_nodes (&np->right, np);
5757 }
5758 else
5759 {
5760 /* Else leave this branch as one level,
5761 but fill in `parent' fields. */
5762 np = *head;
5763 np->parent = parent;
5764 for (; np->right; np = np->right)
5765 np->right->parent = np;
5766 }
5767 }
5768}
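/* Editorial illustration (hypothetical, equal weights, not part of
   stmt.c): an ordered list of seven single-value nodes splits at
   the middle pivot,

     1 2 3 4 5 6 7    ==>        4
                                / \
                               2   6     each half balanced
                              / \ / \    recursively,
                             1  3 5  7

   while with use_cost_table the pivot is instead the node where the
   running character cost first reaches half the total.  */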
5769\f
5770/* Search the parent sections of the case node tree
5771 to see if a test for the lower bound of NODE would be redundant.
5772 INDEX_TYPE is the type of the index expression.
5773
5774 The instructions to generate the case decision tree are
5775 output in the same order as nodes are processed so it is
5776 known that if a parent node checks the range of the current
5777 node minus one that the current node is bounded at its lower
5778 span. Thus the test would be redundant. */
5779
5780static int
5781node_has_low_bound (node, index_type)
5782 case_node_ptr node;
5783 tree index_type;
5784{
5785 tree low_minus_one;
5786 case_node_ptr pnode;
5787
5788 /* If the lower bound of this node is the lowest value in the index type,
5789 we need not test it. */
5790
5791 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5792 return 1;
5793
5794 /* If this node has a left branch, the value at the left must be less
5795 than that at this node, so it cannot be bounded at the bottom and
5796 we need not bother testing any further. */
5797
5798 if (node->left)
5799 return 0;
5800
5801 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5802 node->low, integer_one_node));
5803
5804 /* If the subtraction above overflowed, we can't verify anything.
5805 Otherwise, look for a parent that tests our value - 1. */
5806
5807 if (! tree_int_cst_lt (low_minus_one, node->low))
5808 return 0;
5809
5810 for (pnode = node->parent; pnode; pnode = pnode->parent)
5811 if (tree_int_cst_equal (low_minus_one, pnode->high))
5812 return 1;
5813
5814 return 0;
5815}
5816
5817/* Search the parent sections of the case node tree
5818 to see if a test for the upper bound of NODE would be redundant.
5819 INDEX_TYPE is the type of the index expression.
5820
5821 The instructions to generate the case decision tree are
5822 output in the same order as nodes are processed so it is
5823 known that if a parent node checks the range of the current
5824 node plus one that the current node is bounded at its upper
5825 span. Thus the test would be redundant. */
5826
5827static int
5828node_has_high_bound (node, index_type)
5829 case_node_ptr node;
5830 tree index_type;
5831{
5832 tree high_plus_one;
5833 case_node_ptr pnode;
5834
e1ee5cdc
RH
5835 /* If there is no upper bound, obviously no test is needed. */
5836
5837 if (TYPE_MAX_VALUE (index_type) == NULL)
5838 return 1;
5839
28d81abb
RK
5840 /* If the upper bound of this node is the highest value in the type
5841 of the index expression, we need not test against it. */
5842
5843 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5844 return 1;
5845
5846 /* If this node has a right branch, the value at the right must be greater
5847 than that at this node, so it cannot be bounded at the top and
5848 we need not bother testing any further. */
5849
5850 if (node->right)
5851 return 0;
5852
5853 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5854 node->high, integer_one_node));
5855
5856 /* If the addition above overflowed, we can't verify anything.
5857 Otherwise, look for a parent that tests our value + 1. */
5858
5859 if (! tree_int_cst_lt (node->high, high_plus_one))
5860 return 0;
5861
5862 for (pnode = node->parent; pnode; pnode = pnode->parent)
5863 if (tree_int_cst_equal (high_plus_one, pnode->low))
5864 return 1;
5865
5866 return 0;
5867}
5868
5869/* Search the parent sections of the
5870 case node tree to see if both tests for the upper and lower
5871 bounds of NODE would be redundant. */
5872
5873static int
5874node_is_bounded (node, index_type)
5875 case_node_ptr node;
5876 tree index_type;
5877{
5878 return (node_has_low_bound (node, index_type)
5879 && node_has_high_bound (node, index_type));
5880}
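/* Editorial illustration (hypothetical values, not part of stmt.c):
   suppose the emitted tree contains a parent node for 50 whose right
   child handles 51.  When the child is reached, a parent already
   tested value - 1 == 50, so node_has_low_bound returns 1 and the
   lower-bound compare is pruned; only the upper bound, if any,
   remains to be tested.  */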
5881
5882/* Emit an unconditional jump to LABEL unless it would be dead code. */
5883
5884static void
5885emit_jump_if_reachable (label)
5886 rtx label;
5887{
5888 if (GET_CODE (get_last_insn ()) != BARRIER)
5889 emit_jump (label);
5890}
5891\f
5892/* Emit step-by-step code to select a case for the value of INDEX.
5893 The thus generated decision tree follows the form of the
5894 case-node binary tree NODE, whose nodes represent test conditions.
5895 INDEX_TYPE is the type of the index of the switch.
5896
5897 Care is taken to prune redundant tests from the decision tree
5898 by detecting any boundary conditions already checked by
5899 emitted rtx. (See node_has_high_bound, node_has_low_bound
5900 and node_is_bounded, above.)
5901
5902 Where the test conditions can be shown to be redundant we emit
5903 an unconditional jump to the target code. As a further
5904 optimization, the subordinates of a tree node are examined to
5905 check for bounded nodes. In this case conditional and/or
5906 unconditional jumps as a result of the boundary check for the
5907 current node are arranged to target the subordinates' associated
38e01259 5908 code for out-of-bound conditions on the current node.
28d81abb 5909
f72aed24 5910 We can assume that when control reaches the code generated here,
28d81abb
RK
5911 the index value has already been compared with the parents
5912 of this node, and determined to be on the same side of each parent
5913 as this node is. Thus, if this node tests for the value 51,
5914 and a parent tested for 52, we don't need to consider
5915 the possibility of a value greater than 51. If another parent
5916 tests for the value 50, then this node need not test anything. */
5917
5918static void
5919emit_case_nodes (index, node, default_label, index_type)
5920 rtx index;
5921 case_node_ptr node;
5922 rtx default_label;
5923 tree index_type;
5924{
5925 /* If INDEX has an unsigned type, we must make unsigned branches. */
5926 int unsignedp = TREE_UNSIGNED (index_type);
28d81abb 5927 enum machine_mode mode = GET_MODE (index);
69107307 5928 enum machine_mode imode = TYPE_MODE (index_type);
28d81abb
RK
5929
5930 /* See if our parents have already tested everything for us.
5931 If they have, emit an unconditional jump for this node. */
5932 if (node_is_bounded (node, index_type))
5933 emit_jump (label_rtx (node->code_label));
5934
5935 else if (tree_int_cst_equal (node->low, node->high))
5936 {
5937 /* Node is single valued. First see if the index expression matches
0f41302f 5938 this node and then check our children, if any. */
28d81abb 5939
69107307
AO
5940 do_jump_if_equal (index,
5941 convert_modes (mode, imode,
5942 expand_expr (node->low, NULL_RTX,
5943 VOIDmode, 0),
5944 unsignedp),
28d81abb
RK
5945 label_rtx (node->code_label), unsignedp);
5946
5947 if (node->right != 0 && node->left != 0)
5948 {
5949 /* This node has children on both sides.
5950 Dispatch to one side or the other
5951 by comparing the index value with this node's value.
5952 If one subtree is bounded, check that one first,
5953 so we can avoid real branches in the tree. */
5954
5955 if (node_is_bounded (node->right, index_type))
5956 {
4381f7c2 5957 emit_cmp_and_jump_insns (index,
69107307
AO
5958 convert_modes
5959 (mode, imode,
5960 expand_expr (node->high, NULL_RTX,
5961 VOIDmode, 0),
5962 unsignedp),
d43e0b7d 5963 GT, NULL_RTX, mode, unsignedp,
4381f7c2 5964 label_rtx (node->right->code_label));
28d81abb
RK
5965 emit_case_nodes (index, node->left, default_label, index_type);
5966 }
5967
5968 else if (node_is_bounded (node->left, index_type))
5969 {
4381f7c2 5970 emit_cmp_and_jump_insns (index,
69107307
AO
5971 convert_modes
5972 (mode, imode,
5973 expand_expr (node->high, NULL_RTX,
5974 VOIDmode, 0),
5975 unsignedp),
d43e0b7d 5976 LT, NULL_RTX, mode, unsignedp,
c5d5d461 5977 label_rtx (node->left->code_label));
28d81abb
RK
5978 emit_case_nodes (index, node->right, default_label, index_type);
5979 }
5980
5981 else
5982 {
5983 /* Neither node is bounded. First distinguish the two sides;
5984 then emit the code for one side at a time. */
5985
4381f7c2 5986 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
28d81abb
RK
5987
5988 /* See if the value is on the right. */
4381f7c2 5989 emit_cmp_and_jump_insns (index,
69107307
AO
5990 convert_modes
5991 (mode, imode,
5992 expand_expr (node->high, NULL_RTX,
5993 VOIDmode, 0),
5994 unsignedp),
d43e0b7d 5995 GT, NULL_RTX, mode, unsignedp,
c5d5d461 5996 label_rtx (test_label));
28d81abb
RK
5997
5998 /* Value must be on the left.
5999 Handle the left-hand subtree. */
6000 emit_case_nodes (index, node->left, default_label, index_type);
6001 /* If left-hand subtree does nothing,
6002 go to default. */
6003 emit_jump_if_reachable (default_label);
6004
6005 /* Code branches here for the right-hand subtree. */
6006 expand_label (test_label);
6007 emit_case_nodes (index, node->right, default_label, index_type);
6008 }
6009 }
6010
6011 else if (node->right != 0 && node->left == 0)
6012 {
6013 /* Here we have a right child but no left, so we issue a conditional
6014 branch to default and process the right child.
6015
6016 Omit the conditional branch to default if it would avoid only one
6017 right child; it costs too much space to save so little time. */
6018
de14fd73 6019 if (node->right->right || node->right->left
28d81abb
RK
6020 || !tree_int_cst_equal (node->right->low, node->right->high))
6021 {
6022 if (!node_has_low_bound (node, index_type))
6023 {
4381f7c2 6024 emit_cmp_and_jump_insns (index,
69107307
AO
6025 convert_modes
6026 (mode, imode,
6027 expand_expr (node->high, NULL_RTX,
6028 VOIDmode, 0),
6029 unsignedp),
d43e0b7d 6030 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6031 default_label);
28d81abb
RK
6032 }
6033
6034 emit_case_nodes (index, node->right, default_label, index_type);
6035 }
6036 else
6037 /* We cannot process node->right normally
6038 since we haven't ruled out the numbers less than
6039 this node's value. So handle node->right explicitly. */
6040 do_jump_if_equal (index,
69107307
AO
6041 convert_modes
6042 (mode, imode,
6043 expand_expr (node->right->low, NULL_RTX,
6044 VOIDmode, 0),
6045 unsignedp),
28d81abb
RK
6046 label_rtx (node->right->code_label), unsignedp);
6047 }
6048
6049 else if (node->right == 0 && node->left != 0)
6050 {
6051 /* Just one subtree, on the left. */
4381f7c2 6052 if (node->left->left || node->left->right
28d81abb
RK
6053 || !tree_int_cst_equal (node->left->low, node->left->high))
6054 {
6055 if (!node_has_high_bound (node, index_type))
6056 {
69107307
AO
6057 emit_cmp_and_jump_insns (index,
6058 convert_modes
6059 (mode, imode,
6060 expand_expr (node->high, NULL_RTX,
6061 VOIDmode, 0),
6062 unsignedp),
d43e0b7d 6063 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6064 default_label);
28d81abb
RK
6065 }
6066
6067 emit_case_nodes (index, node->left, default_label, index_type);
6068 }
6069 else
6070 /* We cannot process node->left normally
6071 since we haven't ruled out the numbers greater than
6072 this node's value. So handle node->left explicitly. */
6073 do_jump_if_equal (index,
69107307
AO
6074 convert_modes
6075 (mode, imode,
6076 expand_expr (node->left->low, NULL_RTX,
6077 VOIDmode, 0),
6078 unsignedp),
28d81abb
RK
6079 label_rtx (node->left->code_label), unsignedp);
6080 }
6081 }
6082 else
6083 {
6084 /* Node is a range. These cases are very similar to those for a single
6085 value, except that we do not start by testing whether this node
6086 is the one to branch to. */
6087
6088 if (node->right != 0 && node->left != 0)
6089 {
6090 /* Node has subtrees on both sides.
6091 If the right-hand subtree is bounded,
6092 test for it first, since we can go straight there.
6093 Otherwise, we need to make a branch in the control structure,
6094 then handle the two subtrees. */
6095 tree test_label = 0;
6096
28d81abb
RK
6097 if (node_is_bounded (node->right, index_type))
6098 /* Right hand node is fully bounded so we can eliminate any
6099 testing and branch directly to the target code. */
69107307
AO
6100 emit_cmp_and_jump_insns (index,
6101 convert_modes
6102 (mode, imode,
6103 expand_expr (node->high, NULL_RTX,
6104 VOIDmode, 0),
6105 unsignedp),
d43e0b7d 6106 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6107 label_rtx (node->right->code_label));
28d81abb
RK
6108 else
6109 {
6110 /* Right hand node requires testing.
6111 Branch to a label where we will handle it later. */
6112
6113 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4381f7c2 6114 emit_cmp_and_jump_insns (index,
69107307
AO
6115 convert_modes
6116 (mode, imode,
6117 expand_expr (node->high, NULL_RTX,
6118 VOIDmode, 0),
6119 unsignedp),
d43e0b7d 6120 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6121 label_rtx (test_label));
28d81abb
RK
6122 }
6123
6124 /* Value belongs to this node or to the left-hand subtree. */
6125
69107307
AO
6126 emit_cmp_and_jump_insns (index,
6127 convert_modes
6128 (mode, imode,
6129 expand_expr (node->low, NULL_RTX,
6130 VOIDmode, 0),
6131 unsignedp),
d43e0b7d 6132 GE, NULL_RTX, mode, unsignedp,
c5d5d461 6133 label_rtx (node->code_label));
28d81abb
RK
6134
6135 /* Handle the left-hand subtree. */
6136 emit_case_nodes (index, node->left, default_label, index_type);
6137
6138 /* If right node had to be handled later, do that now. */
6139
6140 if (test_label)
6141 {
6142 /* If the left-hand subtree fell through,
6143 don't let it fall into the right-hand subtree. */
6144 emit_jump_if_reachable (default_label);
6145
6146 expand_label (test_label);
6147 emit_case_nodes (index, node->right, default_label, index_type);
6148 }
6149 }
6150
6151 else if (node->right != 0 && node->left == 0)
6152 {
6153 /* Deal with values to the left of this node,
6154 if they are possible. */
6155 if (!node_has_low_bound (node, index_type))
6156 {
4381f7c2 6157 emit_cmp_and_jump_insns (index,
69107307
AO
6158 convert_modes
6159 (mode, imode,
6160 expand_expr (node->low, NULL_RTX,
6161 VOIDmode, 0),
6162 unsignedp),
d43e0b7d 6163 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6164 default_label);
28d81abb
RK
6165 }
6166
6167 /* Value belongs to this node or to the right-hand subtree. */
6168
69107307
AO
6169 emit_cmp_and_jump_insns (index,
6170 convert_modes
6171 (mode, imode,
6172 expand_expr (node->high, NULL_RTX,
6173 VOIDmode, 0),
6174 unsignedp),
d43e0b7d 6175 LE, NULL_RTX, mode, unsignedp,
c5d5d461 6176 label_rtx (node->code_label));
28d81abb
RK
6177
6178 emit_case_nodes (index, node->right, default_label, index_type);
6179 }
6180
6181 else if (node->right == 0 && node->left != 0)
6182 {
6183 /* Deal with values to the right of this node,
6184 if they are possible. */
6185 if (!node_has_high_bound (node, index_type))
6186 {
4381f7c2 6187 emit_cmp_and_jump_insns (index,
69107307
AO
6188 convert_modes
6189 (mode, imode,
6190 expand_expr (node->high, NULL_RTX,
6191 VOIDmode, 0),
6192 unsignedp),
d43e0b7d 6193 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6194 default_label);
28d81abb
RK
6195 }
6196
6197 /* Value belongs to this node or to the left-hand subtree. */
6198
4381f7c2 6199 emit_cmp_and_jump_insns (index,
69107307
AO
6200 convert_modes
6201 (mode, imode,
6202 expand_expr (node->low, NULL_RTX,
6203 VOIDmode, 0),
6204 unsignedp),
d43e0b7d 6205 GE, NULL_RTX, mode, unsignedp,
c5d5d461 6206 label_rtx (node->code_label));
28d81abb
RK
6207
6208 emit_case_nodes (index, node->left, default_label, index_type);
6209 }
6210
6211 else
6212 {
6213 /* Node has no children so we check low and high bounds to remove
6214 redundant tests. Only one of the bounds can exist,
6215 since otherwise this node is bounded--a case tested already. */
923cbdc3
JH
6216 int high_bound = node_has_high_bound (node, index_type);
6217 int low_bound = node_has_low_bound (node, index_type);
28d81abb 6218
923cbdc3 6219 if (!high_bound && low_bound)
28d81abb 6220 {
4381f7c2 6221 emit_cmp_and_jump_insns (index,
69107307
AO
6222 convert_modes
6223 (mode, imode,
6224 expand_expr (node->high, NULL_RTX,
6225 VOIDmode, 0),
6226 unsignedp),
d43e0b7d 6227 GT, NULL_RTX, mode, unsignedp,
c5d5d461 6228 default_label);
28d81abb
RK
6229 }
6230
923cbdc3 6231 else if (!low_bound && high_bound)
28d81abb 6232 {
4381f7c2 6233 emit_cmp_and_jump_insns (index,
69107307
AO
6234 convert_modes
6235 (mode, imode,
6236 expand_expr (node->low, NULL_RTX,
6237 VOIDmode, 0),
6238 unsignedp),
d43e0b7d 6239 LT, NULL_RTX, mode, unsignedp,
c5d5d461 6240 default_label);
28d81abb 6241 }
923cbdc3
JH
6242 else if (!low_bound && !high_bound)
6243 {
9312aecc 6244 /* Widen LOW and HIGH to the same width as INDEX. */
b0c48229 6245 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9312aecc
JDA
6246 tree low = build1 (CONVERT_EXPR, type, node->low);
6247 tree high = build1 (CONVERT_EXPR, type, node->high);
ef89d648 6248 rtx low_rtx, new_index, new_bound;
9312aecc
JDA
6249
6250 /* Instead of doing two branches, emit one unsigned branch for
6251 (index-low) > (high-low). */
ef89d648
ZW
6252 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6253 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6254 NULL_RTX, unsignedp,
6255 OPTAB_WIDEN);
9312aecc
JDA
6256 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6257 high, low)),
6258 NULL_RTX, mode, 0);
786de7eb 6259
9312aecc 6260 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
d43e0b7d 6261 mode, 1, default_label);
923cbdc3 6262 }
28d81abb
RK
6263
6264 emit_jump (label_rtx (node->code_label));
6265 }
6266 }
6267}
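/* Editorial sketch (not the emitted RTL, not part of stmt.c): the
   final unbounded-leaf case above folds the two compares
   low <= index && index <= high into one unsigned test.  */
static int
in_range_p (unsigned long index, unsigned long low, unsigned long high)
{
  /* index - low wraps to a huge value when index < low, so a single
     unsigned comparison checks both bounds.  */
  return index - low <= high - low;
}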
e2500fed
GK
6268
6269#include "gt-stmt.h"