]> gcc.gnu.org Git - gcc.git/blame - gcc/stmt.c
Merge tree-ssa-20020619-branch into mainline.
[gcc.git] / gcc / stmt.c
CommitLineData
5e6908ea 1/* Expands front end tree to back end RTL for GCC
4559fd9e 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
a6dd4094 3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
28d81abb 4
1322177d 5This file is part of GCC.
28d81abb 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
28d81abb 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
28d81abb
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
28d81abb 21
28d81abb
RK
22/* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
ccd043a9 40
28d81abb
RK
41#include "rtl.h"
42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
28d81abb 44#include "flags.h"
6adb4e3a 45#include "except.h"
28d81abb 46#include "function.h"
28d81abb 47#include "insn-config.h"
28d81abb 48#include "expr.h"
e78d8e51 49#include "libfuncs.h"
28d81abb 50#include "hard-reg-set.h"
28d81abb
RK
51#include "loop.h"
52#include "recog.h"
ca695ac9 53#include "machmode.h"
10f0ad3d 54#include "toplev.h"
d6f4ec51 55#include "output.h"
87ff9c8e 56#include "ggc.h"
43577e6b 57#include "langhooks.h"
969d70ca 58#include "predict.h"
9bb231fd 59#include "optabs.h"
61f71b34 60#include "target.h"
66fd46b6 61#include "regs.h"
28d81abb
RK
62\f
63/* Functions and data structures for expanding case statements. */
64
65/* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
68
5720c7e7
RK
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
28d81abb
RK
72
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
78
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
de14fd73
RK
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
28d81abb
RK
84 and nodes on the right having higher values. We then output the tree
85 in order. */
86
e2500fed 87struct case_node GTY(())
28d81abb
RK
88{
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
57641239 95 int balance;
28d81abb
RK
96};
97
98typedef struct case_node case_node;
99typedef struct case_node *case_node_ptr;
100
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned; the +1 shifts the index range so that -1 maps to
   slot 0.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
28d81abb
RK
111\f
112/* Stack of control and binding constructs we are currently inside.
113
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
120
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
125
126 Each type of construct has its own individual stack.
127 For example, loops have `loop_stack'. Each object points to the
128 next object of the same type through the `next' field.
129
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
136
e2500fed 137struct nesting GTY(())
28d81abb
RK
138{
139 struct nesting *all;
140 struct nesting *next;
141 int depth;
142 rtx exit_label;
e2500fed
GK
143 enum nesting_desc {
144 COND_NESTING,
145 LOOP_NESTING,
146 BLOCK_NESTING,
147 CASE_NESTING
148 } desc;
149 union nesting_u
28d81abb
RK
150 {
151 /* For conds (if-then and if-then-else statements). */
e2500fed 152 struct nesting_cond
28d81abb
RK
153 {
154 /* Label for the end of the if construct.
155 There is none if EXITFLAG was not set
156 and no `else' has been seen yet. */
157 rtx endif_label;
158 /* Label for the end of this alternative.
0f41302f 159 This may be the end of the if or the next else/elseif. */
28d81abb 160 rtx next_label;
e2500fed 161 } GTY ((tag ("COND_NESTING"))) cond;
28d81abb 162 /* For loops. */
e2500fed 163 struct nesting_loop
28d81abb
RK
164 {
165 /* Label at the top of the loop; place to loop back to. */
166 rtx start_label;
167 /* Label at the end of the whole construct. */
168 rtx end_label;
169 /* Label for `continue' statement to jump to;
170 this is in front of the stepper of the loop. */
171 rtx continue_label;
e2500fed 172 } GTY ((tag ("LOOP_NESTING"))) loop;
28d81abb 173 /* For variable binding contours. */
e2500fed 174 struct nesting_block
28d81abb
RK
175 {
176 /* Sequence number of this binding contour within the function,
177 in order of entry. */
178 int block_start_count;
b93a436e 179 /* Nonzero => value to restore stack to on exit. */
28d81abb
RK
180 rtx stack_level;
181 /* The NOTE that starts this contour.
182 Used by expand_goto to check whether the destination
183 is within each contour or not. */
184 rtx first_insn;
185 /* Innermost containing binding contour that has a stack level. */
186 struct nesting *innermost_stack_block;
187 /* List of cleanups to be run on exit from this contour.
188 This is a list of expressions to be evaluated.
189 The TREE_PURPOSE of each link is the ..._DECL node
190 which the cleanup pertains to. */
191 tree cleanups;
192 /* List of cleanup-lists of blocks containing this block,
193 as they were at the locus where this block appears.
194 There is an element for each containing block,
195 ordered innermost containing block first.
e976b8b2 196 The tail of this list can be 0,
28d81abb
RK
197 if all remaining elements would be empty lists.
198 The element's TREE_VALUE is the cleanup-list of that block,
199 which may be null. */
200 tree outer_cleanups;
201 /* Chain of labels defined inside this binding contour.
202 For contours that have stack levels or cleanups. */
203 struct label_chain *label_chain;
e0a2f705 204 /* Nonzero if this is associated with an EH region. */
e976b8b2
MS
205 int exception_region;
206 /* The saved target_temp_slot_level from our outer block.
207 We may reset target_temp_slot_level to be the level of
208 this block, if that is done, target_temp_slot_level
209 reverts to the saved target_temp_slot_level at the very
210 end of the block. */
3f1d071b 211 int block_target_temp_slot_level;
e976b8b2
MS
212 /* True if we are currently emitting insns in an area of
213 output code that is controlled by a conditional
214 expression. This is used by the cleanup handling code to
215 generate conditional cleanup actions. */
216 int conditional_code;
217 /* A place to move the start of the exception region for any
218 of the conditional cleanups, must be at the end or after
219 the start of the last unconditional cleanup, and before any
220 conditional branch points. */
221 rtx last_unconditional_cleanup;
e2500fed 222 } GTY ((tag ("BLOCK_NESTING"))) block;
e8eebd31 223 /* For switch (C) or case (Pascal) statements. */
e2500fed 224 struct nesting_case
28d81abb
RK
225 {
226 /* The insn after which the case dispatch should finally
227 be emitted. Zero for a dummy. */
228 rtx start;
57641239
RK
229 /* A list of case labels; it is first built as an AVL tree.
230 During expand_end_case, this is converted to a list, and may be
231 rearranged into a nearly balanced binary tree. */
28d81abb
RK
232 struct case_node *case_list;
233 /* Label to jump to if no case matches. */
234 tree default_label;
235 /* The expression to be dispatched on. */
236 tree index_expr;
237 /* Type that INDEX_EXPR should be converted to. */
238 tree nominal_type;
28d81abb 239 /* Name of this kind of statement, for warnings. */
dff01034 240 const char *printname;
a11759a3
JR
241 /* Used to save no_line_numbers till we see the first case label.
242 We set this to -1 when we see the first case label in this
243 case statement. */
244 int line_number_status;
e2500fed
GK
245 } GTY ((tag ("CASE_NESTING"))) case_stmt;
246 } GTY ((desc ("%1.desc"))) data;
28d81abb
RK
247};
248
28d81abb
RK
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.
   NOTE: the macro-local variable was historically named `this',
   which collides with the C++ keyword; `top' avoids that.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *top;				\
     do { top = nesting_stack;				\
	  if (loop_stack == top)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == top)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == top)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == top)			\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == top)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = top->all; }			\
     while (top != target); } while (0)
28d81abb
RK
275\f
276/* In some cases it is impossible to generate code for a forward goto
277 until the label definition is seen. This happens when it may be necessary
278 for the goto to reset the stack pointer: we don't yet know how to do that.
279 So expand_goto puts an entry on this fixup list.
280 Each time a binding contour that resets the stack is exited,
281 we check each fixup.
282 If the target label has now been defined, we can insert the proper code. */
283
e2500fed 284struct goto_fixup GTY(())
28d81abb
RK
285{
286 /* Points to following fixup. */
287 struct goto_fixup *next;
288 /* Points to the insn before the jump insn.
289 If more code must be inserted, it goes after this insn. */
290 rtx before_jump;
291 /* The LABEL_DECL that this jump is jumping to, or 0
292 for break, continue or return. */
293 tree target;
7629c936
RS
294 /* The BLOCK for the place where this goto was found. */
295 tree context;
28d81abb
RK
296 /* The CODE_LABEL rtx that this is jumping to. */
297 rtx target_rtl;
298 /* Number of binding contours started in current function
299 before the label reference. */
300 int block_start_count;
301 /* The outermost stack level that should be restored for this jump.
302 Each time a binding contour that resets the stack is exited,
303 if the target label is *not* yet defined, this slot is updated. */
304 rtx stack_level;
305 /* List of lists of cleanup expressions to be run by this goto.
306 There is one element for each block that this goto is within.
e976b8b2 307 The tail of this list can be 0,
28d81abb
RK
308 if all remaining elements would be empty.
309 The TREE_VALUE contains the cleanup list of that block as of the
310 time this goto was seen.
311 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
312 tree cleanup_list_list;
313};
314
28d81abb
RK
315/* Within any binding contour that must restore a stack level,
316 all labels are recorded with a chain of these structures. */
317
e2500fed 318struct label_chain GTY(())
28d81abb
RK
319{
320 /* Points to following fixup. */
321 struct label_chain *next;
322 tree label;
323};
e9a25f70 324
e2500fed 325struct stmt_status GTY(())
3f1d071b
BS
326{
327 /* Chain of all pending binding contours. */
e2500fed 328 struct nesting * x_block_stack;
3f1d071b
BS
329
330 /* If any new stacks are added here, add them to POPSTACKS too. */
331
332 /* Chain of all pending binding contours that restore stack levels
333 or have cleanups. */
e2500fed 334 struct nesting * x_stack_block_stack;
3f1d071b
BS
335
336 /* Chain of all pending conditional statements. */
e2500fed 337 struct nesting * x_cond_stack;
3f1d071b
BS
338
339 /* Chain of all pending loops. */
e2500fed 340 struct nesting * x_loop_stack;
3f1d071b
BS
341
342 /* Chain of all pending case or switch statements. */
e2500fed 343 struct nesting * x_case_stack;
3f1d071b
BS
344
345 /* Separate chain including all of the above,
346 chained through the `all' field. */
e2500fed 347 struct nesting * x_nesting_stack;
3f1d071b
BS
348
349 /* Number of entries on nesting_stack now. */
350 int x_nesting_depth;
351
352 /* Number of binding contours started so far in this function. */
353 int x_block_start_count;
354
355 /* Each time we expand an expression-statement,
356 record the expr's type and its RTL value here. */
357 tree x_last_expr_type;
358 rtx x_last_expr_value;
0fab64a3 359 rtx x_last_expr_alt_rtl;
3f1d071b
BS
360
361 /* Nonzero if within a ({...}) grouping, in which case we must
362 always compute a value for each expr-stmt in case it is the last one. */
363 int x_expr_stmts_for_value;
364
c8608cd6
GDR
365 /* Location of last line-number note, whether we actually
366 emitted it or not. */
367 location_t x_emit_locus;
3f1d071b
BS
368
369 struct goto_fixup *x_goto_fixup_chain;
370};
371
01d939e8
BS
372#define block_stack (cfun->stmt->x_block_stack)
373#define stack_block_stack (cfun->stmt->x_stack_block_stack)
374#define cond_stack (cfun->stmt->x_cond_stack)
375#define loop_stack (cfun->stmt->x_loop_stack)
376#define case_stack (cfun->stmt->x_case_stack)
377#define nesting_stack (cfun->stmt->x_nesting_stack)
378#define nesting_depth (cfun->stmt->x_nesting_depth)
379#define current_block_start_count (cfun->stmt->x_block_start_count)
380#define last_expr_type (cfun->stmt->x_last_expr_type)
381#define last_expr_value (cfun->stmt->x_last_expr_value)
0fab64a3 382#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
01d939e8 383#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
c8608cd6 384#define emit_locus (cfun->stmt->x_emit_locus)
01d939e8 385#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
e9a25f70 386
272d0bee 387/* Nonzero if we are using EH to handle cleanups. */
6de9cd9a 388int using_eh_for_cleanups_p = 0;
e9a25f70 389
46c5ad27 390static int n_occurrences (int, const char *);
46c5ad27
AJ
391static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
392static void expand_goto_internal (tree, rtx, rtx);
393static int expand_fixup (tree, rtx, rtx);
46c5ad27 394static void expand_nl_goto_receiver (void);
46c5ad27
AJ
395static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
396static bool check_operand_nalternatives (tree, tree);
397static bool check_unique_operand_names (tree, tree);
398static char *resolve_operand_name_1 (char *, tree, tree);
399static void expand_null_return_1 (rtx);
400static enum br_predictor return_prediction (rtx);
c988af2b 401static rtx shift_return_value (rtx);
46c5ad27
AJ
402static void expand_value_return (rtx);
403static int tail_recursion_args (tree, tree);
404static void expand_cleanups (tree, int, int);
405static void check_seenlabel (void);
406static void do_jump_if_equal (rtx, rtx, rtx, int);
407static int estimate_case_costs (case_node_ptr);
408static bool same_case_target_p (rtx, rtx);
409static void strip_default_case_nodes (case_node_ptr *, rtx);
410static bool lshift_cheap_p (void);
411static int case_bit_test_cmp (const void *, const void *);
412static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
413static void group_case_nodes (case_node_ptr);
414static void balance_case_nodes (case_node_ptr *, case_node_ptr);
415static int node_has_low_bound (case_node_ptr, tree);
416static int node_has_high_bound (case_node_ptr, tree);
417static int node_is_bounded (case_node_ptr, tree);
418static void emit_jump_if_reachable (rtx);
419static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
420static struct case_node *case_tree2list (case_node *, case_node *);
28d81abb 421\f
e9a25f70 422void
46c5ad27 423using_eh_for_cleanups (void)
e9a25f70
JL
424{
425 using_eh_for_cleanups_p = 1;
426}
427
28d81abb 428void
46c5ad27 429init_stmt_for_function (void)
28d81abb 430{
3a70d621 431 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
28d81abb 432}
3f1d071b 433\f
3f1d071b 434/* Record the current file and line. Called from emit_line_note. */
0cea056b 435
28d81abb 436void
0cea056b 437set_file_and_line_for_stmt (location_t location)
3f1d071b 438{
61d84605
MM
439 /* If we're outputting an inline function, and we add a line note,
440 there may be no CFUN->STMT information. So, there's no need to
441 update it. */
442 if (cfun->stmt)
0cea056b 443 emit_locus = location;
28d81abb 444}
3f1d071b 445
28d81abb
RK
446/* Emit a no-op instruction. */
447
448void
46c5ad27 449emit_nop (void)
28d81abb 450{
ca695ac9
JB
451 rtx last_insn;
452
b93a436e
JL
453 last_insn = get_last_insn ();
454 if (!optimize
455 && (GET_CODE (last_insn) == CODE_LABEL
456 || (GET_CODE (last_insn) == NOTE
457 && prev_real_insn (last_insn) == 0)))
458 emit_insn (gen_nop ());
28d81abb
RK
459}
460\f
461/* Return the rtx-label that corresponds to a LABEL_DECL,
462 creating it if necessary. */
463
464rtx
46c5ad27 465label_rtx (tree label)
28d81abb
RK
466{
467 if (TREE_CODE (label) != LABEL_DECL)
468 abort ();
469
19e7881c 470 if (!DECL_RTL_SET_P (label))
6de9cd9a
DN
471 {
472 rtx r = gen_label_rtx ();
473 SET_DECL_RTL (label, r);
474 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
475 LABEL_PRESERVE_P (r) = 1;
476 }
28d81abb 477
19e7881c 478 return DECL_RTL (label);
28d81abb
RK
479}
480
046e4e36
ZW
481/* As above, but also put it on the forced-reference list of the
482 function that contains it. */
483rtx
46c5ad27 484force_label_rtx (tree label)
046e4e36
ZW
485{
486 rtx ref = label_rtx (label);
487 tree function = decl_function_context (label);
488 struct function *p;
489
490 if (!function)
491 abort ();
492
6de9cd9a 493 if (function != current_function_decl)
046e4e36
ZW
494 p = find_function_data (function);
495 else
496 p = cfun;
497
498 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
499 p->expr->x_forced_labels);
500 return ref;
501}
19e7881c 502
28d81abb
RK
503/* Add an unconditional jump to LABEL as the next sequential instruction. */
504
505void
46c5ad27 506emit_jump (rtx label)
28d81abb
RK
507{
508 do_pending_stack_adjust ();
509 emit_jump_insn (gen_jump (label));
510 emit_barrier ();
511}
512
513/* Emit code to jump to the address
514 specified by the pointer expression EXP. */
515
516void
46c5ad27 517expand_computed_goto (tree exp)
28d81abb 518{
b93a436e 519 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
ed9a9db1 520
5ae6cd0d 521 x = convert_memory_address (Pmode, x);
ffa1a1ce 522
b93a436e 523 emit_queue ();
acd693d1 524
99dc7277
RH
525 if (! cfun->computed_goto_common_label)
526 {
527 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
528 cfun->computed_goto_common_label = gen_label_rtx ();
46c5ad27 529
99dc7277 530 do_pending_stack_adjust ();
0eadce52 531 emit_label (cfun->computed_goto_common_label);
99dc7277
RH
532 emit_indirect_jump (cfun->computed_goto_common_reg);
533
534 current_function_has_computed_jump = 1;
535 }
536 else
537 {
538 emit_move_insn (cfun->computed_goto_common_reg, x);
539 emit_jump (cfun->computed_goto_common_label);
540 }
28d81abb
RK
541}
542\f
543/* Handle goto statements and the labels that they can go to. */
544
545/* Specify the location in the RTL code of a label LABEL,
546 which is a LABEL_DECL tree node.
547
548 This is used for the kind of label that the user can jump to with a
549 goto statement, and for alternatives of a switch or case statement.
550 RTL labels generated for loops and conditionals don't go through here;
551 they are generated directly at the RTL level, by other functions below.
552
553 Note that this has nothing to do with defining label *names*.
554 Languages vary in how they do that and what that even means. */
555
556void
46c5ad27 557expand_label (tree label)
28d81abb
RK
558{
559 struct label_chain *p;
6de9cd9a 560 rtx label_r = label_rtx (label);
28d81abb
RK
561
562 do_pending_stack_adjust ();
6de9cd9a 563 emit_label (label_r);
28d81abb
RK
564 if (DECL_NAME (label))
565 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
566
6de9cd9a
DN
567 if (DECL_NONLOCAL (label))
568 {
569 expand_nl_goto_receiver ();
570 nonlocal_goto_handler_labels
571 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
572 nonlocal_goto_handler_labels);
573 }
574
575 if (FORCED_LABEL (label))
576 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
577
578 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
579 maybe_set_first_label_num (label_r);
580
28d81abb
RK
581 if (stack_block_stack != 0)
582 {
703ad42b 583 p = ggc_alloc (sizeof (struct label_chain));
28d81abb
RK
584 p->next = stack_block_stack->data.block.label_chain;
585 stack_block_stack->data.block.label_chain = p;
586 p->label = label;
587 }
588}
589
28d81abb
RK
590/* Generate RTL code for a `goto' statement with target label LABEL.
591 LABEL should be a LABEL_DECL tree node that was or will later be
592 defined with `expand_label'. */
593
594void
46c5ad27 595expand_goto (tree label)
28d81abb 596{
6de9cd9a
DN
597#ifdef ENABLE_CHECKING
598 /* Check for a nonlocal goto to a containing function. Should have
599 gotten translated to __builtin_nonlocal_goto. */
600 tree context = decl_function_context (label);
28d81abb 601 if (context != 0 && context != current_function_decl)
6de9cd9a 602 abort ();
28d81abb 603#endif
4b01bd16 604
6de9cd9a 605 expand_goto_internal (label, label_rtx (label), NULL_RTX);
28d81abb
RK
606}
607
608/* Generate RTL code for a `goto' statement with target label BODY.
609 LABEL should be a LABEL_REF.
610 LAST_INSN, if non-0, is the rtx we should consider as the last
611 insn emitted (for the purposes of cleaning up a return). */
612
613static void
46c5ad27 614expand_goto_internal (tree body, rtx label, rtx last_insn)
28d81abb
RK
615{
616 struct nesting *block;
617 rtx stack_level = 0;
618
619 if (GET_CODE (label) != CODE_LABEL)
620 abort ();
621
622 /* If label has already been defined, we can tell now
623 whether and how we must alter the stack level. */
624
625 if (PREV_INSN (label) != 0)
626 {
627 /* Find the innermost pending block that contains the label.
628 (Check containment by comparing insn-uids.)
629 Then restore the outermost stack level within that block,
630 and do cleanups of all blocks contained in it. */
631 for (block = block_stack; block; block = block->next)
632 {
633 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
634 break;
635 if (block->data.block.stack_level != 0)
636 stack_level = block->data.block.stack_level;
637 /* Execute the cleanups for blocks we are exiting. */
638 if (block->data.block.cleanups != 0)
639 {
b39b8084 640 expand_cleanups (block->data.block.cleanups, 1, 1);
28d81abb
RK
641 do_pending_stack_adjust ();
642 }
643 }
644
645 if (stack_level)
646 {
0f41302f
MS
647 /* Ensure stack adjust isn't done by emit_jump, as this
648 would clobber the stack pointer. This one should be
649 deleted as dead by flow. */
28d81abb
RK
650 clear_pending_stack_adjust ();
651 do_pending_stack_adjust ();
7393c642
RK
652
653 /* Don't do this adjust if it's to the end label and this function
654 is to return with a depressed stack pointer. */
655 if (label == return_label
c4a6c0f3
RK
656 && (((TREE_CODE (TREE_TYPE (current_function_decl))
657 == FUNCTION_TYPE)
658 && (TYPE_RETURNS_STACK_DEPRESSED
659 (TREE_TYPE (current_function_decl))))))
7393c642
RK
660 ;
661 else
662 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
28d81abb
RK
663 }
664
665 if (body != 0 && DECL_TOO_LATE (body))
666 error ("jump to `%s' invalidly jumps into binding contour",
667 IDENTIFIER_POINTER (DECL_NAME (body)));
668 }
669 /* Label not yet defined: may need to put this goto
670 on the fixup list. */
671 else if (! expand_fixup (body, label, last_insn))
672 {
673 /* No fixup needed. Record that the label is the target
674 of at least one goto that has no fixup. */
675 if (body != 0)
676 TREE_ADDRESSABLE (body) = 1;
677 }
678
679 emit_jump (label);
680}
681\f
682/* Generate if necessary a fixup for a goto
683 whose target label in tree structure (if any) is TREE_LABEL
684 and whose target in rtl is RTL_LABEL.
685
686 If LAST_INSN is nonzero, we pretend that the jump appears
687 after insn LAST_INSN instead of at the current point in the insn stream.
688
023b57e6
RS
689 The fixup will be used later to insert insns just before the goto.
690 Those insns will restore the stack level as appropriate for the
691 target label, and will (in the case of C++) also invoke any object
692 destructors which have to be invoked when we exit the scopes which
693 are exited by the goto.
28d81abb
RK
694
695 Value is nonzero if a fixup is made. */
696
697static int
46c5ad27 698expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
28d81abb
RK
699{
700 struct nesting *block, *end_block;
701
702 /* See if we can recognize which block the label will be output in.
703 This is possible in some very common cases.
704 If we succeed, set END_BLOCK to that block.
705 Otherwise, set it to 0. */
706
707 if (cond_stack
708 && (rtl_label == cond_stack->data.cond.endif_label
709 || rtl_label == cond_stack->data.cond.next_label))
710 end_block = cond_stack;
711 /* If we are in a loop, recognize certain labels which
712 are likely targets. This reduces the number of fixups
713 we need to create. */
714 else if (loop_stack
715 && (rtl_label == loop_stack->data.loop.start_label
716 || rtl_label == loop_stack->data.loop.end_label
717 || rtl_label == loop_stack->data.loop.continue_label))
718 end_block = loop_stack;
719 else
720 end_block = 0;
721
722 /* Now set END_BLOCK to the binding level to which we will return. */
723
724 if (end_block)
725 {
726 struct nesting *next_block = end_block->all;
727 block = block_stack;
728
729 /* First see if the END_BLOCK is inside the innermost binding level.
730 If so, then no cleanups or stack levels are relevant. */
731 while (next_block && next_block != block)
732 next_block = next_block->all;
733
734 if (next_block)
735 return 0;
736
737 /* Otherwise, set END_BLOCK to the innermost binding level
738 which is outside the relevant control-structure nesting. */
739 next_block = block_stack->next;
740 for (block = block_stack; block != end_block; block = block->all)
741 if (block == next_block)
742 next_block = next_block->next;
743 end_block = next_block;
744 }
745
746 /* Does any containing block have a stack level or cleanups?
747 If not, no fixup is needed, and that is the normal case
748 (the only case, for standard C). */
749 for (block = block_stack; block != end_block; block = block->next)
750 if (block->data.block.stack_level != 0
751 || block->data.block.cleanups != 0)
752 break;
753
754 if (block != end_block)
755 {
756 /* Ok, a fixup is needed. Add a fixup to the list of such. */
703ad42b 757 struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
28d81abb
RK
758 /* In case an old stack level is restored, make sure that comes
759 after any pending stack adjust. */
760 /* ?? If the fixup isn't to come at the present position,
761 doing the stack adjust here isn't useful. Doing it with our
762 settings at that location isn't useful either. Let's hope
763 someone does it! */
764 if (last_insn == 0)
765 do_pending_stack_adjust ();
28d81abb
RK
766 fixup->target = tree_label;
767 fixup->target_rtl = rtl_label;
023b57e6
RS
768
769 /* Create a BLOCK node and a corresponding matched set of
12f61228 770 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
023b57e6
RS
771 this point. The notes will encapsulate any and all fixup
772 code which we might later insert at this point in the insn
773 stream. Also, the BLOCK node will be the parent (i.e. the
774 `SUPERBLOCK') of any other BLOCK nodes which we might create
0679e3fc
JM
775 later on when we are expanding the fixup code.
776
777 Note that optimization passes (including expand_end_loop)
778 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
779 as a placeholder. */
023b57e6
RS
780
781 {
786de7eb
KH
782 rtx original_before_jump
783 = last_insn ? last_insn : get_last_insn ();
0679e3fc 784 rtx start;
a97901e6 785 rtx end;
e6fd097e
MM
786 tree block;
787
788 block = make_node (BLOCK);
789 TREE_USED (block) = 1;
790
01d939e8 791 if (!cfun->x_whole_function_mode_p)
ae2bcd98 792 lang_hooks.decls.insert_block (block);
a97901e6 793 else
e6fd097e 794 {
4381f7c2 795 BLOCK_CHAIN (block)
a97901e6
MM
796 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
797 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
798 = block;
e6fd097e 799 }
023b57e6 800
786de7eb 801 start_sequence ();
2e040219 802 start = emit_note (NOTE_INSN_BLOCK_BEG);
01d939e8 803 if (cfun->x_whole_function_mode_p)
a97901e6 804 NOTE_BLOCK (start) = block;
2e040219
NS
805 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
806 end = emit_note (NOTE_INSN_BLOCK_END);
01d939e8 807 if (cfun->x_whole_function_mode_p)
a97901e6 808 NOTE_BLOCK (end) = block;
786de7eb
KH
809 fixup->context = block;
810 end_sequence ();
2f937369 811 emit_insn_after (start, original_before_jump);
023b57e6
RS
812 }
813
3f1d071b 814 fixup->block_start_count = current_block_start_count;
28d81abb
RK
815 fixup->stack_level = 0;
816 fixup->cleanup_list_list
e976b8b2 817 = ((block->data.block.outer_cleanups
28d81abb 818 || block->data.block.cleanups)
37366632 819 ? tree_cons (NULL_TREE, block->data.block.cleanups,
28d81abb
RK
820 block->data.block.outer_cleanups)
821 : 0);
822 fixup->next = goto_fixup_chain;
823 goto_fixup_chain = fixup;
824 }
825
826 return block != 0;
827}
cfc3d13f
RK
828\f
829/* Expand any needed fixups in the outputmost binding level of the
830 function. FIRST_INSN is the first insn in the function. */
ca695ac9 831
cfc3d13f 832void
46c5ad27 833expand_fixups (rtx first_insn)
cfc3d13f 834{
9714cf43 835 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
cfc3d13f 836}
ca695ac9 837
28d81abb
RK
838/* When exiting a binding contour, process all pending gotos requiring fixups.
839 THISBLOCK is the structure that describes the block being exited.
840 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
841 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
842 FIRST_INSN is the insn that began this contour.
843
844 Gotos that jump out of this contour must restore the
845 stack level and do the cleanups before actually jumping.
846
cda26058
RK
847 DONT_JUMP_IN positive means report error if there is a jump into this
848 contour from before the beginning of the contour. This is also done if
849 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
28d81abb 850
704f4dca 851static void
46c5ad27
AJ
852fixup_gotos (struct nesting *thisblock, rtx stack_level,
853 tree cleanup_list, rtx first_insn, int dont_jump_in)
28d81abb 854{
b3694847 855 struct goto_fixup *f, *prev;
28d81abb
RK
856
857 /* F is the fixup we are considering; PREV is the previous one. */
858 /* We run this loop in two passes so that cleanups of exited blocks
859 are run first, and blocks that are exited are marked so
860 afterwards. */
861
862 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
863 {
864 /* Test for a fixup that is inactive because it is already handled. */
865 if (f->before_jump == 0)
866 {
867 /* Delete inactive fixup from the chain, if that is easy to do. */
868 if (prev != 0)
869 prev->next = f->next;
870 }
871 /* Has this fixup's target label been defined?
872 If so, we can finalize it. */
873 else if (PREV_INSN (f->target_rtl) != 0)
874 {
b3694847 875 rtx cleanup_insns;
7629c936 876
28d81abb 877 /* If this fixup jumped into this contour from before the beginning
14a774a9
RK
878 of this contour, report an error. This code used to use
879 the first non-label insn after f->target_rtl, but that's
880 wrong since such can be added, by things like put_var_into_stack
881 and have INSN_UIDs that are out of the range of the block. */
28d81abb
RK
882 /* ??? Bug: this does not detect jumping in through intermediate
883 blocks that have stack levels or cleanups.
884 It detects only a problem with the innermost block
885 around the label. */
886 if (f->target != 0
cda26058
RK
887 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
888 || cleanup_list)
14a774a9 889 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
28d81abb 890 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
33bc3ff5 891 && ! DECL_ERROR_ISSUED (f->target))
28d81abb 892 {
ddd2d57e
RH
893 error ("%Jlabel '%D' used before containing binding contour",
894 f->target, f->target);
28d81abb 895 /* Prevent multiple errors for one label. */
33bc3ff5 896 DECL_ERROR_ISSUED (f->target) = 1;
28d81abb
RK
897 }
898
7629c936
RS
899 /* We will expand the cleanups into a sequence of their own and
900 then later on we will attach this new sequence to the insn
901 stream just ahead of the actual jump insn. */
902
903 start_sequence ();
904
023b57e6
RS
905 /* Temporarily restore the lexical context where we will
906 logically be inserting the fixup code. We do this for the
907 sake of getting the debugging information right. */
908
ae2bcd98
RS
909 lang_hooks.decls.pushlevel (0);
910 lang_hooks.decls.set_block (f->context);
7629c936
RS
911
912 /* Expand the cleanups for blocks this jump exits. */
28d81abb
RK
913 if (f->cleanup_list_list)
914 {
915 tree lists;
916 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
917 /* Marked elements correspond to blocks that have been closed.
918 Do their cleanups. */
919 if (TREE_ADDRESSABLE (lists)
920 && TREE_VALUE (lists) != 0)
7629c936 921 {
b39b8084 922 expand_cleanups (TREE_VALUE (lists), 1, 1);
7629c936
RS
923 /* Pop any pushes done in the cleanups,
924 in case function is about to return. */
925 do_pending_stack_adjust ();
926 }
28d81abb
RK
927 }
928
929 /* Restore stack level for the biggest contour that this
930 jump jumps out of. */
7393c642
RK
931 if (f->stack_level
932 && ! (f->target_rtl == return_label
c4a6c0f3
RK
933 && ((TREE_CODE (TREE_TYPE (current_function_decl))
934 == FUNCTION_TYPE)
4381f7c2 935 && (TYPE_RETURNS_STACK_DEPRESSED
c4a6c0f3 936 (TREE_TYPE (current_function_decl))))))
59257ff7 937 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
7629c936
RS
938
939 /* Finish up the sequence containing the insns which implement the
940 necessary cleanups, and then attach that whole sequence to the
941 insn stream just ahead of the actual jump insn. Attaching it
942 at that point insures that any cleanups which are in fact
943 implicit C++ object destructions (which must be executed upon
944 leaving the block) appear (to the debugger) to be taking place
945 in an area of the generated code where the object(s) being
946 destructed are still "in scope". */
947
948 cleanup_insns = get_insns ();
ae2bcd98 949 lang_hooks.decls.poplevel (1, 0, 0);
7629c936
RS
950
951 end_sequence ();
2f937369 952 emit_insn_after (cleanup_insns, f->before_jump);
7629c936 953
28d81abb
RK
954 f->before_jump = 0;
955 }
956 }
957
6bc2f582
RK
958 /* For any still-undefined labels, do the cleanups for this block now.
959 We must do this now since items in the cleanup list may go out
0f41302f 960 of scope when the block ends. */
28d81abb
RK
961 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
962 if (f->before_jump != 0
963 && PREV_INSN (f->target_rtl) == 0
964 /* Label has still not appeared. If we are exiting a block with
965 a stack level to restore, that started before the fixup,
966 mark this stack level as needing restoration
6d2f8887 967 when the fixup is later finalized. */
28d81abb 968 && thisblock != 0
6bc2f582
RK
969 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
970 means the label is undefined. That's erroneous, but possible. */
28d81abb
RK
971 && (thisblock->data.block.block_start_count
972 <= f->block_start_count))
973 {
974 tree lists = f->cleanup_list_list;
6bc2f582
RK
975 rtx cleanup_insns;
976
28d81abb
RK
977 for (; lists; lists = TREE_CHAIN (lists))
978 /* If the following elt. corresponds to our containing block
979 then the elt. must be for this block. */
980 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
6bc2f582
RK
981 {
982 start_sequence ();
ae2bcd98
RS
983 lang_hooks.decls.pushlevel (0);
984 lang_hooks.decls.set_block (f->context);
b39b8084 985 expand_cleanups (TREE_VALUE (lists), 1, 1);
f0959e58 986 do_pending_stack_adjust ();
6bc2f582 987 cleanup_insns = get_insns ();
ae2bcd98 988 lang_hooks.decls.poplevel (1, 0, 0);
6bc2f582 989 end_sequence ();
412c00dc
RK
990 if (cleanup_insns != 0)
991 f->before_jump
2f937369 992 = emit_insn_after (cleanup_insns, f->before_jump);
6bc2f582 993
e07ed33f 994 f->cleanup_list_list = TREE_CHAIN (lists);
6bc2f582 995 }
28d81abb
RK
996
997 if (stack_level)
998 f->stack_level = stack_level;
999 }
1000}
2a230e9d
BS
1001\f
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
28d81abb
RK
1011\f
1012/* Generate RTL for an asm statement (explicit assembler code).
4c46ea23
EB
1013 STRING is a STRING_CST node containing the assembler code text,
1014 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1015 insn is volatile; don't optimize it. */
28d81abb
RK
1016
1017void
46c5ad27 1018expand_asm (tree string, int vol)
28d81abb 1019{
4c46ea23
EB
1020 rtx body;
1021
1022 if (TREE_CODE (string) == ADDR_EXPR)
1023 string = TREE_OPERAND (string, 0);
1024
839ee4bc 1025 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
4c46ea23
EB
1026
1027 MEM_VOLATILE_P (body) = vol;
28d81abb 1028
4c46ea23 1029 emit_insn (body);
46c5ad27 1030
e2500fed 1031 clear_last_expr ();
28d81abb
RK
1032}
1033
40b18c0a
MM
1034/* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1035 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1036 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1037 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1038 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1039 constraint allows the use of a register operand. And, *IS_INOUT
1040 will be true if the operand is read-write, i.e., if it is used as
1041 an input as well as an output. If *CONSTRAINT_P is not in
1042 canonical form, it will be made canonical. (Note that `+' will be
14b493d6 1043 replaced with `=' as part of this process.)
40b18c0a
MM
1044
1045 Returns TRUE if all went well; FALSE if an error occurred. */
1046
1047bool
46c5ad27
AJ
1048parse_output_constraint (const char **constraint_p, int operand_num,
1049 int ninputs, int noutputs, bool *allows_mem,
1050 bool *allows_reg, bool *is_inout)
40b18c0a
MM
1051{
1052 const char *constraint = *constraint_p;
1053 const char *p;
1054
1055 /* Assume the constraint doesn't allow the use of either a register
1056 or memory. */
1057 *allows_mem = false;
1058 *allows_reg = false;
1059
1060 /* Allow the `=' or `+' to not be at the beginning of the string,
1061 since it wasn't explicitly documented that way, and there is a
1062 large body of code that puts it last. Swap the character to
1063 the front, so as not to uglify any place else. */
1064 p = strchr (constraint, '=');
1065 if (!p)
1066 p = strchr (constraint, '+');
1067
1068 /* If the string doesn't contain an `=', issue an error
1069 message. */
1070 if (!p)
1071 {
1072 error ("output operand constraint lacks `='");
1073 return false;
1074 }
1075
1076 /* If the constraint begins with `+', then the operand is both read
1077 from and written to. */
1078 *is_inout = (*p == '+');
1079
40b18c0a
MM
1080 /* Canonicalize the output constraint so that it begins with `='. */
1081 if (p != constraint || is_inout)
1082 {
1083 char *buf;
1084 size_t c_len = strlen (constraint);
1085
1086 if (p != constraint)
1087 warning ("output constraint `%c' for operand %d is not at the beginning",
1088 *p, operand_num);
1089
1090 /* Make a copy of the constraint. */
1091 buf = alloca (c_len + 1);
1092 strcpy (buf, constraint);
1093 /* Swap the first character and the `=' or `+'. */
1094 buf[p - constraint] = buf[0];
1095 /* Make sure the first character is an `='. (Until we do this,
1096 it might be a `+'.) */
1097 buf[0] = '=';
1098 /* Replace the constraint with the canonicalized string. */
1099 *constraint_p = ggc_alloc_string (buf, c_len);
1100 constraint = *constraint_p;
1101 }
1102
1103 /* Loop through the constraint string. */
97488870 1104 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
40b18c0a
MM
1105 switch (*p)
1106 {
1107 case '+':
1108 case '=':
357351e5 1109 error ("operand constraint contains incorrectly positioned '+' or '='");
40b18c0a 1110 return false;
786de7eb 1111
40b18c0a
MM
1112 case '%':
1113 if (operand_num + 1 == ninputs + noutputs)
1114 {
1115 error ("`%%' constraint used with last operand");
1116 return false;
1117 }
1118 break;
1119
1120 case 'V': case 'm': case 'o':
1121 *allows_mem = true;
1122 break;
1123
1124 case '?': case '!': case '*': case '&': case '#':
1125 case 'E': case 'F': case 'G': case 'H':
1126 case 's': case 'i': case 'n':
1127 case 'I': case 'J': case 'K': case 'L': case 'M':
1128 case 'N': case 'O': case 'P': case ',':
1129 break;
1130
1131 case '0': case '1': case '2': case '3': case '4':
1132 case '5': case '6': case '7': case '8': case '9':
84b72302 1133 case '[':
40b18c0a
MM
1134 error ("matching constraint not valid in output operand");
1135 return false;
1136
1137 case '<': case '>':
1138 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1139 excepting those that expand_call created. So match memory
1140 and hope. */
1141 *allows_mem = true;
1142 break;
1143
1144 case 'g': case 'X':
1145 *allows_reg = true;
1146 *allows_mem = true;
1147 break;
786de7eb 1148
40b18c0a
MM
1149 case 'p': case 'r':
1150 *allows_reg = true;
1151 break;
1152
1153 default:
1154 if (!ISALPHA (*p))
1155 break;
97488870 1156 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
40b18c0a 1157 *allows_reg = true;
97488870
R
1158#ifdef EXTRA_CONSTRAINT_STR
1159 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
ccfc6cc8 1160 *allows_reg = true;
97488870 1161 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
ccfc6cc8 1162 *allows_mem = true;
40b18c0a
MM
1163 else
1164 {
1165 /* Otherwise we can't assume anything about the nature of
1166 the constraint except that it isn't purely registers.
1167 Treat it like "g" and hope for the best. */
1168 *allows_reg = true;
1169 *allows_mem = true;
1170 }
1171#endif
1172 break;
1173 }
1174
1175 return true;
1176}
1177
6be2e1f8
RH
1178/* Similar, but for input constraints. */
1179
1456deaf 1180bool
46c5ad27
AJ
1181parse_input_constraint (const char **constraint_p, int input_num,
1182 int ninputs, int noutputs, int ninout,
1183 const char * const * constraints,
1184 bool *allows_mem, bool *allows_reg)
6be2e1f8
RH
1185{
1186 const char *constraint = *constraint_p;
1187 const char *orig_constraint = constraint;
1188 size_t c_len = strlen (constraint);
1189 size_t j;
f3da0ead 1190 bool saw_match = false;
6be2e1f8
RH
1191
1192 /* Assume the constraint doesn't allow the use of either
1193 a register or memory. */
1194 *allows_mem = false;
1195 *allows_reg = false;
1196
1197 /* Make sure constraint has neither `=', `+', nor '&'. */
1198
97488870 1199 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
6be2e1f8
RH
1200 switch (constraint[j])
1201 {
1202 case '+': case '=': case '&':
1203 if (constraint == orig_constraint)
1204 {
1205 error ("input operand constraint contains `%c'", constraint[j]);
1206 return false;
1207 }
1208 break;
1209
1210 case '%':
1211 if (constraint == orig_constraint
1212 && input_num + 1 == ninputs - ninout)
1213 {
1214 error ("`%%' constraint used with last operand");
1215 return false;
1216 }
1217 break;
1218
1219 case 'V': case 'm': case 'o':
1220 *allows_mem = true;
1221 break;
1222
1223 case '<': case '>':
1224 case '?': case '!': case '*': case '#':
1225 case 'E': case 'F': case 'G': case 'H':
1226 case 's': case 'i': case 'n':
1227 case 'I': case 'J': case 'K': case 'L': case 'M':
1228 case 'N': case 'O': case 'P': case ',':
1229 break;
1230
1231 /* Whether or not a numeric constraint allows a register is
1232 decided by the matching constraint, and so there is no need
1233 to do anything special with them. We must handle them in
1234 the default case, so that we don't unnecessarily force
1235 operands to memory. */
1236 case '0': case '1': case '2': case '3': case '4':
1237 case '5': case '6': case '7': case '8': case '9':
1238 {
1239 char *end;
1240 unsigned long match;
1241
f3da0ead
JM
1242 saw_match = true;
1243
6be2e1f8
RH
1244 match = strtoul (constraint + j, &end, 10);
1245 if (match >= (unsigned long) noutputs)
1246 {
1247 error ("matching constraint references invalid operand number");
1248 return false;
1249 }
1250
1251 /* Try and find the real constraint for this dup. Only do this
1252 if the matching constraint is the only alternative. */
1253 if (*end == '\0'
1254 && (j == 0 || (j == 1 && constraint[0] == '%')))
1255 {
1256 constraint = constraints[match];
1257 *constraint_p = constraint;
1258 c_len = strlen (constraint);
1259 j = 0;
97488870
R
1260 /* ??? At the end of the loop, we will skip the first part of
1261 the matched constraint. This assumes not only that the
1262 other constraint is an output constraint, but also that
1263 the '=' or '+' come first. */
6be2e1f8
RH
1264 break;
1265 }
1266 else
1267 j = end - constraint;
97488870
R
1268 /* Anticipate increment at end of loop. */
1269 j--;
6be2e1f8
RH
1270 }
1271 /* Fall through. */
1272
1273 case 'p': case 'r':
1274 *allows_reg = true;
1275 break;
1276
1277 case 'g': case 'X':
1278 *allows_reg = true;
1279 *allows_mem = true;
1280 break;
1281
1282 default:
1283 if (! ISALPHA (constraint[j]))
1284 {
1285 error ("invalid punctuation `%c' in constraint", constraint[j]);
1286 return false;
1287 }
97488870
R
1288 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1289 != NO_REGS)
6be2e1f8 1290 *allows_reg = true;
97488870
R
1291#ifdef EXTRA_CONSTRAINT_STR
1292 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
ccfc6cc8 1293 *allows_reg = true;
97488870 1294 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
ccfc6cc8 1295 *allows_mem = true;
6be2e1f8
RH
1296 else
1297 {
1298 /* Otherwise we can't assume anything about the nature of
1299 the constraint except that it isn't purely registers.
1300 Treat it like "g" and hope for the best. */
1301 *allows_reg = true;
1302 *allows_mem = true;
1303 }
1304#endif
1305 break;
1306 }
1307
f3da0ead
JM
1308 if (saw_match && !*allows_reg)
1309 warning ("matching constraint does not allow a register");
1310
6be2e1f8
RH
1311 return true;
1312}
1313
6de9cd9a
DN
1314/* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
1315 if it is an operand which must be passed in memory (i.e. an "m"
1316 constraint), false otherwise. */
1317
1318bool
1319asm_op_is_mem_input (tree input, tree expr)
1320{
1321 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
1322 tree outputs = ASM_OUTPUTS (expr);
1323 int noutputs = list_length (outputs);
1324 const char **constraints
1325 = (const char **) alloca ((noutputs) * sizeof (const char *));
1326 int i = 0;
1327 bool allows_mem, allows_reg;
1328 tree t;
1329
1330 /* Collect output constraints. */
1331 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1332 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1333
1334 /* We pass 0 for input_num, ninputs and ninout; they are only used for
1335 error checking which will be done at expand time. */
1336 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
1337 &allows_mem, &allows_reg);
1338 return (!allows_reg && allows_mem);
1339}
1340
acb5d088
HPN
1341/* Check for overlap between registers marked in CLOBBERED_REGS and
1342 anything inappropriate in DECL. Emit error and return TRUE for error,
1343 FALSE for ok. */
1344
1345static bool
46c5ad27 1346decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
acb5d088
HPN
1347{
1348 /* Conflicts between asm-declared register variables and the clobber
1349 list are not allowed. */
1350 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1351 && DECL_REGISTER (decl)
34146b94 1352 && REG_P (DECL_RTL (decl))
acb5d088
HPN
1353 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1354 {
1355 rtx reg = DECL_RTL (decl);
1356 unsigned int regno;
1357
1358 for (regno = REGNO (reg);
1359 regno < (REGNO (reg)
66fd46b6 1360 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
acb5d088
HPN
1361 regno++)
1362 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1363 {
1364 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1365 IDENTIFIER_POINTER (DECL_NAME (decl)));
1366
1367 /* Reset registerness to stop multiple errors emitted for a
1368 single variable. */
1369 DECL_REGISTER (decl) = 0;
1370 return true;
1371 }
1372 }
1373 return false;
1374}
1375
28d81abb
RK
1376/* Generate RTL for an asm statement with arguments.
1377 STRING is the instruction template.
1378 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1379 Each output or input has an expression in the TREE_VALUE and
2ec37136 1380 and a tree list in TREE_PURPOSE which in turn contains a constraint
786de7eb 1381 name in TREE_VALUE (or NULL_TREE) and a constraint string
2ec37136 1382 in TREE_PURPOSE.
28d81abb
RK
1383 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1384 that is clobbered by this insn.
1385
1386 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1387 Some elements of OUTPUTS may be replaced with trees representing temporary
1388 values. The caller should copy those temporary values to the originally
1389 specified lvalues.
1390
1391 VOL nonzero means the insn is volatile; don't optimize it. */
1392
1393void
46c5ad27 1394expand_asm_operands (tree string, tree outputs, tree inputs,
177560b2 1395 tree clobbers, int vol, location_t locus)
28d81abb 1396{
84b72302 1397 rtvec argvec, constraintvec;
28d81abb
RK
1398 rtx body;
1399 int ninputs = list_length (inputs);
1400 int noutputs = list_length (outputs);
6be2e1f8 1401 int ninout;
b4ccaa16 1402 int nclobbers;
acb5d088
HPN
1403 HARD_REG_SET clobbered_regs;
1404 int clobber_conflict_found = 0;
28d81abb 1405 tree tail;
7dc8b126 1406 tree t;
b3694847 1407 int i;
28d81abb 1408 /* Vector of RTX's of evaluated output operands. */
703ad42b
KG
1409 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1410 int *inout_opnum = alloca (noutputs * sizeof (int));
1411 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
235c5021 1412 enum machine_mode *inout_mode
703ad42b 1413 = alloca (noutputs * sizeof (enum machine_mode));
84b72302 1414 const char **constraints
703ad42b 1415 = alloca ((noutputs + ninputs) * sizeof (const char *));
1b3d8f8a 1416 int old_generating_concat_p = generating_concat_p;
28d81abb 1417
e5e809f4 1418 /* An ASM with no outputs needs to be treated as volatile, for now. */
296f8acc
JL
1419 if (noutputs == 0)
1420 vol = 1;
1421
84b72302
RH
1422 if (! check_operand_nalternatives (outputs, inputs))
1423 return;
1424
7dc8b126
JM
1425 string = resolve_asm_operand_names (string, outputs, inputs);
1426
1427 /* Collect constraints. */
1428 i = 0;
1429 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1430 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1431 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1432 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
84b72302 1433
57bcb97a
RH
1434 /* Sometimes we wish to automatically clobber registers across an asm.
1435 Case in point is when the i386 backend moved from cc0 to a hard reg --
f63d1bf7 1436 maintaining source-level compatibility means automatically clobbering
57bcb97a 1437 the flags register. */
67dfe110 1438 clobbers = targetm.md_asm_clobbers (clobbers);
57bcb97a 1439
b4ccaa16
RS
1440 /* Count the number of meaningful clobbered registers, ignoring what
1441 we would ignore later. */
1442 nclobbers = 0;
acb5d088 1443 CLEAR_HARD_REG_SET (clobbered_regs);
b4ccaa16
RS
1444 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1445 {
47ee9bcb 1446 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
14a774a9 1447
c09e6498
RS
1448 i = decode_reg_name (regname);
1449 if (i >= 0 || i == -4)
b4ccaa16 1450 ++nclobbers;
7859e3ac
DE
1451 else if (i == -2)
1452 error ("unknown register name `%s' in `asm'", regname);
acb5d088
HPN
1453
1454 /* Mark clobbered registers. */
1455 if (i >= 0)
e54b4cae
EB
1456 {
1457 /* Clobbering the PIC register is an error */
fc555370 1458 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
e54b4cae
EB
1459 {
1460 error ("PIC register `%s' clobbered in `asm'", regname);
1461 return;
1462 }
1463
1464 SET_HARD_REG_BIT (clobbered_regs, i);
1465 }
b4ccaa16
RS
1466 }
1467
e2500fed 1468 clear_last_expr ();
28d81abb 1469
6be2e1f8
RH
1470 /* First pass over inputs and outputs checks validity and sets
1471 mark_addressable if needed. */
1472
1473 ninout = 0;
28d81abb
RK
1474 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1475 {
1476 tree val = TREE_VALUE (tail);
b50a024d 1477 tree type = TREE_TYPE (val);
6be2e1f8 1478 const char *constraint;
40b18c0a
MM
1479 bool is_inout;
1480 bool allows_reg;
1481 bool allows_mem;
28d81abb
RK
1482
1483 /* If there's an erroneous arg, emit no insn. */
40b18c0a 1484 if (type == error_mark_node)
28d81abb
RK
1485 return;
1486
40b18c0a
MM
1487 /* Try to parse the output constraint. If that fails, there's
1488 no point in going further. */
6be2e1f8
RH
1489 constraint = constraints[i];
1490 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1491 &allows_mem, &allows_reg, &is_inout))
1492 return;
1493
1494 if (! allows_reg
1495 && (allows_mem
1496 || is_inout
1497 || (DECL_P (val)
1498 && GET_CODE (DECL_RTL (val)) == REG
1499 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
ae2bcd98 1500 lang_hooks.mark_addressable (val);
6be2e1f8
RH
1501
1502 if (is_inout)
1503 ninout++;
1504 }
1505
1506 ninputs += ninout;
1507 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1508 {
1509 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1510 return;
1511 }
1512
1513 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1514 {
1515 bool allows_reg, allows_mem;
1516 const char *constraint;
1517
1518 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1519 would get VOIDmode and that could cause a crash in reload. */
1520 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1521 return;
1522
1523 constraint = constraints[i + noutputs];
1524 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1525 constraints, &allows_mem, &allows_reg))
40b18c0a 1526 return;
d09a75ae 1527
6be2e1f8 1528 if (! allows_reg && allows_mem)
ae2bcd98 1529 lang_hooks.mark_addressable (TREE_VALUE (tail));
6be2e1f8
RH
1530 }
1531
1532 /* Second pass evaluates arguments. */
1533
1534 ninout = 0;
1535 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1536 {
1537 tree val = TREE_VALUE (tail);
1538 tree type = TREE_TYPE (val);
1539 bool is_inout;
1540 bool allows_reg;
1541 bool allows_mem;
5b50aa9d 1542 rtx op;
6be2e1f8
RH
1543
1544 if (!parse_output_constraint (&constraints[i], i, ninputs,
1545 noutputs, &allows_mem, &allows_reg,
1546 &is_inout))
1547 abort ();
1548
d09a75ae
RK
1549 /* If an output operand is not a decl or indirect ref and our constraint
1550 allows a register, make a temporary to act as an intermediate.
1551 Make the asm insn write into that, then our caller will copy it to
1552 the real output operand. Likewise for promoted variables. */
28d81abb 1553
1b3d8f8a
GK
1554 generating_concat_p = 0;
1555
947255ed 1556 real_output_rtx[i] = NULL_RTX;
1afbe1c4
RH
1557 if ((TREE_CODE (val) == INDIRECT_REF
1558 && allows_mem)
2f939d94 1559 || (DECL_P (val)
1afbe1c4 1560 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
b50a024d 1561 && ! (GET_CODE (DECL_RTL (val)) == REG
d09a75ae 1562 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
11579f33 1563 || ! allows_reg
2a230e9d 1564 || is_inout)
d09a75ae 1565 {
5b50aa9d
RH
1566 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1567 if (GET_CODE (op) == MEM)
1568 op = validize_mem (op);
d09a75ae 1569
5b50aa9d 1570 if (! allows_reg && GET_CODE (op) != MEM)
d09a75ae 1571 error ("output number %d not directly addressable", i);
5b50aa9d
RH
1572 if ((! allows_mem && GET_CODE (op) == MEM)
1573 || GET_CODE (op) == CONCAT)
947255ed 1574 {
5b50aa9d
RH
1575 real_output_rtx[i] = protect_from_queue (op, 1);
1576 op = gen_reg_rtx (GET_MODE (op));
11579f33 1577 if (is_inout)
5b50aa9d 1578 emit_move_insn (op, real_output_rtx[i]);
947255ed 1579 }
d09a75ae 1580 }
b50a024d 1581 else
e619bb8d 1582 {
5b50aa9d
RH
1583 op = assign_temp (type, 0, 0, 1);
1584 op = validize_mem (op);
1585 TREE_VALUE (tail) = make_tree (type, op);
b50a024d 1586 }
5b50aa9d 1587 output_rtx[i] = op;
235c5021 1588
1b3d8f8a
GK
1589 generating_concat_p = old_generating_concat_p;
1590
2a230e9d 1591 if (is_inout)
235c5021 1592 {
6be2e1f8 1593 inout_mode[ninout] = TYPE_MODE (type);
235c5021
RK
1594 inout_opnum[ninout++] = i;
1595 }
acb5d088
HPN
1596
1597 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1598 clobber_conflict_found = 1;
28d81abb
RK
1599 }
1600
84b72302
RH
1601 /* Make vectors for the expression-rtx, constraint strings,
1602 and named operands. */
28d81abb
RK
1603
1604 argvec = rtvec_alloc (ninputs);
84b72302 1605 constraintvec = rtvec_alloc (ninputs);
28d81abb 1606
6462bb43
AO
1607 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1608 : GET_MODE (output_rtx[0])),
839ee4bc 1609 TREE_STRING_POINTER (string),
84b72302 1610 empty_string, 0, argvec, constraintvec,
177560b2 1611 locus.file, locus.line);
c85f7c16 1612
78418280 1613 MEM_VOLATILE_P (body) = vol;
28d81abb
RK
1614
1615 /* Eval the inputs and put them into ARGVEC.
1616 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1617
84b72302 1618 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
28d81abb 1619 {
6be2e1f8
RH
1620 bool allows_reg, allows_mem;
1621 const char *constraint;
1622 tree val, type;
1f06ee8d 1623 rtx op;
28d81abb 1624
6be2e1f8
RH
1625 constraint = constraints[i + noutputs];
1626 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1627 constraints, &allows_mem, &allows_reg))
1628 abort ();
2a230e9d 1629
6be2e1f8 1630 generating_concat_p = 0;
65fed0cb 1631
6be2e1f8
RH
1632 val = TREE_VALUE (tail);
1633 type = TREE_TYPE (val);
017e1b43
RH
1634 op = expand_expr (val, NULL_RTX, VOIDmode,
1635 (allows_mem && !allows_reg
1636 ? EXPAND_MEMORY : EXPAND_NORMAL));
65fed0cb 1637
1b3d8f8a 1638 /* Never pass a CONCAT to an ASM. */
1b3d8f8a
GK
1639 if (GET_CODE (op) == CONCAT)
1640 op = force_reg (GET_MODE (op), op);
5b50aa9d
RH
1641 else if (GET_CODE (op) == MEM)
1642 op = validize_mem (op);
1b3d8f8a 1643
1afbe1c4 1644 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1645 {
11579f33 1646 if (allows_reg)
6be2e1f8 1647 op = force_reg (TYPE_MODE (type), op);
11579f33 1648 else if (!allows_mem)
84b72302
RH
1649 warning ("asm operand %d probably doesn't match constraints",
1650 i + noutputs);
d50ad6af 1651 else if (GET_CODE (op) == MEM)
6be2e1f8 1652 {
d50ad6af
RH
1653 /* We won't recognize either volatile memory or memory
1654 with a queued address as available a memory_operand
1655 at this point. Ignore it: clearly this *is* a memory. */
6be2e1f8 1656 }
1f06ee8d 1657 else
017e1b43 1658 {
71ed1fdb
RH
1659 warning ("use of memory input without lvalue in "
1660 "asm operand %d is deprecated", i + noutputs);
017e1b43
RH
1661
1662 if (CONSTANT_P (op))
1663 {
9c858681
RS
1664 rtx mem = force_const_mem (TYPE_MODE (type), op);
1665 if (mem)
1666 op = validize_mem (mem);
1667 else
1668 op = force_reg (TYPE_MODE (type), op);
017e1b43 1669 }
9c858681
RS
1670 if (GET_CODE (op) == REG
1671 || GET_CODE (op) == SUBREG
1672 || GET_CODE (op) == ADDRESSOF
1673 || GET_CODE (op) == CONCAT)
017e1b43
RH
1674 {
1675 tree qual_type = build_qualified_type (type,
1676 (TYPE_QUALS (type)
1677 | TYPE_QUAL_CONST));
1678 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1679 memloc = validize_mem (memloc);
1680 emit_move_insn (memloc, op);
1681 op = memloc;
1682 }
1683 }
65fed0cb 1684 }
6be2e1f8 1685
1b3d8f8a 1686 generating_concat_p = old_generating_concat_p;
6462bb43 1687 ASM_OPERANDS_INPUT (body, i) = op;
2a230e9d 1688
6462bb43 1689 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
839ee4bc 1690 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
acb5d088
HPN
1691
1692 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1693 clobber_conflict_found = 1;
28d81abb
RK
1694 }
1695
14a774a9
RK
1696 /* Protect all the operands from the queue now that they have all been
1697 evaluated. */
28d81abb 1698
1b3d8f8a
GK
1699 generating_concat_p = 0;
1700
235c5021 1701 for (i = 0; i < ninputs - ninout; i++)
6462bb43
AO
1702 ASM_OPERANDS_INPUT (body, i)
1703 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
28d81abb
RK
1704
1705 for (i = 0; i < noutputs; i++)
1706 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1707
4381f7c2 1708 /* For in-out operands, copy output rtx to input rtx. */
235c5021
RK
1709 for (i = 0; i < ninout; i++)
1710 {
235c5021 1711 int j = inout_opnum[i];
84b72302 1712 char buffer[16];
235c5021 1713
6462bb43 1714 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
235c5021 1715 = output_rtx[j];
84b72302
RH
1716
1717 sprintf (buffer, "%d", j);
6462bb43 1718 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
485bad26 1719 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
235c5021
RK
1720 }
1721
1b3d8f8a
GK
1722 generating_concat_p = old_generating_concat_p;
1723
28d81abb 1724 /* Now, for each output, construct an rtx
84b72302
RH
1725 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1726 ARGVEC CONSTRAINTS OPNAMES))
28d81abb
RK
1727 If there is more than one, put them inside a PARALLEL. */
1728
1729 if (noutputs == 1 && nclobbers == 0)
1730 {
839ee4bc 1731 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
4977bab6 1732 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1733 }
14a774a9 1734
28d81abb
RK
1735 else if (noutputs == 0 && nclobbers == 0)
1736 {
1737 /* No output operands: put in a raw ASM_OPERANDS rtx. */
4977bab6 1738 emit_insn (body);
28d81abb 1739 }
14a774a9 1740
28d81abb
RK
1741 else
1742 {
1743 rtx obody = body;
1744 int num = noutputs;
14a774a9
RK
1745
1746 if (num == 0)
1747 num = 1;
1748
38a448ca 1749 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
28d81abb
RK
1750
1751 /* For each output operand, store a SET. */
28d81abb
RK
1752 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1753 {
1754 XVECEXP (body, 0, i)
38a448ca
RH
1755 = gen_rtx_SET (VOIDmode,
1756 output_rtx[i],
c5c76735 1757 gen_rtx_ASM_OPERANDS
6462bb43 1758 (GET_MODE (output_rtx[i]),
839ee4bc
RO
1759 TREE_STRING_POINTER (string),
1760 constraints[i], i, argvec, constraintvec,
177560b2 1761 locus.file, locus.line));
c5c76735 1762
28d81abb
RK
1763 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1764 }
1765
1766 /* If there are no outputs (but there are some clobbers)
1767 store the bare ASM_OPERANDS into the PARALLEL. */
1768
1769 if (i == 0)
1770 XVECEXP (body, 0, i++) = obody;
1771
1772 /* Store (clobber REG) for each clobbered register specified. */
1773
b4ccaa16 1774 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1775 {
47ee9bcb 1776 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1777 int j = decode_reg_name (regname);
acb5d088 1778 rtx clobbered_reg;
28d81abb 1779
b4ac57ab 1780 if (j < 0)
28d81abb 1781 {
c09e6498 1782 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1783 continue;
1784
c09e6498
RS
1785 if (j == -4) /* `memory', don't cache memory across asm */
1786 {
bffc6177 1787 XVECEXP (body, 0, i++)
38a448ca 1788 = gen_rtx_CLOBBER (VOIDmode,
c5c76735
JL
1789 gen_rtx_MEM
1790 (BLKmode,
1791 gen_rtx_SCRATCH (VOIDmode)));
c09e6498
RS
1792 continue;
1793 }
1794
956d6950 1795 /* Ignore unknown register, error already signaled. */
cc1f5387 1796 continue;
28d81abb
RK
1797 }
1798
1799 /* Use QImode since that's guaranteed to clobber just one reg. */
acb5d088
HPN
1800 clobbered_reg = gen_rtx_REG (QImode, j);
1801
1802 /* Do sanity check for overlap between clobbers and respectively
1803 input and outputs that hasn't been handled. Such overlap
1804 should have been detected and reported above. */
1805 if (!clobber_conflict_found)
1806 {
1807 int opno;
1808
1809 /* We test the old body (obody) contents to avoid tripping
1810 over the under-construction body. */
1811 for (opno = 0; opno < noutputs; opno++)
1812 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1813 internal_error ("asm clobber conflict with output operand");
1814
1815 for (opno = 0; opno < ninputs - ninout; opno++)
1816 if (reg_overlap_mentioned_p (clobbered_reg,
1817 ASM_OPERANDS_INPUT (obody, opno)))
1818 internal_error ("asm clobber conflict with input operand");
1819 }
1820
b4ccaa16 1821 XVECEXP (body, 0, i++)
acb5d088 1822 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
28d81abb
RK
1823 }
1824
4977bab6 1825 emit_insn (body);
28d81abb
RK
1826 }
1827
947255ed
RH
1828 /* For any outputs that needed reloading into registers, spill them
1829 back to where they belong. */
1830 for (i = 0; i < noutputs; ++i)
1831 if (real_output_rtx[i])
1832 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1833
28d81abb
RK
1834 free_temp_slots ();
1835}
84b72302 1836
6de9cd9a
DN
1837void
1838expand_asm_expr (tree exp)
1839{
1840 int noutputs, i;
1841 tree outputs, tail;
1842 tree *o;
1843
1844 if (ASM_INPUT_P (exp))
1845 {
1846 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1847 return;
1848 }
1849
1850 outputs = ASM_OUTPUTS (exp);
1851 noutputs = list_length (outputs);
1852 /* o[I] is the place that output number I should be written. */
1853 o = (tree *) alloca (noutputs * sizeof (tree));
1854
1855 /* Record the contents of OUTPUTS before it is modified. */
1856 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1857 o[i] = TREE_VALUE (tail);
1858
1859 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1860 OUTPUTS some trees for where the values were actually stored. */
1861 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1862 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1863 input_location);
1864
1865 /* Copy all the intermediate outputs into the specified outputs. */
1866 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1867 {
1868 if (o[i] != TREE_VALUE (tail))
1869 {
1870 expand_assignment (o[i], TREE_VALUE (tail), 0);
1871 free_temp_slots ();
1872
1873 /* Restore the original value so that it's correct the next
1874 time we expand this function. */
1875 TREE_VALUE (tail) = o[i];
1876 }
1877 }
1878
1879 /* Those MODIFY_EXPRs could do autoincrements. */
1880 emit_queue ();
1881}
1882
84b72302
RH
1883/* A subroutine of expand_asm_operands. Check that all operands have
1884 the same number of alternatives. Return true if so. */
1885
1886static bool
46c5ad27 1887check_operand_nalternatives (tree outputs, tree inputs)
84b72302
RH
1888{
1889 if (outputs || inputs)
1890 {
1891 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1892 int nalternatives
1893 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1894 tree next = inputs;
1895
1896 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1897 {
1898 error ("too many alternatives in `asm'");
1899 return false;
1900 }
1901
1902 tmp = outputs;
1903 while (tmp)
1904 {
1905 const char *constraint
1906 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1907
1908 if (n_occurrences (',', constraint) != nalternatives)
1909 {
1910 error ("operand constraints for `asm' differ in number of alternatives");
1911 return false;
1912 }
1913
1914 if (TREE_CHAIN (tmp))
1915 tmp = TREE_CHAIN (tmp);
1916 else
1917 tmp = next, next = 0;
1918 }
1919 }
1920
1921 return true;
1922}
1923
1924/* A subroutine of expand_asm_operands. Check that all operand names
1925 are unique. Return true if so. We rely on the fact that these names
1926 are identifiers, and so have been canonicalized by get_identifier,
1927 so all we need are pointer comparisons. */
1928
1929static bool
46c5ad27 1930check_unique_operand_names (tree outputs, tree inputs)
84b72302
RH
1931{
1932 tree i, j;
1933
1934 for (i = outputs; i ; i = TREE_CHAIN (i))
1935 {
1936 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1937 if (! i_name)
1938 continue;
1939
1940 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1941 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1942 goto failure;
1943 }
1944
1945 for (i = inputs; i ; i = TREE_CHAIN (i))
1946 {
1947 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1948 if (! i_name)
1949 continue;
1950
1951 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1952 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1953 goto failure;
1954 for (j = outputs; j ; j = TREE_CHAIN (j))
fc552851 1955 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1956 goto failure;
1957 }
1958
1959 return true;
1960
1961 failure:
1962 error ("duplicate asm operand name '%s'",
fc552851 1963 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
84b72302
RH
1964 return false;
1965}
1966
1967/* A subroutine of expand_asm_operands. Resolve the names of the operands
1968 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1969 STRING and in the constraints to those numbers. */
1970
7dc8b126
JM
1971tree
1972resolve_asm_operand_names (tree string, tree outputs, tree inputs)
84b72302 1973{
7dc8b126 1974 char *buffer;
84b72302 1975 char *p;
40209195 1976 const char *c;
84b72302
RH
1977 tree t;
1978
1456deaf
JM
1979 check_unique_operand_names (outputs, inputs);
1980
7dc8b126
JM
1981 /* Substitute [<name>] in input constraint strings. There should be no
1982 named operands in output constraints. */
1983 for (t = inputs; t ; t = TREE_CHAIN (t))
1984 {
40209195 1985 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
7dc8b126
JM
1986 if (strchr (c, '[') != NULL)
1987 {
1988 p = buffer = xstrdup (c);
1989 while ((p = strchr (p, '[')) != NULL)
1990 p = resolve_operand_name_1 (p, outputs, inputs);
1991 TREE_VALUE (TREE_PURPOSE (t))
1992 = build_string (strlen (buffer), buffer);
1993 free (buffer);
1994 }
1995 }
1996
40209195
JM
1997 /* Now check for any needed substitutions in the template. */
1998 c = TREE_STRING_POINTER (string);
1999 while ((c = strchr (c, '%')) != NULL)
84b72302 2000 {
40209195
JM
2001 if (c[1] == '[')
2002 break;
2003 else if (ISALPHA (c[1]) && c[2] == '[')
2004 break;
7abcb63a
RH
2005 else
2006 {
40209195 2007 c += 1;
7abcb63a
RH
2008 continue;
2009 }
84b72302
RH
2010 }
2011
40209195
JM
2012 if (c)
2013 {
2014 /* OK, we need to make a copy so we can perform the substitutions.
2015 Assume that we will not need extra space--we get to remove '['
2016 and ']', which means we cannot have a problem until we have more
2017 than 999 operands. */
2018 buffer = xstrdup (TREE_STRING_POINTER (string));
2019 p = buffer + (c - TREE_STRING_POINTER (string));
2020
2021 while ((p = strchr (p, '%')) != NULL)
2022 {
2023 if (p[1] == '[')
2024 p += 1;
2025 else if (ISALPHA (p[1]) && p[2] == '[')
2026 p += 2;
2027 else
2028 {
2029 p += 1;
2030 continue;
2031 }
2032
2033 p = resolve_operand_name_1 (p, outputs, inputs);
2034 }
2035
2036 string = build_string (strlen (buffer), buffer);
2037 free (buffer);
2038 }
84b72302 2039
84b72302
RH
2040 return string;
2041}
2042
2043/* A subroutine of resolve_operand_names. P points to the '[' for a
2044 potential named operand of the form [<name>]. In place, replace
786de7eb 2045 the name and brackets with a number. Return a pointer to the
84b72302
RH
2046 balance of the string after substitution. */
2047
2048static char *
46c5ad27 2049resolve_operand_name_1 (char *p, tree outputs, tree inputs)
84b72302
RH
2050{
2051 char *q;
2052 int op;
2053 tree t;
2054 size_t len;
2055
2056 /* Collect the operand name. */
2057 q = strchr (p, ']');
2058 if (!q)
2059 {
2060 error ("missing close brace for named operand");
2061 return strchr (p, '\0');
2062 }
2063 len = q - p - 1;
2064
2065 /* Resolve the name to a number. */
2066 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2067 {
fc552851
RS
2068 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2069 if (name)
edd1967d 2070 {
fc552851 2071 const char *c = TREE_STRING_POINTER (name);
edd1967d
RH
2072 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2073 goto found;
2074 }
84b72302
RH
2075 }
2076 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2077 {
fc552851
RS
2078 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2079 if (name)
edd1967d 2080 {
fc552851 2081 const char *c = TREE_STRING_POINTER (name);
edd1967d
RH
2082 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2083 goto found;
2084 }
84b72302
RH
2085 }
2086
2087 *q = '\0';
2088 error ("undefined named operand '%s'", p + 1);
2089 op = 0;
2090 found:
2091
2092 /* Replace the name with the number. Unfortunately, not all libraries
2093 get the return value of sprintf correct, so search for the end of the
2094 generated string by hand. */
2095 sprintf (p, "%d", op);
2096 p = strchr (p, '\0');
2097
2098 /* Verify the no extra buffer space assumption. */
2099 if (p > q)
2100 abort ();
2101
2102 /* Shift the rest of the buffer down to fill the gap. */
2103 memmove (p, q + 1, strlen (q + 1) + 1);
2104
2105 return p;
2106}
28d81abb
RK
2107\f
2108/* Generate RTL to evaluate the expression EXP
1574ef13
AO
2109 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2110 Provided just for backward-compatibility. expand_expr_stmt_value()
2111 should be used for new code. */
28d81abb
RK
2112
2113void
46c5ad27 2114expand_expr_stmt (tree exp)
28d81abb 2115{
b0832fe1 2116 expand_expr_stmt_value (exp, -1, 1);
1574ef13
AO
2117}
2118
2119/* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2120 whether to (1) save the value of the expression, (0) discard it or
2121 (-1) use expr_stmts_for_value to tell. The use of -1 is
2122 deprecated, and retained only for backward compatibility. */
2123
2124void
46c5ad27 2125expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
1574ef13
AO
2126{
2127 rtx value;
2128 tree type;
0fab64a3 2129 rtx alt_rtl = NULL;
1574ef13
AO
2130
2131 if (want_value == -1)
2132 want_value = expr_stmts_for_value != 0;
2133
65ca2d60 2134 /* If -Wextra, warn about statements with no side effects,
28d81abb 2135 except for an explicit cast to void (e.g. for assert()), and
b0832fe1
JJ
2136 except for last statement in ({...}) where they may be useful. */
2137 if (! want_value
2138 && (expr_stmts_for_value == 0 || ! maybe_last)
e895113a
NS
2139 && exp != error_mark_node
2140 && warn_unused_value)
28d81abb 2141 {
e895113a 2142 if (TREE_SIDE_EFFECTS (exp))
28d81abb 2143 warn_if_unused_value (exp);
6de9cd9a 2144 else if (!VOID_TYPE_P (TREE_TYPE (exp)) && !TREE_NO_WARNING (exp))
e895113a 2145 warning ("%Hstatement with no effect", &emit_locus);
28d81abb 2146 }
b6ec8c5f
RK
2147
2148 /* If EXP is of function type and we are expanding statements for
2149 value, convert it to pointer-to-function. */
1574ef13 2150 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
b6ec8c5f
RK
2151 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2152
8f17b5c5
MM
2153 /* The call to `expand_expr' could cause last_expr_type and
2154 last_expr_value to get reset. Therefore, we set last_expr_value
2155 and last_expr_type *after* calling expand_expr. */
0fab64a3
MM
2156 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2157 VOIDmode, 0, &alt_rtl);
1574ef13 2158 type = TREE_TYPE (exp);
28d81abb
RK
2159
2160 /* If all we do is reference a volatile value in memory,
2161 copy it to a register to be sure it is actually touched. */
1574ef13 2162 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
28d81abb 2163 {
1574ef13 2164 if (TYPE_MODE (type) == VOIDmode)
6a5bbbe6 2165 ;
1574ef13
AO
2166 else if (TYPE_MODE (type) != BLKmode)
2167 value = copy_to_reg (value);
28d81abb 2168 else
ddbe9812
RS
2169 {
2170 rtx lab = gen_label_rtx ();
4381f7c2 2171
ddbe9812 2172 /* Compare the value with itself to reference it. */
1574ef13
AO
2173 emit_cmp_and_jump_insns (value, value, EQ,
2174 expand_expr (TYPE_SIZE (type),
c5d5d461 2175 NULL_RTX, VOIDmode, 0),
d43e0b7d 2176 BLKmode, 0, lab);
ddbe9812
RS
2177 emit_label (lab);
2178 }
28d81abb
RK
2179 }
2180
2181 /* If this expression is part of a ({...}) and is in memory, we may have
2182 to preserve temporaries. */
1574ef13 2183 preserve_temp_slots (value);
28d81abb
RK
2184
2185 /* Free any temporaries used to evaluate this expression. Any temporary
2186 used as a result of this expression will already have been preserved
2187 above. */
2188 free_temp_slots ();
2189
1574ef13
AO
2190 if (want_value)
2191 {
2192 last_expr_value = value;
0fab64a3 2193 last_expr_alt_rtl = alt_rtl;
1574ef13
AO
2194 last_expr_type = type;
2195 }
2196
28d81abb
RK
2197 emit_queue ();
2198}
2199
2200/* Warn if EXP contains any computations whose results are not used.
2201 Return 1 if a warning is printed; 0 otherwise. */
2202
150a992a 2203int
46c5ad27 2204warn_if_unused_value (tree exp)
28d81abb
RK
2205{
2206 if (TREE_USED (exp))
2207 return 0;
2208
9790cefd
RH
2209 /* Don't warn about void constructs. This includes casting to void,
2210 void function calls, and statement expressions with a final cast
2211 to void. */
2212 if (VOID_TYPE_P (TREE_TYPE (exp)))
2213 return 0;
2214
28d81abb
RK
2215 switch (TREE_CODE (exp))
2216 {
2217 case PREINCREMENT_EXPR:
2218 case POSTINCREMENT_EXPR:
2219 case PREDECREMENT_EXPR:
2220 case POSTDECREMENT_EXPR:
2221 case MODIFY_EXPR:
2222 case INIT_EXPR:
2223 case TARGET_EXPR:
2224 case CALL_EXPR:
28d81abb 2225 case RTL_EXPR:
81797aba 2226 case TRY_CATCH_EXPR:
28d81abb
RK
2227 case WITH_CLEANUP_EXPR:
2228 case EXIT_EXPR:
28d81abb
RK
2229 return 0;
2230
2231 case BIND_EXPR:
2232 /* For a binding, warn if no side effect within it. */
2233 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2234
de73f171 2235 case SAVE_EXPR:
6de9cd9a 2236 return warn_if_unused_value (TREE_OPERAND (exp, 0));
de73f171 2237
28d81abb
RK
2238 case TRUTH_ORIF_EXPR:
2239 case TRUTH_ANDIF_EXPR:
2240 /* In && or ||, warn if 2nd operand has no side effect. */
2241 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2242
2243 case COMPOUND_EXPR:
6de9cd9a 2244 if (TREE_NO_WARNING (exp))
a646a211 2245 return 0;
28d81abb
RK
2246 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2247 return 1;
4d23e509
RS
2248 /* Let people do `(foo (), 0)' without a warning. */
2249 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2250 return 0;
28d81abb
RK
2251 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2252
2253 case NOP_EXPR:
2254 case CONVERT_EXPR:
b4ac57ab 2255 case NON_LVALUE_EXPR:
28d81abb 2256 /* Don't warn about conversions not explicit in the user's program. */
6de9cd9a 2257 if (TREE_NO_WARNING (exp))
28d81abb
RK
2258 return 0;
2259 /* Assignment to a cast usually results in a cast of a modify.
55cd1c09
JW
2260 Don't complain about that. There can be an arbitrary number of
2261 casts before the modify, so we must loop until we find the first
2262 non-cast expression and then test to see if that is a modify. */
2263 {
2264 tree tem = TREE_OPERAND (exp, 0);
2265
2266 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2267 tem = TREE_OPERAND (tem, 0);
2268
de73f171
RK
2269 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2270 || TREE_CODE (tem) == CALL_EXPR)
55cd1c09
JW
2271 return 0;
2272 }
7133e992 2273 goto maybe_warn;
28d81abb 2274
d1e1adfb
JM
2275 case INDIRECT_REF:
2276 /* Don't warn about automatic dereferencing of references, since
2277 the user cannot control it. */
2278 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2279 return warn_if_unused_value (TREE_OPERAND (exp, 0));
4381f7c2
KH
2280 /* Fall through. */
2281
28d81abb 2282 default:
ddbe9812 2283 /* Referencing a volatile value is a side effect, so don't warn. */
2f939d94 2284 if ((DECL_P (exp)
ddbe9812
RS
2285 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2286 && TREE_THIS_VOLATILE (exp))
2287 return 0;
8d5e6e25
RK
2288
2289 /* If this is an expression which has no operands, there is no value
2290 to be unused. There are no such language-independent codes,
2291 but front ends may define such. */
2292 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2293 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2294 return 0;
2295
7133e992
JJ
2296 maybe_warn:
2297 /* If this is an expression with side effects, don't warn. */
2298 if (TREE_SIDE_EFFECTS (exp))
2299 return 0;
2300
c8608cd6 2301 warning ("%Hvalue computed is not used", &emit_locus);
28d81abb
RK
2302 return 1;
2303 }
2304}
2305
2306/* Clear out the memory of the last expression evaluated. */
2307
2308void
46c5ad27 2309clear_last_expr (void)
28d81abb 2310{
e2500fed
GK
2311 last_expr_type = NULL_TREE;
2312 last_expr_value = NULL_RTX;
0fab64a3 2313 last_expr_alt_rtl = NULL_RTX;
28d81abb
RK
2314}
2315
b2123dc0
MM
2316/* Begin a statement-expression, i.e., a series of statements which
2317 may return a value. Return the RTL_EXPR for this statement expr.
2318 The caller must save that value and pass it to
2319 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2320 in the statement-expression are deallocated at the end of the
2321 expression. */
28d81abb
RK
2322
2323tree
46c5ad27 2324expand_start_stmt_expr (int has_scope)
28d81abb 2325{
ca695ac9
JB
2326 tree t;
2327
28d81abb
RK
2328 /* Make the RTL_EXPR node temporary, not momentary,
2329 so that rtl_expr_chain doesn't become garbage. */
ca695ac9 2330 t = make_node (RTL_EXPR);
33c6ab80 2331 do_pending_stack_adjust ();
b2123dc0
MM
2332 if (has_scope)
2333 start_sequence_for_rtl_expr (t);
2334 else
2335 start_sequence ();
28d81abb
RK
2336 NO_DEFER_POP;
2337 expr_stmts_for_value++;
2338 return t;
2339}
2340
2341/* Restore the previous state at the end of a statement that returns a value.
2342 Returns a tree node representing the statement's value and the
2343 insns to compute the value.
2344
2345 The nodes of that expression have been freed by now, so we cannot use them.
2346 But we don't want to do that anyway; the expression has already been
2347 evaluated and now we just want to use the value. So generate a RTL_EXPR
2348 with the proper type and RTL value.
2349
2350 If the last substatement was not an expression,
2351 return something with type `void'. */
2352
2353tree
46c5ad27 2354expand_end_stmt_expr (tree t)
28d81abb
RK
2355{
2356 OK_DEFER_POP;
2357
1574ef13 2358 if (! last_expr_value || ! last_expr_type)
28d81abb 2359 {
28d81abb 2360 last_expr_value = const0_rtx;
0fab64a3 2361 last_expr_alt_rtl = NULL_RTX;
1574ef13 2362 last_expr_type = void_type_node;
28d81abb 2363 }
28d81abb
RK
2364 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2365 /* Remove any possible QUEUED. */
2366 last_expr_value = protect_from_queue (last_expr_value, 0);
2367
2368 emit_queue ();
2369
2370 TREE_TYPE (t) = last_expr_type;
2371 RTL_EXPR_RTL (t) = last_expr_value;
0fab64a3 2372 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
28d81abb
RK
2373 RTL_EXPR_SEQUENCE (t) = get_insns ();
2374
2375 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2376
2377 end_sequence ();
2378
2379 /* Don't consider deleting this expr or containing exprs at tree level. */
2380 TREE_SIDE_EFFECTS (t) = 1;
2381 /* Propagate volatility of the actual RTL expr. */
2382 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2383
e2500fed 2384 clear_last_expr ();
28d81abb
RK
2385 expr_stmts_for_value--;
2386
2387 return t;
2388}
2389\f
28d81abb
RK
2390/* Generate RTL for the start of an if-then. COND is the expression
2391 whose truth should be tested.
2392
2393 If EXITFLAG is nonzero, this conditional is visible to
2394 `exit_something'. */
2395
2396void
46c5ad27 2397expand_start_cond (tree cond, int exitflag)
28d81abb
RK
2398{
2399 struct nesting *thiscond = ALLOC_NESTING ();
2400
2401 /* Make an entry on cond_stack for the cond we are entering. */
2402
e2500fed 2403 thiscond->desc = COND_NESTING;
28d81abb
RK
2404 thiscond->next = cond_stack;
2405 thiscond->all = nesting_stack;
2406 thiscond->depth = ++nesting_depth;
2407 thiscond->data.cond.next_label = gen_label_rtx ();
2408 /* Before we encounter an `else', we don't need a separate exit label
2409 unless there are supposed to be exit statements
2410 to exit this conditional. */
2411 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2412 thiscond->data.cond.endif_label = thiscond->exit_label;
2413 cond_stack = thiscond;
2414 nesting_stack = thiscond;
2415
b93a436e 2416 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
2417}
2418
2419/* Generate RTL between then-clause and the elseif-clause
2420 of an if-then-elseif-.... */
2421
2422void
46c5ad27 2423expand_start_elseif (tree cond)
28d81abb
RK
2424{
2425 if (cond_stack->data.cond.endif_label == 0)
2426 cond_stack->data.cond.endif_label = gen_label_rtx ();
2427 emit_jump (cond_stack->data.cond.endif_label);
2428 emit_label (cond_stack->data.cond.next_label);
2429 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2430 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
2431}
2432
2433/* Generate RTL between the then-clause and the else-clause
2434 of an if-then-else. */
2435
2436void
46c5ad27 2437expand_start_else (void)
28d81abb
RK
2438{
2439 if (cond_stack->data.cond.endif_label == 0)
2440 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2441
28d81abb
RK
2442 emit_jump (cond_stack->data.cond.endif_label);
2443 emit_label (cond_stack->data.cond.next_label);
0f41302f 2444 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
2445}
2446
d947ba59
RK
2447/* After calling expand_start_else, turn this "else" into an "else if"
2448 by providing another condition. */
2449
2450void
46c5ad27 2451expand_elseif (tree cond)
d947ba59
RK
2452{
2453 cond_stack->data.cond.next_label = gen_label_rtx ();
2454 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2455}
2456
28d81abb
RK
2457/* Generate RTL for the end of an if-then.
2458 Pop the record for it off of cond_stack. */
2459
2460void
46c5ad27 2461expand_end_cond (void)
28d81abb
RK
2462{
2463 struct nesting *thiscond = cond_stack;
2464
b93a436e
JL
2465 do_pending_stack_adjust ();
2466 if (thiscond->data.cond.next_label)
2467 emit_label (thiscond->data.cond.next_label);
2468 if (thiscond->data.cond.endif_label)
2469 emit_label (thiscond->data.cond.endif_label);
28d81abb
RK
2470
2471 POPSTACK (cond_stack);
e2500fed 2472 clear_last_expr ();
28d81abb
RK
2473}
2474\f
2475/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2476 loop should be exited by `exit_something'. This is a loop for which
2477 `expand_continue' will jump to the top of the loop.
2478
2479 Make an entry on loop_stack to record the labels associated with
2480 this loop. */
2481
2482struct nesting *
46c5ad27 2483expand_start_loop (int exit_flag)
28d81abb 2484{
b3694847 2485 struct nesting *thisloop = ALLOC_NESTING ();
28d81abb
RK
2486
2487 /* Make an entry on loop_stack for the loop we are entering. */
2488
e2500fed 2489 thisloop->desc = LOOP_NESTING;
28d81abb
RK
2490 thisloop->next = loop_stack;
2491 thisloop->all = nesting_stack;
2492 thisloop->depth = ++nesting_depth;
2493 thisloop->data.loop.start_label = gen_label_rtx ();
2494 thisloop->data.loop.end_label = gen_label_rtx ();
2495 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2496 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2497 loop_stack = thisloop;
2498 nesting_stack = thisloop;
2499
2500 do_pending_stack_adjust ();
2501 emit_queue ();
28d81abb
RK
2502 emit_label (thisloop->data.loop.start_label);
2503
2504 return thisloop;
2505}
2506
2507/* Like expand_start_loop but for a loop where the continuation point
2508 (for expand_continue_loop) will be specified explicitly. */
2509
2510struct nesting *
46c5ad27 2511expand_start_loop_continue_elsewhere (int exit_flag)
28d81abb
RK
2512{
2513 struct nesting *thisloop = expand_start_loop (exit_flag);
2514 loop_stack->data.loop.continue_label = gen_label_rtx ();
2515 return thisloop;
2516}
2517
f0de0c5d
RH
2518/* Begin a null, aka do { } while (0) "loop". But since the contents
2519 of said loop can still contain a break, we must frob the loop nest. */
2520
2521struct nesting *
46c5ad27 2522expand_start_null_loop (void)
f0de0c5d 2523{
b3694847 2524 struct nesting *thisloop = ALLOC_NESTING ();
f0de0c5d
RH
2525
2526 /* Make an entry on loop_stack for the loop we are entering. */
2527
e2500fed 2528 thisloop->desc = LOOP_NESTING;
f0de0c5d
RH
2529 thisloop->next = loop_stack;
2530 thisloop->all = nesting_stack;
2531 thisloop->depth = ++nesting_depth;
2e040219 2532 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
f0de0c5d 2533 thisloop->data.loop.end_label = gen_label_rtx ();
ba89764a 2534 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
f0de0c5d
RH
2535 thisloop->exit_label = thisloop->data.loop.end_label;
2536 loop_stack = thisloop;
2537 nesting_stack = thisloop;
2538
2539 return thisloop;
2540}
2541
28d81abb
RK
2542/* Specify the continuation point for a loop started with
2543 expand_start_loop_continue_elsewhere.
2544 Use this at the point in the code to which a continue statement
2545 should jump. */
2546
2547void
46c5ad27 2548expand_loop_continue_here (void)
28d81abb
RK
2549{
2550 do_pending_stack_adjust ();
28d81abb
RK
2551 emit_label (loop_stack->data.loop.continue_label);
2552}
2553
2554/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2555 Pop the block off of loop_stack. */
2556
2557void
46c5ad27 2558expand_end_loop (void)
28d81abb 2559{
0720f6fb 2560 rtx start_label = loop_stack->data.loop.start_label;
e803a64b
RH
2561 rtx etc_note;
2562 int eh_regions, debug_blocks;
4977bab6 2563 bool empty_test;
28d81abb 2564
28d81abb
RK
2565 do_pending_stack_adjust ();
2566
e803a64b 2567 /* If the loop starts with a loop exit, roll that to the end where
a7d308f7 2568 it will optimize together with the jump back.
93de5c31 2569
e803a64b 2570 If the loop presently looks like this (in pseudo-C):
93de5c31 2571
e803a64b
RH
2572 start_label:
2573 if (test) goto end_label;
2574 LOOP_END_TOP_COND
2575 body;
2576 goto start_label;
2577 end_label:
4381f7c2 2578
93de5c31
MM
2579 transform it to look like:
2580
e803a64b
RH
2581 goto start_label;
2582 top_label:
2583 body;
2584 start_label:
2585 if (test) goto end_label;
2586 goto top_label;
2587 end_label:
2588
2589 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
14b493d6 2590 the end of the entry conditional. Without this, our lexical scan
e803a64b
RH
2591 can't tell the difference between an entry conditional and a
2592 body conditional that exits the loop. Mistaking the two means
786de7eb 2593 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
e803a64b
RH
2594 screw up loop unrolling.
2595
2596 Things will be oh so much better when loop optimization is done
2597 off of a proper control flow graph... */
2598
2599 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2600
4977bab6 2601 empty_test = true;
e803a64b
RH
2602 eh_regions = debug_blocks = 0;
2603 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2604 if (GET_CODE (etc_note) == NOTE)
2605 {
2606 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2607 break;
28d81abb 2608
6de9cd9a
DN
2609 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2610 abort ();
28d81abb 2611
e803a64b
RH
2612 /* At the same time, scan for EH region notes, as we don't want
2613 to scrog region nesting. This shouldn't happen, but... */
6de9cd9a 2614 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
e803a64b
RH
2615 eh_regions++;
2616 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2617 {
2618 if (--eh_regions < 0)
2619 /* We've come to the end of an EH region, but never saw the
2620 beginning of that region. That means that an EH region
2621 begins before the top of the loop, and ends in the middle
2622 of it. The existence of such a situation violates a basic
2623 assumption in this code, since that would imply that even
2624 when EH_REGIONS is zero, we might move code out of an
2625 exception region. */
2626 abort ();
2627 }
0720f6fb 2628
e803a64b
RH
2629 /* Likewise for debug scopes. In this case we'll either (1) move
2630 all of the notes if they are properly nested or (2) leave the
786de7eb 2631 notes alone and only rotate the loop at high optimization
e803a64b
RH
2632 levels when we expect to scrog debug info. */
2633 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2634 debug_blocks++;
2635 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2636 debug_blocks--;
2637 }
4977bab6
ZW
2638 else if (INSN_P (etc_note))
2639 empty_test = false;
28d81abb 2640
e803a64b
RH
2641 if (etc_note
2642 && optimize
4977bab6 2643 && ! empty_test
e803a64b
RH
2644 && eh_regions == 0
2645 && (debug_blocks == 0 || optimize >= 2)
2646 && NEXT_INSN (etc_note) != NULL_RTX
2647 && ! any_condjump_p (get_last_insn ()))
2648 {
2649 /* We found one. Move everything from START to ETC to the end
2650 of the loop, and add a jump from the top of the loop. */
2651 rtx top_label = gen_label_rtx ();
2652 rtx start_move = start_label;
2653
e803a64b
RH
2654 emit_label_before (top_label, start_move);
2655
2656 /* Actually move the insns. If the debug scopes are nested, we
2657 can move everything at once. Otherwise we have to move them
2658 one by one and squeeze out the block notes. */
2659 if (debug_blocks == 0)
2660 reorder_insns (start_move, etc_note, get_last_insn ());
2661 else
28d81abb 2662 {
e803a64b 2663 rtx insn, next_insn;
93de5c31
MM
2664 for (insn = start_move; insn; insn = next_insn)
2665 {
2666 /* Figure out which insn comes after this one. We have
2667 to do this before we move INSN. */
e803a64b 2668 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
93de5c31
MM
2669
2670 if (GET_CODE (insn) == NOTE
2671 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2672 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
93de5c31
MM
2673 continue;
2674
93de5c31
MM
2675 reorder_insns (insn, insn, get_last_insn ());
2676 }
28d81abb 2677 }
28d81abb 2678
e803a64b
RH
2679 /* Add the jump from the top of the loop. */
2680 emit_jump_insn_before (gen_jump (start_label), top_label);
2681 emit_barrier_before (top_label);
2682 start_label = top_label;
a7d308f7 2683 }
e803a64b 2684
6de9cd9a
DN
2685 if (etc_note)
2686 delete_insn (etc_note);
2687
e803a64b 2688 emit_jump (start_label);
28d81abb
RK
2689 emit_label (loop_stack->data.loop.end_label);
2690
2691 POPSTACK (loop_stack);
2692
e2500fed 2693 clear_last_expr ();
28d81abb
RK
2694}
2695
/* Finish a null loop, aka do { } while (0).  The body emitted nothing,
   but the loop's end label must still be emitted so that any `break'
   (expand_exit_loop) inside the body has a target, and the loop entry
   must be popped off the nesting stack.  */

void
expand_end_null_loop (void)
{
  /* Flush any deferred stack-pointer adjustment before emitting the
     label, so all paths reach it with a consistent stack.  */
  do_pending_stack_adjust ();
  emit_label (loop_stack->data.loop.end_label);

  /* Remove this loop from the nesting stack.  */
  POPSTACK (loop_stack);

  clear_last_expr ();
}
2708
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.

   WHICHLOOP may be 0, meaning "the innermost loop".  */

int
expand_continue_loop (struct nesting *whichloop)
{
  /* Emit information for branch prediction.  */
  rtx note;

  if (flag_guess_branch_prob)
    {
      /* A `continue' is predicted taken.  */
      note = emit_note (NOTE_INSN_PREDICTION);
      NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
    }
  clear_last_expr ();
  /* Default to the innermost loop; fail if there is none.  */
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}
2734
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.

   WHICHLOOP may be 0, meaning "the innermost loop".  */

int
expand_exit_loop (struct nesting *whichloop)
{
  clear_last_expr ();
  /* Default to the innermost loop; fail if there is none.  */
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  /* Jump to the loop's end label; expand_goto_internal handles any
     fixups needed to leave intervening contours.  */
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}
2749
/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.

   WHICHLOOP may be 0, meaning "the innermost loop".  */

int
expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
{
  rtx label;
  clear_last_expr ();

  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;

  /* Constant conditions need no RTL: a nonzero COND never exits,
     a zero COND always exits.  */
  if (integer_nonzerop (cond))
    return 1;
  if (integer_zerop (cond))
    return expand_exit_loop (whichloop);

  /* Check if we definitely won't need a fixup.  */
  if (whichloop == nesting_stack)
    {
      /* Innermost contour: jump straight to the end label.  */
      jumpifnot (cond, whichloop->data.loop.end_label);
      return 1;
    }

  /* In order to handle fixups, we actually create a conditional jump
     around an unconditional branch to exit the loop.  If fixups are
     necessary, they go before the unconditional branch.  */

  label = gen_label_rtx ();
  jumpif (cond, label);
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
			NULL_RTX);
  emit_label (label);

  return 1;
}
2789
/* Like expand_exit_loop_if_false except also emit a note marking
   the end of the conditional.  Should only be used immediately
   after expand_loop_start.

   The NOTE_INSN_LOOP_END_TOP_COND note is what expand_end_loop scans
   for when deciding whether the loop can be rotated.  */

int
expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
{
  if (! expand_exit_loop_if_false (whichloop, cond))
    return 0;

  emit_note (NOTE_INSN_LOOP_END_TOP_COND);
  return 1;
}
2803
0e9e1e0a 2804/* Return nonzero if we should preserve sub-expressions as separate
28d81abb
RK
2805 pseudos. We never do so if we aren't optimizing. We always do so
2806 if -fexpensive-optimizations.
2807
2808 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2809 the loop may still be a small one. */
2810
2811int
46c5ad27 2812preserve_subexpressions_p (void)
28d81abb
RK
2813{
2814 rtx insn;
2815
2816 if (flag_expensive_optimizations)
2817 return 1;
2818
01d939e8 2819 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
28d81abb
RK
2820 return 0;
2821
2822 insn = get_last_insn_anywhere ();
2823
2824 return (insn
2825 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2826 < n_non_fixed_regs * 3));
2827
2828}
2829
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something (void)
{
  struct nesting *n;
  clear_last_expr ();
  /* Walk outward through every kind of nesting construct, taking the
     innermost one that was given an exit label.  */
  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
	return 1;
      }

  return 0;
}
2853\f
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  rtx last_insn;

  /* Remember where we are so cleanups can be placed after this point.  */
  last_insn = get_last_insn ();

  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 (last_insn);
}
2871
/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  Jumps to naked_return_label,
   creating it on first use.  */

void
expand_naked_return (void)
{
  rtx last_insn, end_label;

  last_insn = get_last_insn ();
  end_label = naked_return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  clear_last_expr ();

  /* Lazily create the shared naked-return label.  */
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}
2891
969d70ca
JH
2892/* Try to guess whether the value of return means error code. */
2893static enum br_predictor
46c5ad27 2894return_prediction (rtx val)
969d70ca
JH
2895{
2896 /* Different heuristics for pointers and scalars. */
2897 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2898 {
2899 /* NULL is usually not returned. */
2900 if (val == const0_rtx)
2901 return PRED_NULL_RETURN;
2902 }
2903 else
2904 {
2905 /* Negative return values are often used to indicate
2906 errors. */
2907 if (GET_CODE (val) == CONST_INT
2908 && INTVAL (val) < 0)
2909 return PRED_NEGATIVE_RETURN;
2910 /* Constant return values are also usually erors,
2911 zero/one often mean booleans so exclude them from the
2912 heuristics. */
2913 if (CONSTANT_P (val)
2914 && (val != const0_rtx && val != const1_rtx))
2915 return PRED_CONST_RETURN;
2916 }
2917 return PRED_NO_PREDICTION;
2918}
2919
/* If the current function returns values in the most significant part
   of a register, shift return value VAL appropriately.  The mode of
   the function's return type is known not to be BLKmode.  */

static rtx
shift_return_value (rtx val)
{
  tree type;

  type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (targetm.calls.return_in_msb (type))
    {
      rtx target;
      HOST_WIDE_INT shift;

      target = DECL_RTL (DECL_RESULT (current_function_decl));
      /* Number of unused bits between the value and the top of the
	 return register.  */
      shift = (GET_MODE_BITSIZE (GET_MODE (target))
	       - BITS_PER_UNIT * int_size_in_bytes (type));
      if (shift > 0)
	val = expand_binop (GET_MODE (target), ashl_optab,
			    gen_lowpart (GET_MODE (target), val),
			    GEN_INT (shift), target, 1, OPTAB_WIDEN);
    }
  return val;
}
2946
2947
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  rtx last_insn;
  rtx return_reg;
  enum br_predictor pred;

  if (flag_guess_branch_prob
      && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
    {
      /* Emit information for branch prediction.  */
      rtx note;

      note = emit_note (NOTE_INSN_PREDICTION);

      NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);

    }

  last_insn = get_last_insn ();
  return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      /* If the target promotes function return values, widen VAL to
	 the promoted mode before storing it.  */
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
	{
	  int unsignedp = TYPE_UNSIGNED (type);
	  enum machine_mode old_mode
	    = DECL_MODE (DECL_RESULT (current_function_decl));
	  enum machine_mode mode
	    = promote_mode (type, old_mode, &unsignedp, 1);

	  if (mode != old_mode)
	    val = convert_modes (mode, old_mode, val, unsignedp);
	}
      /* A PARALLEL return location (value split over several registers)
	 needs a group load; otherwise a plain move suffices.  */
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 (last_insn);
}
2997
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.

   Shared tail of expand_null_return and expand_value_return: jump to
   the cleanup label if there is one, else the plain return label,
   creating the latter on first use.  */

static void
expand_null_return_1 (rtx last_insn)
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  clear_last_expr ();

  if (end_label == 0)
    end_label = return_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}
3014\f
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* Peel off "RESULT_DECL = rhs"; only the rhs matters here.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      tree expr;

      /* Turn "return c ? a : b" into two recursive expand_return calls,
	 one per arm, guarded by a jump on C.  */
      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      start_cleanup_deferral ();
      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 1));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      emit_label (label);

      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 2));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      end_cleanup_deferral ();
      return;
    }

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  The value must go into a pseudo in case there
     are cleanups that will clobber the real return register.  */

  if (retval_rhs != 0
      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
      && GET_CODE (result_rtl) == REG)
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate degenerates to returning nothing.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode,
					      BITS_PER_WORD),
			   BITS_PER_WORD);
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* No suitable mode found.  */
	  if (tmpmode == VOIDmode)
	    abort ();

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the per-word pseudos into one wide pseudo, working in
	 word_mode if the result is narrower than a word.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      emit_queue ();
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (GET_CODE (result_rtl) == REG
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      emit_queue ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (shift_return_value (val));
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      emit_queue ();
      expand_value_return (result_rtl);
    }
}
28d81abb 3241\f
/* Attempt to optimize a potential tail recursion call into a goto.
   ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
   where to place the jump to the tail recursion label.

   Return TRUE if the call was optimized into a goto.  */

int
optimize_tail_recursion (tree arguments, rtx last_insn)
{
  /* Finish checking validity, and if valid emit code to set the
     argument variables for the new call.  */
  if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
    {
      /* Lazily create the label at the function's re-entry point.  */
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      emit_queue ();
      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
      /* The jump is unconditional, so the code after it is unreachable.  */
      emit_barrier ();
      return 1;
    }
  return 0;
}
3268
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.

   Works in three ordered passes: (1) validate and compute all actuals,
   (2) copy any actual that still reads a formal we are about to
   overwrite, (3) store the actuals into the formals.  The ordering is
   what makes simultaneous assignment safe.  */

static int
tail_recursion_args (tree actuals, tree formals)
{
  tree a = actuals, f = formals;
  int i;
  rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (!lang_hooks.types_compatible_p (TREE_TYPE (TREE_VALUE (a)),
					  TREE_TYPE (f)))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  /* Arity mismatch: one chain ran out before the other.  */
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      int copy = 0;
      int j;
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  {
	    copy = 1;
	    break;
	  }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	{
	  /* Modes differ: convert through the formal's promoted mode,
	     with an intermediate register if the declared mode differs
	     from the RTL's mode.  */
	  rtx tmp = argvec[i];
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
	  promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
		       &unsignedp, 0);
	  if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
	    {
	      tmp = gen_reg_rtx (DECL_MODE (f));
	      convert_move (tmp, argvec[i], unsignedp);
	    }
	  convert_move (DECL_RTL (f), tmp, unsignedp);
	}
    }

  free_temp_slots ();
  return 1;
}
3349\f
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   FLAGS is a bitwise or of the following flags:

     1 - Nonzero if this construct should be visible to
	 `exit_something'.

     2 - Nonzero if this contour does not require a
	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
	 language-independent code should set this flag because they
	 will not create corresponding BLOCK nodes.  (There should be
	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
	 when expand_end_bindings is called.

    If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
    optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
    note.  */

void
expand_start_bindings_and_block (int flags, tree block)
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note;
  int exit_flag = ((flags & 1) != 0);
  int block_flag = ((flags & 2) == 0);

  /* If a BLOCK is supplied, then the caller should be requesting a
     NOTE_INSN_BLOCK_BEG note.  */
  if (!block_flag && block)
    abort ();

  /* Create a note to mark the beginning of the block.  */
  if (block_flag && !cfun->dont_emit_block_notes)
    {
      note = emit_note (NOTE_INSN_BLOCK_BEG);
      NOTE_BLOCK (note) = block;
    }
  else
    /* Emit a placeholder so the contour still has a first insn.  */
    note = emit_note (NOTE_INSN_DELETED);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->desc = BLOCK_NESTING;
  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.exception_region = 0;
  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;

  thisblock->data.block.conditional_code = 0;
  thisblock->data.block.last_unconditional_cleanup = note;
  /* When we insert instructions after the last unconditional cleanup,
     we don't adjust last_insn.  That means that a later add_insn will
     clobber the instructions we've just added.  The easiest way to
     fix this is to just insert another instruction here, so that the
     instructions inserted after the last unconditional cleanup are
     never the last instruction.  */
  emit_note (NOTE_INSN_DELETED);

  /* Chain the enclosing block's cleanups onto ours, if it has any.  */
  if (block_stack
      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
  else
    thisblock->data.block.outer_cleanups = 0;
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++current_block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
  push_temp_slots ();
}
3432
/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
   expand_expr are made.  After we end the region, we know that all
   space for all temporaries that were created by TARGET_EXPRs will be
   destroyed and their space freed for reuse.  */

void
expand_start_target_temps (void)
{
  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  push_temp_slots ();

  /* Start a new binding layer that will keep track of all cleanup
     actions to be performed.  */
  expand_start_bindings (2);

  /* Temporaries created from here on belong to this region.  */
  target_temp_slot_level = temp_slot_level;
}
3452
/* End a region begun by expand_start_target_temps: run/record the
   pending cleanups by closing the binding contour, then free the
   temporary-slot level pushed at the start.  */

void
expand_end_target_temps (void)
{
  expand_end_bindings (NULL_TREE, 0, 0);

  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  pop_temp_slots ();
}
3462
0e9e1e0a 3463/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
deb5e280
JM
3464 in question represents the outermost pair of curly braces (i.e. the "body
3465 block") of a function or method.
3466
3467 For any BLOCK node representing a "body block" of a function or method, the
3468 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3469 represents the outermost (function) scope for the function or method (i.e.
3470 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 3471 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
3472
3473int
46c5ad27 3474is_body_block (tree stmt)
deb5e280 3475{
2896d056
ZW
3476 if (lang_hooks.no_body_blocks)
3477 return 0;
3478
deb5e280
JM
3479 if (TREE_CODE (stmt) == BLOCK)
3480 {
3481 tree parent = BLOCK_SUPERCONTEXT (stmt);
3482
3483 if (parent && TREE_CODE (parent) == BLOCK)
3484 {
3485 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3486
3487 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3488 return 1;
3489 }
3490 }
3491
3492 return 0;
3493}
3494
e976b8b2
MS
3495/* True if we are currently emitting insns in an area of output code
3496 that is controlled by a conditional expression. This is used by
3497 the cleanup handling code to generate conditional cleanup actions. */
3498
3499int
46c5ad27 3500conditional_context (void)
e976b8b2
MS
3501{
3502 return block_stack && block_stack->data.block.conditional_code;
3503}
3504
91088ddb
JM
3505/* Return an opaque pointer to the current nesting level, so frontend code
3506 can check its own sanity. */
3507
3508struct nesting *
46c5ad27 3509current_nesting_level (void)
91088ddb
JM
3510{
3511 return cfun ? block_stack : 0;
3512}
3513
ba716ac9
BS
3514/* Emit code to restore vital registers at the beginning of a nonlocal goto
3515 handler. */
3516static void
46c5ad27 3517expand_nl_goto_receiver (void)
ba716ac9 3518{
6de9cd9a 3519 /* Clobber the FP when we get here, so we have to make sure it's
e292dbb0
WH
3520 marked as used by this function. */
3521 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3522
3523 /* Mark the static chain as clobbered here so life information
3524 doesn't get messed up for it. */
3525 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3526
ba716ac9
BS
3527#ifdef HAVE_nonlocal_goto
3528 if (! HAVE_nonlocal_goto)
3529#endif
3530 /* First adjust our frame pointer to its actual value. It was
3531 previously set to the start of the virtual area corresponding to
3532 the stacked variables when we branched here and now needs to be
3533 adjusted to the actual hardware fp value.
3534
3535 Assignments are to virtual registers are converted by
3536 instantiate_virtual_regs into the corresponding assignment
3537 to the underlying register (fp in this case) that makes
3538 the original assignment true.
3539 So the following insn will actually be
3540 decrementing fp by STARTING_FRAME_OFFSET. */
3541 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3542
3543#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3544 if (fixed_regs[ARG_POINTER_REGNUM])
3545 {
3546#ifdef ELIMINABLE_REGS
3547 /* If the argument pointer can be eliminated in favor of the
3548 frame pointer, we don't need to restore it. We assume here
3549 that if such an elimination is present, it can always be used.
3550 This is the case on all known machines; if we don't make this
3551 assumption, we do unnecessary saving on many machines. */
8b60264b 3552 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
ba716ac9
BS
3553 size_t i;
3554
b6a1cbae 3555 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
ba716ac9
BS
3556 if (elim_regs[i].from == ARG_POINTER_REGNUM
3557 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3558 break;
3559
b6a1cbae 3560 if (i == ARRAY_SIZE (elim_regs))
ba716ac9
BS
3561#endif
3562 {
3563 /* Now restore our arg pointer from the address at which it
278ed218 3564 was saved in our stack frame. */
ba716ac9 3565 emit_move_insn (virtual_incoming_args_rtx,
278ed218 3566 copy_to_reg (get_arg_pointer_save_area (cfun)));
ba716ac9
BS
3567 }
3568 }
3569#endif
3570
3571#ifdef HAVE_nonlocal_goto_receiver
3572 if (HAVE_nonlocal_goto_receiver)
3573 emit_insn (gen_nonlocal_goto_receiver ());
3574#endif
e292dbb0
WH
3575
3576 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3577 insn, but we must not allow the code we just generated to be reordered
3578 by scheduling. Specifically, the update of the frame pointer must
3579 happen immediately, not later. So emit an ASM_INPUT to act as blockage
3580 insn. */
3581 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
ba716ac9
BS
3582}
3583
ba716677
MM
3584/* Warn about any unused VARS (which may contain nodes other than
3585 VAR_DECLs, but such nodes are ignored). The nodes are connected
3586 via the TREE_CHAIN field. */
3587
3588void
46c5ad27 3589warn_about_unused_variables (tree vars)
ba716677
MM
3590{
3591 tree decl;
3592
078721e1 3593 if (warn_unused_variable)
ba716677 3594 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 3595 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
3596 && ! TREE_USED (decl)
3597 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 3598 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ddd2d57e 3599 warning ("%Junused variable '%D'", decl, decl);
ba716677
MM
3600}
3601
28d81abb 3602/* Generate RTL code to terminate a binding contour.
e97b5c12
MM
3603
3604 VARS is the chain of VAR_DECL nodes for the variables bound in this
3605 contour. There may actually be other nodes in this chain, but any
3606 nodes other than VAR_DECLS are ignored.
3607
28d81abb
RK
3608 MARK_ENDS is nonzero if we should put a note at the beginning
3609 and end of this binding contour.
3610
cda26058
RK
3611 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3612 zero if we can jump into this contour only if it does not have a saved
3613 stack level, and negative if we are not to check for invalid use of
3614 labels (because the front end does that). */
28d81abb
RK
3615
3616void
46c5ad27 3617expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
28d81abb 3618{
b3694847 3619 struct nesting *thisblock = block_stack;
e976b8b2 3620
ba716677
MM
3621 /* If any of the variables in this scope were not used, warn the
3622 user. */
3623 warn_about_unused_variables (vars);
28d81abb 3624
28d81abb
RK
3625 if (thisblock->exit_label)
3626 {
3627 do_pending_stack_adjust ();
3628 emit_label (thisblock->exit_label);
3629 }
3630
72eb1038
BH
3631 /* Don't allow jumping into a block that has a stack level.
3632 Cleanups are allowed, though. */
cda26058
RK
3633 if (dont_jump_in > 0
3634 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
28d81abb
RK
3635 {
3636 struct label_chain *chain;
3637
3638 /* Any labels in this block are no longer valid to go to.
3639 Mark them to cause an error message. */
3640 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3641 {
3642 DECL_TOO_LATE (chain->label) = 1;
3643 /* If any goto without a fixup came to this label,
3644 that must be an error, because gotos without fixups
72eb1038 3645 come from outside all saved stack-levels. */
28d81abb 3646 if (TREE_ADDRESSABLE (chain->label))
ddd2d57e
RH
3647 error ("%Jlabel '%D' used before containing binding contour",
3648 chain->label, chain->label);
28d81abb
RK
3649 }
3650 }
3651
3652 /* Restore stack level in effect before the block
3653 (only if variable-size objects allocated). */
3654 /* Perform any cleanups associated with the block. */
3655
3656 if (thisblock->data.block.stack_level != 0
3657 || thisblock->data.block.cleanups != 0)
3658 {
04da69d3
JM
3659 int reachable;
3660 rtx insn;
28d81abb 3661
50d1b7a1
MS
3662 /* Don't let cleanups affect ({...}) constructs. */
3663 int old_expr_stmts_for_value = expr_stmts_for_value;
3664 rtx old_last_expr_value = last_expr_value;
0fab64a3 3665 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
50d1b7a1
MS
3666 tree old_last_expr_type = last_expr_type;
3667 expr_stmts_for_value = 0;
28d81abb 3668
04da69d3
JM
3669 /* Only clean up here if this point can actually be reached. */
3670 insn = get_last_insn ();
3671 if (GET_CODE (insn) == NOTE)
3672 insn = prev_nonnote_insn (insn);
d1ee23e5 3673 reachable = (! insn || GET_CODE (insn) != BARRIER);
4381f7c2 3674
50d1b7a1 3675 /* Do the cleanups. */
b39b8084 3676 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
50d1b7a1
MS
3677 if (reachable)
3678 do_pending_stack_adjust ();
28d81abb 3679
50d1b7a1
MS
3680 expr_stmts_for_value = old_expr_stmts_for_value;
3681 last_expr_value = old_last_expr_value;
0fab64a3 3682 last_expr_alt_rtl = old_last_expr_alt_rtl;
50d1b7a1
MS
3683 last_expr_type = old_last_expr_type;
3684
3685 /* Restore the stack level. */
3686
3687 if (reachable && thisblock->data.block.stack_level != 0)
3688 {
3689 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3690 thisblock->data.block.stack_level, NULL_RTX);
6de9cd9a
DN
3691 if (cfun->nonlocal_goto_save_area)
3692 update_nonlocal_goto_save_area ();
28d81abb
RK
3693 }
3694
3695 /* Any gotos out of this block must also do these things.
59257ff7
RK
3696 Also report any gotos with fixups that came to labels in this
3697 level. */
28d81abb
RK
3698 fixup_gotos (thisblock,
3699 thisblock->data.block.stack_level,
3700 thisblock->data.block.cleanups,
3701 thisblock->data.block.first_insn,
3702 dont_jump_in);
3703 }
3704
c7d2d61d
RS
3705 /* Mark the beginning and end of the scope if requested.
3706 We do this now, after running cleanups on the variables
3707 just going out of scope, so they are in scope for their cleanups. */
3708
6de9cd9a 3709 if (mark_ends && !cfun->dont_emit_block_notes)
a97901e6 3710 {
2e040219 3711 rtx note = emit_note (NOTE_INSN_BLOCK_END);
a97901e6
MM
3712 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3713 }
c7d2d61d
RS
3714 else
3715 /* Get rid of the beginning-mark if we don't make an end-mark. */
3716 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3717
e976b8b2 3718 /* Restore the temporary level of TARGET_EXPRs. */
3f1d071b 3719 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
e976b8b2 3720
28d81abb
RK
3721 /* Restore block_stack level for containing block. */
3722
3723 stack_block_stack = thisblock->data.block.innermost_stack_block;
3724 POPSTACK (block_stack);
3725
3726 /* Pop the stack slot nesting and free any slots at this level. */
3727 pop_temp_slots ();
3728}
3729\f
7393c642
RK
3730/* Generate code to save the stack pointer at the start of the current block
3731 and set up to restore it on exit. */
3732
3733void
46c5ad27 3734save_stack_pointer (void)
7393c642
RK
3735{
3736 struct nesting *thisblock = block_stack;
3737
3738 if (thisblock->data.block.stack_level == 0)
3739 {
3740 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3741 &thisblock->data.block.stack_level,
3742 thisblock->data.block.first_insn);
3743 stack_block_stack = thisblock;
3744 }
3745}
3746\f
28d81abb 3747/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3748 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3749
3750void
46c5ad27 3751expand_decl (tree decl)
28d81abb 3752{
ca695ac9
JB
3753 tree type;
3754
ca695ac9 3755 type = TREE_TYPE (decl);
28d81abb 3756
eabb9ed0
RK
3757 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3758 type in case this node is used in a reference. */
3759 if (TREE_CODE (decl) == CONST_DECL)
3760 {
3761 DECL_MODE (decl) = TYPE_MODE (type);
3762 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3763 DECL_SIZE (decl) = TYPE_SIZE (type);
3764 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3765 return;
3766 }
28d81abb 3767
eabb9ed0
RK
3768 /* Otherwise, only automatic variables need any expansion done. Static and
3769 external variables, and external functions, will be handled by
3770 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3771 nothing. PARM_DECLs are handled in `assign_parms'. */
28d81abb
RK
3772 if (TREE_CODE (decl) != VAR_DECL)
3773 return;
eabb9ed0 3774
44fe2e80 3775 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3776 return;
3777
3778 /* Create the RTL representation for the variable. */
3779
3780 if (type == error_mark_node)
19e7881c 3781 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1da68f56 3782
28d81abb
RK
3783 else if (DECL_SIZE (decl) == 0)
3784 /* Variable with incomplete type. */
3785 {
abde42f7 3786 rtx x;
28d81abb
RK
3787 if (DECL_INITIAL (decl) == 0)
3788 /* Error message was already done; now avoid a crash. */
abde42f7 3789 x = gen_rtx_MEM (BLKmode, const0_rtx);
28d81abb
RK
3790 else
3791 /* An initializer is going to decide the size of this array.
3792 Until we know the size, represent its address with a reg. */
abde42f7 3793 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3bdf5ad1 3794
abde42f7
JH
3795 set_mem_attributes (x, decl, 1);
3796 SET_DECL_RTL (decl, x);
28d81abb
RK
3797 }
3798 else if (DECL_MODE (decl) != BLKmode
3799 /* If -ffloat-store, don't put explicit float vars
3800 into regs. */
3801 && !(flag_float_store
3802 && TREE_CODE (type) == REAL_TYPE)
3803 && ! TREE_THIS_VOLATILE (decl)
6a29edea 3804 && ! DECL_NONLOCAL (decl)
7dc8b126 3805 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
28d81abb
RK
3806 {
3807 /* Automatic variable that can go in a register. */
8df83eae 3808 int unsignedp = TYPE_UNSIGNED (type);
28612f9e
RK
3809 enum machine_mode reg_mode
3810 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3811
19e7881c 3812 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
0d4903b8 3813
7dc8b126
JM
3814 if (!DECL_ARTIFICIAL (decl))
3815 mark_user_reg (DECL_RTL (decl));
7f070d5e 3816
e5e809f4 3817 if (POINTER_TYPE_P (type))
7f070d5e 3818 mark_reg_pointer (DECL_RTL (decl),
bdb429a5 3819 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
258a120b
JM
3820
3821 maybe_set_unchanging (DECL_RTL (decl), decl);
d96a6d1a
JM
3822
3823 /* If something wants our address, try to use ADDRESSOF. */
3824 if (TREE_ADDRESSABLE (decl))
f29a2bd1 3825 put_var_into_stack (decl, /*rescan=*/false);
28d81abb 3826 }
0df15c2c 3827
4559fd9e 3828 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
5e4ef18a 3829 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
3830 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3831 STACK_CHECK_MAX_VAR_SIZE)))
28d81abb
RK
3832 {
3833 /* Variable of fixed size that goes on the stack. */
3834 rtx oldaddr = 0;
3835 rtx addr;
0d4903b8 3836 rtx x;
28d81abb
RK
3837
3838 /* If we previously made RTL for this decl, it must be an array
3839 whose size was determined by the initializer.
3840 The old address was a register; set that register now
3841 to the proper address. */
19e7881c 3842 if (DECL_RTL_SET_P (decl))
28d81abb
RK
3843 {
3844 if (GET_CODE (DECL_RTL (decl)) != MEM
3845 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3846 abort ();
3847 oldaddr = XEXP (DECL_RTL (decl), 0);
3848 }
3849
28d81abb
RK
3850 /* Set alignment we actually gave this decl. */
3851 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3852 : GET_MODE_BITSIZE (DECL_MODE (decl)));
11cf4d18 3853 DECL_USER_ALIGN (decl) = 0;
28d81abb 3854
9432c136 3855 x = assign_temp (decl, 1, 1, 1);
0d4903b8
RK
3856 set_mem_attributes (x, decl, 1);
3857 SET_DECL_RTL (decl, x);
3858
28d81abb
RK
3859 if (oldaddr)
3860 {
3861 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3862 if (addr != oldaddr)
3863 emit_move_insn (oldaddr, addr);
3864 }
28d81abb
RK
3865 }
3866 else
3867 /* Dynamic-size object: must push space on the stack. */
3868 {
abde42f7 3869 rtx address, size, x;
28d81abb
RK
3870
3871 /* Record the stack pointer on entry to block, if have
3872 not already done so. */
7393c642
RK
3873 do_pending_stack_adjust ();
3874 save_stack_pointer ();
28d81abb 3875
1c9766da
RK
3876 /* Compute the variable's size, in bytes. This will expand any
3877 needed SAVE_EXPRs for the first time. */
4559fd9e 3878 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
28d81abb
RK
3879 free_temp_slots ();
3880
ff91ad08 3881 /* Allocate space on the stack for the variable. Note that
4381f7c2 3882 DECL_ALIGN says how the variable is to be aligned and we
ff91ad08
RK
3883 cannot use it to conclude anything about the alignment of
3884 the size. */
37366632 3885 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3886 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3887
28d81abb 3888 /* Reference the variable indirect through that rtx. */
abde42f7
JH
3889 x = gen_rtx_MEM (DECL_MODE (decl), address);
3890 set_mem_attributes (x, decl, 1);
3891 SET_DECL_RTL (decl, x);
28d81abb 3892
2207e295 3893
28d81abb
RK
3894 /* Indicate the alignment we actually gave this variable. */
3895#ifdef STACK_BOUNDARY
3896 DECL_ALIGN (decl) = STACK_BOUNDARY;
3897#else
3898 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3899#endif
11cf4d18 3900 DECL_USER_ALIGN (decl) = 0;
28d81abb 3901 }
28d81abb
RK
3902}
3903\f
6de9cd9a
DN
3904/* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3905void
3906expand_stack_alloc (tree alloc, tree t_size)
3907{
3908 rtx address, dest, size;
3909 tree var, type;
3910
3911 if (TREE_CODE (alloc) != ADDR_EXPR)
3912 abort ();
3913 var = TREE_OPERAND (alloc, 0);
3914 if (TREE_CODE (var) != VAR_DECL)
3915 abort ();
3916
3917 type = TREE_TYPE (var);
3918
3919 /* In function-at-a-time mode, variable_size doesn't expand this,
3920 so do it now. */
3921 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3922 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3923 const0_rtx, VOIDmode, 0);
3924
3925 /* Compute the variable's size, in bytes. */
3926 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3927 free_temp_slots ();
3928
3929 /* Allocate space on the stack for the variable. */
3930 address = XEXP (DECL_RTL (var), 0);
3931 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3932 if (dest != address)
3933 emit_move_insn (address, dest);
3934
3935 /* Indicate the alignment we actually gave this variable. */
3936#ifdef STACK_BOUNDARY
3937 DECL_ALIGN (var) = STACK_BOUNDARY;
3938#else
3939 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3940#endif
3941 DECL_USER_ALIGN (var) = 0;
3942}
3943
3944/* Emit code to save the current value of stack. */
3945rtx
3946expand_stack_save (void)
3947{
3948 rtx ret = NULL_RTX;
3949
3950 do_pending_stack_adjust ();
3951 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3952 return ret;
3953}
3954
3955/* Emit code to restore the current value of stack. */
3956void
3957expand_stack_restore (tree var)
3958{
3959 rtx sa = DECL_RTL (var);
3960
3961 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3962}
3963\f
28d81abb
RK
3964/* Emit code to perform the initialization of a declaration DECL. */
3965
3966void
46c5ad27 3967expand_decl_init (tree decl)
28d81abb 3968{
b4ac57ab
RS
3969 int was_used = TREE_USED (decl);
3970
ac79cd5a
RK
3971 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3972 for static decls. */
3973 if (TREE_CODE (decl) == CONST_DECL
3974 || TREE_STATIC (decl))
28d81abb
RK
3975 return;
3976
3977 /* Compute and store the initial value now. */
3978
59a7f9bf
DJ
3979 push_temp_slots ();
3980
28d81abb
RK
3981 if (DECL_INITIAL (decl) == error_mark_node)
3982 {
3983 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
e5e809f4 3984
28d81abb 3985 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
e5e809f4 3986 || code == POINTER_TYPE || code == REFERENCE_TYPE)
28d81abb 3987 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
b90f141a 3988 0);
28d81abb
RK
3989 emit_queue ();
3990 }
3991 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3992 {
f31686a3 3993 emit_line_note (DECL_SOURCE_LOCATION (decl));
b90f141a 3994 expand_assignment (decl, DECL_INITIAL (decl), 0);
28d81abb
RK
3995 emit_queue ();
3996 }
3997
b4ac57ab
RS
3998 /* Don't let the initialization count as "using" the variable. */
3999 TREE_USED (decl) = was_used;
4000
28d81abb 4001 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 4002 preserve_temp_slots (NULL_RTX);
28d81abb 4003 free_temp_slots ();
59a7f9bf 4004 pop_temp_slots ();
28d81abb
RK
4005}
4006
4007/* CLEANUP is an expression to be executed at exit from this binding contour;
4008 for example, in C++, it might call the destructor for this variable.
4009
4847c938
MS
4010 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4011 CLEANUP multiple times, and have the correct semantics. This
e976b8b2
MS
4012 happens in exception handling, for gotos, returns, breaks that
4013 leave the current scope.
28d81abb
RK
4014
4015 If CLEANUP is nonzero and DECL is zero, we record a cleanup
6d2f8887 4016 that is not associated with any particular variable. */
28d81abb
RK
4017
4018int
46c5ad27 4019expand_decl_cleanup (tree decl, tree cleanup)
28d81abb 4020{
3f1d071b 4021 struct nesting *thisblock;
28d81abb
RK
4022
4023 /* Error if we are not in any block. */
01d939e8 4024 if (cfun == 0 || block_stack == 0)
28d81abb
RK
4025 return 0;
4026
3f1d071b
BS
4027 thisblock = block_stack;
4028
28d81abb
RK
4029 /* Record the cleanup if there is one. */
4030
4031 if (cleanup != 0)
4032 {
e976b8b2
MS
4033 tree t;
4034 rtx seq;
4035 tree *cleanups = &thisblock->data.block.cleanups;
4036 int cond_context = conditional_context ();
4037
4038 if (cond_context)
4039 {
4040 rtx flag = gen_reg_rtx (word_mode);
4041 rtx set_flag_0;
4042 tree cond;
4043
4044 start_sequence ();
4045 emit_move_insn (flag, const0_rtx);
4046 set_flag_0 = get_insns ();
4047 end_sequence ();
4048
4049 thisblock->data.block.last_unconditional_cleanup
2f937369 4050 = emit_insn_after (set_flag_0,
e976b8b2
MS
4051 thisblock->data.block.last_unconditional_cleanup);
4052
4053 emit_move_insn (flag, const1_rtx);
4054
b0c48229 4055 cond = build_decl (VAR_DECL, NULL_TREE,
ae2bcd98 4056 lang_hooks.types.type_for_mode (word_mode, 1));
19e7881c 4057 SET_DECL_RTL (cond, flag);
e976b8b2
MS
4058
4059 /* Conditionalize the cleanup. */
4060 cleanup = build (COND_EXPR, void_type_node,
ae2bcd98 4061 lang_hooks.truthvalue_conversion (cond),
e976b8b2
MS
4062 cleanup, integer_zero_node);
4063 cleanup = fold (cleanup);
4064
e2500fed 4065 cleanups = &thisblock->data.block.cleanups;
e976b8b2
MS
4066 }
4067
4847c938 4068 cleanup = unsave_expr (cleanup);
e976b8b2 4069
1f8f4a0b 4070 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
e976b8b2
MS
4071
4072 if (! cond_context)
4073 /* If this block has a cleanup, it belongs in stack_block_stack. */
4074 stack_block_stack = thisblock;
4075
4076 if (cond_context)
4077 {
4078 start_sequence ();
4079 }
4847c938 4080
52a11cbf 4081 if (! using_eh_for_cleanups_p)
e976b8b2 4082 TREE_ADDRESSABLE (t) = 1;
52a11cbf
RH
4083 else
4084 expand_eh_region_start ();
4085
e976b8b2
MS
4086 if (cond_context)
4087 {
4088 seq = get_insns ();
4089 end_sequence ();
7e82801f
MS
4090 if (seq)
4091 thisblock->data.block.last_unconditional_cleanup
2f937369
DM
4092 = emit_insn_after (seq,
4093 thisblock->data.block.last_unconditional_cleanup);
e976b8b2
MS
4094 }
4095 else
4096 {
4097 thisblock->data.block.last_unconditional_cleanup
4098 = get_last_insn ();
ef97beff
JJ
4099 /* When we insert instructions after the last unconditional cleanup,
4100 we don't adjust last_insn. That means that a later add_insn will
4101 clobber the instructions we've just added. The easiest way to
4102 fix this is to just insert another instruction here, so that the
4103 instructions inserted after the last unconditional cleanup are
4104 never the last instruction. */
2e040219 4105 emit_note (NOTE_INSN_DELETED);
e976b8b2 4106 }
28d81abb
RK
4107 }
4108 return 1;
4109}
659e5a7a
JM
4110
4111/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4112 is thrown. */
4113
4114int
46c5ad27 4115expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
659e5a7a
JM
4116{
4117 int ret = expand_decl_cleanup (decl, cleanup);
4118 if (cleanup && ret)
4119 {
4120 tree node = block_stack->data.block.cleanups;
4121 CLEANUP_EH_ONLY (node) = eh_only;
4122 }
4123 return ret;
4124}
28d81abb
RK
4125\f
4126/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4127 DECL_ELTS is the list of elements that belong to DECL's type.
4128 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4129
4130void
46c5ad27 4131expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
28d81abb 4132{
01d939e8 4133 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
28d81abb 4134 rtx x;
8a693bd0 4135 tree t;
28d81abb 4136
8a693bd0
MM
4137 /* If any of the elements are addressable, so is the entire union. */
4138 for (t = decl_elts; t; t = TREE_CHAIN (t))
4139 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4140 {
4141 TREE_ADDRESSABLE (decl) = 1;
4142 break;
4143 }
4381f7c2 4144
ec5cd386
RK
4145 expand_decl (decl);
4146 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
4147 x = DECL_RTL (decl);
4148
8a693bd0
MM
4149 /* Go through the elements, assigning RTL to each. */
4150 for (t = decl_elts; t; t = TREE_CHAIN (t))
28d81abb 4151 {
8a693bd0
MM
4152 tree decl_elt = TREE_VALUE (t);
4153 tree cleanup_elt = TREE_PURPOSE (t);
28d81abb
RK
4154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4155
3256b817
JJ
4156 /* If any of the elements are addressable, so is the entire
4157 union. */
4158 if (TREE_USED (decl_elt))
4159 TREE_USED (decl) = 1;
4160
7b9032dd
JM
4161 /* Propagate the union's alignment to the elements. */
4162 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
11cf4d18 4163 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
7b9032dd
JM
4164
4165 /* If the element has BLKmode and the union doesn't, the union is
4166 aligned such that the element doesn't need to have BLKmode, so
4167 change the element's mode to the appropriate one for its size. */
4168 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4169 DECL_MODE (decl_elt) = mode
05bccae2 4170 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
7b9032dd 4171
28d81abb
RK
4172 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4173 instead create a new MEM rtx with the proper mode. */
4174 if (GET_CODE (x) == MEM)
4175 {
4176 if (mode == GET_MODE (x))
19e7881c 4177 SET_DECL_RTL (decl_elt, x);
28d81abb 4178 else
f1ec5147 4179 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
28d81abb
RK
4180 }
4181 else if (GET_CODE (x) == REG)
4182 {
4183 if (mode == GET_MODE (x))
19e7881c 4184 SET_DECL_RTL (decl_elt, x);
28d81abb 4185 else
ddef6bc7 4186 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
28d81abb
RK
4187 }
4188 else
4189 abort ();
4190
4191 /* Record the cleanup if there is one. */
4192
4193 if (cleanup != 0)
4194 thisblock->data.block.cleanups
1f8f4a0b
MM
4195 = tree_cons (decl_elt, cleanup_elt,
4196 thisblock->data.block.cleanups);
28d81abb
RK
4197 }
4198}
4199\f
4200/* Expand a list of cleanups LIST.
4201 Elements may be expressions or may be nested lists.
4202
0e9e1e0a 4203 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
50d1b7a1
MS
4204 goto and handle protection regions specially in that case.
4205
4206 If REACHABLE, we emit code, otherwise just inform the exception handling
4207 code about this finalization. */
28d81abb
RK
4208
4209static void
46c5ad27 4210expand_cleanups (tree list, int in_fixup, int reachable)
28d81abb
RK
4211{
4212 tree tail;
4213 for (tail = list; tail; tail = TREE_CHAIN (tail))
b39b8084
CL
4214 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4215 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4216 else
28d81abb 4217 {
b39b8084
CL
4218 if (! in_fixup && using_eh_for_cleanups_p)
4219 expand_eh_region_end_cleanup (TREE_VALUE (tail));
61d6b1cc 4220
b39b8084
CL
4221 if (reachable && !CLEANUP_EH_ONLY (tail))
4222 {
4223 /* Cleanups may be run multiple times. For example,
4224 when exiting a binding contour, we expand the
4225 cleanups associated with that contour. When a goto
4226 within that binding contour has a target outside that
4227 contour, it will expand all cleanups from its scope to
4228 the target. Though the cleanups are expanded multiple
4229 times, the control paths are non-overlapping so the
4230 cleanups will not be executed twice. */
4231
4232 /* We may need to protect from outer cleanups. */
4233 if (in_fixup && using_eh_for_cleanups_p)
50d1b7a1 4234 {
b39b8084 4235 expand_eh_region_start ();
52a11cbf 4236
b39b8084 4237 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
e5e809f4 4238
b39b8084 4239 expand_eh_region_end_fixup (TREE_VALUE (tail));
50d1b7a1 4240 }
b39b8084
CL
4241 else
4242 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4243
4244 free_temp_slots ();
28d81abb
RK
4245 }
4246 }
4247}
4248
e976b8b2
MS
4249/* Mark when the context we are emitting RTL for as a conditional
4250 context, so that any cleanup actions we register with
4251 expand_decl_init will be properly conditionalized when those
4252 cleanup actions are later performed. Must be called before any
956d6950 4253 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
4254
4255void
46c5ad27 4256start_cleanup_deferral (void)
e976b8b2 4257{
e3eef942
JW
4258 /* block_stack can be NULL if we are inside the parameter list. It is
4259 OK to do nothing, because cleanups aren't possible here. */
4260 if (block_stack)
4261 ++block_stack->data.block.conditional_code;
e976b8b2
MS
4262}
4263
4264/* Mark the end of a conditional region of code. Because cleanup
956d6950 4265 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
4266 after we end the currently deferred cleanups, only after we end all
4267 deferred cleanups, are we back in unconditional code. */
4268
4269void
46c5ad27 4270end_cleanup_deferral (void)
e976b8b2 4271{
e3eef942
JW
4272 /* block_stack can be NULL if we are inside the parameter list. It is
4273 OK to do nothing, because cleanups aren't possible here. */
4274 if (block_stack)
4275 --block_stack->data.block.conditional_code;
e976b8b2
MS
4276}
4277
28d81abb 4278tree
46c5ad27 4279last_cleanup_this_contour (void)
28d81abb
RK
4280{
4281 if (block_stack == 0)
4282 return 0;
4283
4284 return block_stack->data.block.cleanups;
4285}
4286
6de9cd9a
DN
4287
4288/* Return nonzero if any containing block has a stack level or
4289 cleanups. */
4290
4291int
4292containing_blocks_have_cleanups_or_stack_level (void)
4293{
4294 struct nesting *block;
4295
4296 for (block = block_stack; block; block = block->next)
4297 if (block->data.block.stack_level != 0
4298 || block->data.block.cleanups != 0)
4299 return 1;
4300
4301 return 0;
4302}
4303
28d81abb 4304/* Return 1 if there are any pending cleanups at this point.
de1f5659
JL
4305 Check the current contour as well as contours that enclose
4306 the current contour. */
28d81abb
RK
4307
4308int
46c5ad27 4309any_pending_cleanups (void)
28d81abb
RK
4310{
4311 struct nesting *block;
4312
01d939e8 4313 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
28d81abb
RK
4314 return 0;
4315
de1f5659 4316 if (block_stack->data.block.cleanups != NULL)
28d81abb 4317 return 1;
ce47ca44
JL
4318
4319 if (block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
4320 return 0;
4321
4322 for (block = block_stack->next; block; block = block->next)
4323 if (block->data.block.cleanups != 0)
4324 return 1;
4325
4326 return 0;
4327}
4328\f
4329/* Enter a case (Pascal) or switch (C) statement.
4330 Push a block onto case_stack and nesting_stack
4331 to accumulate the case-labels that are seen
4332 and to record the labels generated for the statement.
4333
4334 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4335 Otherwise, this construct is transparent for `exit_something'.
4336
4337 EXPR is the index-expression to be dispatched on.
4338 TYPE is its nominal type. We could simply convert EXPR to this type,
4339 but instead we take short cuts. */
4340
4341void
46c5ad27
AJ
4342expand_start_case (int exit_flag, tree expr, tree type,
4343 const char *printname)
28d81abb 4344{
b3694847 4345 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
4346
4347 /* Make an entry on case_stack for the case we are entering. */
4348
e2500fed 4349 thiscase->desc = CASE_NESTING;
28d81abb
RK
4350 thiscase->next = case_stack;
4351 thiscase->all = nesting_stack;
4352 thiscase->depth = ++nesting_depth;
4353 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4354 thiscase->data.case_stmt.case_list = 0;
4355 thiscase->data.case_stmt.index_expr = expr;
4356 thiscase->data.case_stmt.nominal_type = type;
4357 thiscase->data.case_stmt.default_label = 0;
28d81abb 4358 thiscase->data.case_stmt.printname = printname;
a11759a3 4359 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
4360 case_stack = thiscase;
4361 nesting_stack = thiscase;
4362
4363 do_pending_stack_adjust ();
f43f4314 4364 emit_queue ();
28d81abb
RK
4365
4366 /* Make sure case_stmt.start points to something that won't
4367 need any transformation before expand_end_case. */
4368 if (GET_CODE (get_last_insn ()) != NOTE)
2e040219 4369 emit_note (NOTE_INSN_DELETED);
28d81abb
RK
4370
4371 thiscase->data.case_stmt.start = get_last_insn ();
4c581243 4372
956d6950 4373 start_cleanup_deferral ();
28d81abb 4374}
28d81abb 4375\f
/* Warn about "unreachable code at beginning of <switch>" the first time a
   case label is seen inside the current switch: if any real insn was
   emitted between the start of the switch body and the first label, that
   code can never execute.  Also restores the line-number status saved by
   expand_start_case, and marks it consumed (-1) so the scan runs once.  */

static void
check_seenlabel (void)
{
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.line_number_status >= 0)
    {
      rtx insn;

      restore_line_number_status
	(case_stack->data.case_stmt.line_number_status);
      /* -1 flags the status as already restored; later labels skip this.  */
      case_stack->data.case_stmt.line_number_status = -1;

      /* Scan forward from the recorded start of the switch body.  */
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  /* A label first means the body starts with a case — no warning.  */
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  /* Notes and (USE ...) insns are not "real" code; anything else
	     before the first label is unreachable.  */
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      /* Walk back to the nearest line-number note so the warning
		 points at the offending source line.  */
	      do
		insn = PREV_INSN (insn);
	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));

	      /* If insn is zero, then there must have been a syntax error.  */
	      if (insn)
		{
		  location_t locus;
		  locus.file = NOTE_SOURCE_FILE (insn);
		  locus.line = NOTE_LINE_NUMBER (insn);
		  warning ("%Hunreachable code at beginning of %s", &locus,
			   case_stack->data.case_stmt.printname);
		}
	      break;
	    }
	}
    }
}
4415
28d81abb
RK
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (tree value, tree (*converter) (tree, tree), tree label,
	  tree *duplicate)
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  /* A deeper stack_block_stack means the label would jump into the scope
     of a cleanup or variable-sized array — not allowed.  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.
     A null VALUE denotes the default label and needs no conversion.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  /* Emit the "unreachable code" warning for code before the first label.  */
  check_seenlabel ();

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0
      && (TREE_CONSTANT_OVERFLOW (value)
	  || ! int_fits_type_p (value, index_type)))
    return 3;

  /* A single value is a degenerate range [VALUE, VALUE].  */
  return add_case_node (value, value, label, duplicate, false);
}
4468
/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

int
pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
		tree label, tree *duplicate)
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  /* Reject labels that would jump into a cleanup or var-sized scope.  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  check_seenlabel ();

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 != 0 && tree_int_cst_lt (value2, value1))
    return 4;

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.
     NOTE(review): VALUE2 can still be null here only if
     TYPE_MAX_VALUE (index_type) itself was a null tree — presumably the
     case for unbounded index types; confirm against tree.h semantics.  */
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (nominal_type);

  value1 = (*converter) (nominal_type, value1);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate, false);
}
4534
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.

   LOW/HIGH bound the case range (HIGH may be null for a single value;
   both null means the default label).  LABEL is the label to jump to.
   On overlap, the existing node's label is stored in *DUPLICATE and 2 is
   returned; otherwise returns 0.  When DONT_EXPAND_LABEL is set, the
   label rtx is not emitted (used by the tree-ssa path).  */

int
add_case_node (tree low, tree high, tree label, tree *duplicate,
	       bool dont_expand_label)
{
  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */
  if (!high)
    high = low;

  /* Handle default labels specially.  (Since HIGH was just defaulted to
     LOW above, this triggers exactly when both were null.)  */
  if (!high && !low)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
      if (!dont_expand_label)
        expand_label (label);
      return 0;
    }

  /* Standard binary-search-tree descent; Q tracks the link to patch,
     P the prospective parent.  */
  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.  */

  r = ggc_alloc (sizeof (struct case_node));
  r->low = low;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = high;

  r->code_label = label;
  if (!dont_expand_label)
    expand_label (label);

  /* Link the new leaf in place.  */
  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Rebalance the AVL tree, walking from the new leaf R back toward the
     root.  balance is -1/0/+1 = left-heavy/even/right-heavy; at most one
     single or double rotation is needed, after which we stop.  */
  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance = -1;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    /* P was the root; R takes its place.  */
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }

	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */

		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;
		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }

		  else
		    case_stack->data.case_stmt.case_list = r;
		}

	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */
		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;

		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }

		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      /* Propagate the height change one level up.  */
      r = p;
      p = p->parent;
    }

  return 0;
}
28d81abb 4777\f
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the constant bit mask.  */
  HOST_WIDE_INT lo;	/* Low word of the constant bit mask.  */
  rtx label;		/* Target to jump to when a mask bit matches.  */
  int bits;		/* Number of case nodes folded into this test.  */
};
4802
4803/* Determine whether "1 << x" is relatively cheap in word_mode. */
4804
7e51717c
AJ
4805static
4806bool lshift_cheap_p (void)
9bb231fd
RS
4807{
4808 static bool init = false;
4809 static bool cheap = true;
4810
4811 if (!init)
4812 {
4813 rtx reg = gen_rtx_REG (word_mode, 10000);
4814 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4815 cheap = cost < COSTS_N_INSNS (3);
4816 init = true;
4817 }
4818
4819 return cheap;
4820}
4821
4822/* Comparison function for qsort to order bit tests by decreasing
4823 number of case nodes, i.e. the node with the most cases gets
4824 tested first. */
4825
7e51717c
AJ
4826static
4827int case_bit_test_cmp (const void *p1, const void *p2)
9bb231fd
RS
4828{
4829 const struct case_bit_test *d1 = p1;
4830 const struct case_bit_test *d2 = p2;
4831
4832 return d2->bits - d1->bits;
4833}
4834
4835/* Expand a switch statement by a short sequence of bit-wise
4836 comparisons. "switch(x)" is effectively converted into
4837 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4838 integer constants.
4839
4840 INDEX_EXPR is the value being switched on, which is of
4841 type INDEX_TYPE. MINVAL is the lowest case value of in
4842 the case nodes, of INDEX_TYPE type, and RANGE is highest
4843 value minus MINVAL, also of type INDEX_TYPE. NODES is
4844 the set of case nodes, and DEFAULT_LABEL is the label to
4845 branch to should none of the cases match.
4846
4847 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
4848 node targets. */
4849
4850static void
46c5ad27
AJ
4851emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4852 tree range, case_node_ptr nodes, rtx default_label)
9bb231fd
RS
4853{
4854 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4855 enum machine_mode mode;
4856 rtx expr, index, label;
4857 unsigned int i,j,lo,hi;
4858 struct case_node *n;
4859 unsigned int count;
4860
4861 count = 0;
4862 for (n = nodes; n; n = n->right)
4863 {
4864 label = label_rtx (n->code_label);
4865 for (i = 0; i < count; i++)
4866 if (same_case_target_p (label, test[i].label))
4867 break;
4868
4869 if (i == count)
4870 {
4871 if (count >= MAX_CASE_BIT_TESTS)
4872 abort ();
4873 test[i].hi = 0;
4874 test[i].lo = 0;
4875 test[i].label = label;
4876 test[i].bits = 1;
4877 count++;
4878 }
4879 else
4880 test[i].bits++;
4881
4882 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4883 n->low, minval)), 1);
4884 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4885 n->high, minval)), 1);
4886 for (j = lo; j <= hi; j++)
4887 if (j >= HOST_BITS_PER_WIDE_INT)
4888 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
4889 else
4890 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4891 }
4892
4893 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4894
4895 index_expr = fold (build (MINUS_EXPR, index_type,
4896 convert (index_type, index_expr),
4897 convert (index_type, minval)));
4898 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4899 emit_queue ();
4900 index = protect_from_queue (index, 0);
4901 do_pending_stack_adjust ();
4902
4903 mode = TYPE_MODE (index_type);
4904 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4905 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4906 default_label);
4907
4908 index = convert_to_mode (word_mode, index, 0);
4909 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4910 index, NULL_RTX, 1, OPTAB_WIDEN);
4911
4912 for (i = 0; i < count; i++)
4913 {
4914 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4915 expr = expand_binop (word_mode, and_optab, index, expr,
4916 NULL_RTX, 1, OPTAB_WIDEN);
4917 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4918 word_mode, 1, test[i].label);
4919 }
4920
4921 emit_jump (default_label);
4922}
ad82abb8 4923
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif

/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   Chooses among four dispatch strategies: nothing (no cases), a short
   sequence of bit-mask tests, a binary decision tree of compares, or a
   jump table (casesi/tablejump).  */

void
expand_end_case_type (tree orig_index, tree orig_type)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n, *m;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;
  struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  bool exit_done = false;
  int unsignedp;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)
    return;

  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TYPE_UNSIGNED (index_type);
  if (orig_type == NULL)
    orig_type = TREE_TYPE (orig_index);

  do_pending_stack_adjust ();

  /* This might get a spurious warning in the presence of a syntax error;
     it could be fixed by moving the call to check_seenlabel after the
     check for error_mark_node, and copying the code of check_seenlabel that
     deals with case_stack->data.case_stmt.line_number_status /
     restore_line_number_status in front of the call to end_cleanup_deferral;
     However, this might miss some useful warnings in the presence of
     non-syntax errors.  */
  check_seenlabel ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  /* Share the exit label if possible.  */
	  if (thiscase->exit_label)
	    {
	      SET_DECL_RTL (thiscase->data.case_stmt.default_label,
			    thiscase->exit_label);
	      exit_done = true;
	    }
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* The case list is an AVL tree while labels are collected; flatten
	 it into a right-linked, sorted list before processing.  */
      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);
      strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
				default_label);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      uniq = 0;
      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* Count the number of unique case node targets.  */
	  uniq++;
	  lab = label_rtx (n->code_label);
	  for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
	    if (same_case_target_p (label_rtx (m->code_label), lab))
	      {
		uniq--;
		break;
	      }
	}

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      /* No cases at all: evaluate the index for side effects and jump
	 straight to the default label.  */
      if (count == 0)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      else if (CASE_USE_BIT_TESTS
	       && ! TREE_CONSTANT (index_expr)
	       && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	       && compare_tree_int (range, 0) > 0
	       && lshift_cheap_p ()
	       && ((uniq == 1 && count >= 3)
		   || (uniq == 2 && count >= 5)
		   || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = integer_zero_node;
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       thiscase->data.case_stmt.case_list,
			       default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  /* Jump-table dispatch: try a casesi insn first, then a plain
	     tablejump.  */
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      index_type = thiscase->data.case_stmt.nominal_type;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = integer_zero_node;
		  range = maxval;
		}

	      if (! try_tablejump (index_type, index_expr, minval, range,
				   table_label, default_label))
		abort ();
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->low, minval)), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->high, minval)), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      /* Move the dispatch code emitted above to before the switch body,
	 at the saved start position.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      if (squeeze_notes (&before_case, &end))
	abort ();
      reorder_insns (before_case, end,
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label && !exit_done)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
5282
57641239
RK
5283/* Convert the tree NODE into a list linked by the right field, with the left
5284 field zeroed. RIGHT is used for recursion; it is a list to be placed
5285 rightmost in the resulting list. */
5286
5287static struct case_node *
46c5ad27 5288case_tree2list (struct case_node *node, struct case_node *right)
57641239
RK
5289{
5290 struct case_node *left;
5291
5292 if (node->right)
5293 right = case_tree2list (node->right, right);
5294
5295 node->right = right;
51723711 5296 if ((left = node->left))
57641239
RK
5297 {
5298 node->left = 0;
5299 return case_tree2list (left, node);
5300 }
5301
5302 return node;
5303}
ca695ac9 5304
28d81abb
RK
5305/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5306
5307static void
46c5ad27 5308do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
28d81abb 5309{
d43e0b7d 5310 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
28d81abb 5311 {
d51d146f 5312 if (op1 == op2)
28d81abb
RK
5313 emit_jump (label);
5314 }
5315 else
d43e0b7d
RK
5316 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5317 (GET_MODE (op1) == VOIDmode
5318 ? GET_MODE (op2) : GET_MODE (op1)),
5319 unsignedp, label);
28d81abb
RK
5320}
5321\f
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    /* -1 marks "strange" control chars; their presence disables
	       the text heuristic below.  */
	    COST_TABLE (i) = -1;
	}

      /* Override specific characters commonly written with "\".  */
      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
5400
/* Determine whether two case labels branch to the same target.
   With the tree-ssa merge the insn-following logic below was disabled;
   only label identity is checked now.  */

static bool
same_case_target_p (rtx l1, rtx l2)
{
#if 0
  /* Disabled: the old RTL-era logic followed simple jumps so that two
     labels whose code immediately jumped to a common place compared
     equal.  Kept for reference.  */
  rtx i1, i2;

  if (l1 == l2)
    return true;

  i1 = next_real_insn (l1);
  i2 = next_real_insn (l2);
  if (i1 == i2)
    return true;

  if (i1 && simplejump_p (i1))
    {
      l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
    }

  if (i2 && simplejump_p (i2))
    {
      l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
    }
#endif
  /* When coming from gimple, we usually won't have emitted either
     the labels or the body of the switch statement.  The job being
     done here should be done via jump threading at the tree level.
     Cases that go the same place should have the same label.  */
  return l1 == l2;
}
5433
5434/* Delete nodes that branch to the default label from a list of
5435 case nodes. Eg. case 5: default: becomes just default: */
5436
5437static void
46c5ad27 5438strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
100e3acb
RS
5439{
5440 case_node_ptr ptr;
5441
5442 while (*prev)
5443 {
5444 ptr = *prev;
5445 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5446 *prev = ptr->right;
5447 else
5448 prev = &ptr->right;
5449 }
5450}
5451
28d81abb
RK
5452/* Scan an ordered list of case nodes
5453 combining those with consecutive values or ranges.
5454
5455 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5456
5457static void
46c5ad27 5458group_case_nodes (case_node_ptr head)
28d81abb
RK
5459{
5460 case_node_ptr node = head;
5461
5462 while (node)
5463 {
6de9cd9a 5464 rtx lab;
28d81abb
RK
5465 case_node_ptr np = node;
5466
6de9cd9a
DN
5467 lab = label_rtx (node->code_label);
5468
28d81abb
RK
5469 /* Try to group the successors of NODE with NODE. */
5470 while (((np = np->right) != 0)
5471 /* Do they jump to the same place? */
100e3acb 5472 && same_case_target_p (label_rtx (np->code_label), lab)
28d81abb
RK
5473 /* Are their ranges consecutive? */
5474 && tree_int_cst_equal (np->low,
5475 fold (build (PLUS_EXPR,
5476 TREE_TYPE (node->high),
5477 node->high,
5478 integer_one_node)))
5479 /* An overflow is not consecutive. */
5480 && tree_int_cst_lt (node->high,
5481 fold (build (PLUS_EXPR,
5482 TREE_TYPE (node->high),
5483 node->high,
5484 integer_one_node))))
5485 {
5486 node->high = np->high;
5487 }
5488 /* NP is the first node after NODE which can't be grouped with it.
5489 Delete the nodes in between, and move on to that node. */
5490 node->right = np;
5491 node = np;
5492 }
5493}
5494
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      /* A range contributes both endpoints to the cost.  */
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* The first node alone already holds half the cost, so
		     splitting would not help.  Leave this branch lopsided,
		     but optimize left-hand side and fill in `parent' fields
		     for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP is the pivot.  Clearing *NPP terminates the first half of
	     the list (still reachable from LEFT through `right' links),
	     which becomes the pivot's left branch below.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
5611\f
5612/* Search the parent sections of the case node tree
5613 to see if a test for the lower bound of NODE would be redundant.
5614 INDEX_TYPE is the type of the index expression.
5615
5616 The instructions to generate the case decision tree are
5617 output in the same order as nodes are processed so it is
5618 known that if a parent node checks the range of the current
5619 node minus one that the current node is bounded at its lower
5620 span. Thus the test would be redundant. */
5621
5622static int
46c5ad27 5623node_has_low_bound (case_node_ptr node, tree index_type)
28d81abb
RK
5624{
5625 tree low_minus_one;
5626 case_node_ptr pnode;
5627
5628 /* If the lower bound of this node is the lowest value in the index type,
5629 we need not test it. */
5630
5631 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5632 return 1;
5633
5634 /* If this node has a left branch, the value at the left must be less
5635 than that at this node, so it cannot be bounded at the bottom and
5636 we need not bother testing any further. */
5637
5638 if (node->left)
5639 return 0;
5640
5641 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5642 node->low, integer_one_node));
5643
5644 /* If the subtraction above overflowed, we can't verify anything.
5645 Otherwise, look for a parent that tests our value - 1. */
5646
5647 if (! tree_int_cst_lt (low_minus_one, node->low))
5648 return 0;
5649
5650 for (pnode = node->parent; pnode; pnode = pnode->parent)
5651 if (tree_int_cst_equal (low_minus_one, pnode->high))
5652 return 1;
5653
5654 return 0;
5655}
5656
5657/* Search the parent sections of the case node tree
5658 to see if a test for the upper bound of NODE would be redundant.
5659 INDEX_TYPE is the type of the index expression.
5660
5661 The instructions to generate the case decision tree are
5662 output in the same order as nodes are processed so it is
5663 known that if a parent node checks the range of the current
5664 node plus one that the current node is bounded at its upper
5665 span. Thus the test would be redundant. */
5666
5667static int
46c5ad27 5668node_has_high_bound (case_node_ptr node, tree index_type)
28d81abb
RK
5669{
5670 tree high_plus_one;
5671 case_node_ptr pnode;
5672
e1ee5cdc
RH
5673 /* If there is no upper bound, obviously no test is needed. */
5674
5675 if (TYPE_MAX_VALUE (index_type) == NULL)
5676 return 1;
5677
28d81abb
RK
5678 /* If the upper bound of this node is the highest value in the type
5679 of the index expression, we need not test against it. */
5680
5681 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5682 return 1;
5683
5684 /* If this node has a right branch, the value at the right must be greater
5685 than that at this node, so it cannot be bounded at the top and
5686 we need not bother testing any further. */
5687
5688 if (node->right)
5689 return 0;
5690
5691 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5692 node->high, integer_one_node));
5693
5694 /* If the addition above overflowed, we can't verify anything.
5695 Otherwise, look for a parent that tests our value + 1. */
5696
5697 if (! tree_int_cst_lt (node->high, high_plus_one))
5698 return 0;
5699
5700 for (pnode = node->parent; pnode; pnode = pnode->parent)
5701 if (tree_int_cst_equal (high_plus_one, pnode->low))
5702 return 1;
5703
5704 return 0;
5705}
5706
5707/* Search the parent sections of the
5708 case node tree to see if both tests for the upper and lower
5709 bounds of NODE would be redundant. */
5710
5711static int
46c5ad27 5712node_is_bounded (case_node_ptr node, tree index_type)
28d81abb
RK
5713{
5714 return (node_has_low_bound (node, index_type)
5715 && node_has_high_bound (node, index_type));
5716}
5717
5718/* Emit an unconditional jump to LABEL unless it would be dead code. */
5719
5720static void
46c5ad27 5721emit_jump_if_reachable (rtx label)
28d81abb
RK
5722{
5723 if (GET_CODE (get_last_insn ()) != BARRIER)
5724 emit_jump (label);
5725}
5726\f
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index,
			convert_modes (mode, imode,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      /* Right subtree needs no further tests; branch straight to
		 its code when INDEX > HIGH, then emit the left subtree.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      /* Mirror image: left subtree is fully bounded.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (index,
				convert_modes (mode, imode,
					       expand_expr (node->right->low,
							    NULL_RTX,
							    VOIDmode, 0),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (index,
				convert_modes (mode, imode,
					       expand_expr (node->left->low,
							    NULL_RTX,
							    VOIDmode, 0),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only
	     one right child; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  /* INDEX < this node's value can only mean default,
		     since there is no left subtree.  */
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->right->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  /* INDEX > this node's value can only mean default,
		     since there is no right subtree.  */
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->left->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_expr (node->high, NULL_RTX,
						   VOIDmode, 0),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.
	     At this point INDEX <= HIGH, so INDEX >= LOW selects
	     this node's own range.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      /* Only the upper end of the range still needs checking.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      /* Only the lower end of the range still needs checking.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
						    high, low)),
				       NULL_RTX, mode, 0);

	      /* Note the unconditionally unsigned comparison (last arg 1):
		 after subtracting LOW, out-of-range values wrap to large
		 unsigned numbers.  */
	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  /* Any value that survived the bound checks is in this node's
	     range.  */
	  emit_jump (label_rtx (node->code_label));
	}
    }
}
e2500fed
GK
6136
6137#include "gt-stmt.h"
This page took 3.348587 seconds and 5 git commands to generate.