1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
60 #include "target.h"
61 #include "regs.h"
62 \f
63 /* Functions and data structures for expanding case statements. */
64
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
68
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
72
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
78
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
85 in order. */
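/* Illustrative sketch (not part of the original source): for a sparse
   switch such as

     switch (x) { case 1: ...; case 5: ...; case 40: ...; default: ...; }

   no branch table is used; the balanced case_node tree has the node for 5
   at the root with 1 and 40 as its left and right sons, and emitting it in
   order produces compare-and-jump code roughly equivalent to

     if (x == 5) goto L5;
     if (x < 5) { if (x == 1) goto L1; }
     else if (x == 40) goto L40;
     goto Ldefault;  */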
86
87 struct case_node GTY(())
88 {
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int balance;
96 };
97
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
100
101 /* These are used by estimate_case_costs and balance_case_nodes. */
102
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
107
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
109 is unsigned. */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
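/* For example (illustration, not in the original source), COST_TABLE (-1)
   expands to cost_table_[0] and COST_TABLE (127) to cost_table_[128], so
   the whole range -1 .. 127 fits in the 129-entry array.  */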
111 \f
112 /* Stack of control and binding constructs we are currently inside.
113
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
120
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
125
126 Each type of construct has its own individual stack.
127 For example, conditionals have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
129
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
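/* Illustrative sketch (not part of the original source): while expanding
   the body of

     if (a) { char buf[n]; use (buf); }

   the stacks would look roughly like

     nesting_stack -> [BLOCK_NESTING for { }] -> [COND_NESTING for the if]
     cond_stack    -> [COND_NESTING for the if]
     block_stack   -> [BLOCK_NESTING for { }]

   and the block entry is also on stack_block_stack, since the
   variable-sized BUF gives that contour a stack level to restore.  */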
136
137 struct nesting GTY(())
138 {
139 struct nesting *all;
140 struct nesting *next;
141 int depth;
142 rtx exit_label;
143 enum nesting_desc {
144 COND_NESTING,
145 BLOCK_NESTING,
146 CASE_NESTING
147 } desc;
148 union nesting_u
149 {
150 /* For conds (if-then and if-then-else statements). */
151 struct nesting_cond
152 {
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
156 rtx endif_label;
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
159 rtx next_label;
160 } GTY ((tag ("COND_NESTING"))) cond;
161 /* For variable binding contours. */
162 struct nesting_block
163 {
164 /* Sequence number of this binding contour within the function,
165 in order of entry. */
166 int block_start_count;
167 /* Nonzero => value to restore stack to on exit. */
168 rtx stack_level;
169 /* The NOTE that starts this contour.
170 Used by expand_goto to check whether the destination
171 is within each contour or not. */
172 rtx first_insn;
173 /* Innermost containing binding contour that has a stack level. */
174 struct nesting *innermost_stack_block;
175 /* List of cleanups to be run on exit from this contour.
176 This is a list of expressions to be evaluated.
177 The TREE_PURPOSE of each link is the ..._DECL node
178 which the cleanup pertains to. */
179 tree cleanups;
180 /* List of cleanup-lists of blocks containing this block,
181 as they were at the locus where this block appears.
182 There is an element for each containing block,
183 ordered innermost containing block first.
184 The tail of this list can be 0,
185 if all remaining elements would be empty lists.
186 The element's TREE_VALUE is the cleanup-list of that block,
187 which may be null. */
188 tree outer_cleanups;
189 /* Chain of labels defined inside this binding contour.
190 For contours that have stack levels or cleanups. */
191 struct label_chain *label_chain;
192 /* Nonzero if this is associated with an EH region. */
193 int exception_region;
194 /* The saved target_temp_slot_level from our outer block.
195 We may reset target_temp_slot_level to be the level of
196 this block, if that is done, target_temp_slot_level
197 reverts to the saved target_temp_slot_level at the very
198 end of the block. */
199 int block_target_temp_slot_level;
200 /* True if we are currently emitting insns in an area of
201 output code that is controlled by a conditional
202 expression. This is used by the cleanup handling code to
203 generate conditional cleanup actions. */
204 int conditional_code;
205 /* A place to move the start of the exception region for any
206 of the conditional cleanups, must be at the end or after
207 the start of the last unconditional cleanup, and before any
208 conditional branch points. */
209 rtx last_unconditional_cleanup;
210 } GTY ((tag ("BLOCK_NESTING"))) block;
211 /* For switch (C) or case (Pascal) statements. */
212 struct nesting_case
213 {
214 /* The insn after which the case dispatch should finally
215 be emitted. Zero for a dummy. */
216 rtx start;
217 /* A list of case labels; it is first built as an AVL tree.
218 During expand_end_case, this is converted to a list, and may be
219 rearranged into a nearly balanced binary tree. */
220 struct case_node *case_list;
221 /* Label to jump to if no case matches. */
222 tree default_label;
223 /* The expression to be dispatched on. */
224 tree index_expr;
225 /* Type that INDEX_EXPR should be converted to. */
226 tree nominal_type;
227 /* Name of this kind of statement, for warnings. */
228 const char *printname;
229 /* Used to save no_line_numbers till we see the first case label.
230 We set this to -1 when we see the first case label in this
231 case statement. */
232 int line_number_status;
233 } GTY ((tag ("CASE_NESTING"))) case_stmt;
234 } GTY ((desc ("%1.desc"))) data;
235 };
236
237 /* Allocate and return a new `struct nesting'. */
238
239 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
240
241 /* Pop the nesting stack element by element until we pop off
242 the element which is at the top of STACK.
243 Update all the other stacks, popping off elements from them
244 as we pop them from nesting_stack. */
245
246 #define POPSTACK(STACK) \
247 do { struct nesting *target = STACK; \
248 struct nesting *this; \
249 do { this = nesting_stack; \
250 if (cond_stack == this) \
251 cond_stack = cond_stack->next; \
252 if (block_stack == this) \
253 block_stack = block_stack->next; \
254 if (stack_block_stack == this) \
255 stack_block_stack = stack_block_stack->next; \
256 if (case_stack == this) \
257 case_stack = case_stack->next; \
258 nesting_depth = nesting_stack->depth - 1; \
259 nesting_stack = this->all; } \
260 while (this != target); } while (0)
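/* For illustration (not part of the original source), expand_end_cond
   unwinds its construct essentially like this:

     struct nesting *thiscond = cond_stack;
     ... emit the endif label ...
     POPSTACK (cond_stack);

   which pops nesting_stack down to and including THISCOND while keeping
   cond_stack, block_stack, stack_block_stack and case_stack in step.  */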
261 \f
262 /* In some cases it is impossible to generate code for a forward goto
263 until the label definition is seen. This happens when it may be necessary
264 for the goto to reset the stack pointer: we don't yet know how to do that.
265 So expand_goto puts an entry on this fixup list.
266 Each time a binding contour that resets the stack is exited,
267 we check each fixup.
268 If the target label has now been defined, we can insert the proper code. */
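/* Illustrative example (not part of the original source):

     { size_t n = f (); { char buf[n]; if (g ()) goto out; use (buf); } }
     out: ;

   The goto leaves a contour whose stack level was lowered for BUF, but
   `out' is not yet defined when the jump is expanded, so expand_fixup
   records it here and fixup_gotos later inserts the stack restore once
   the label has been emitted.  */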
269
270 struct goto_fixup GTY(())
271 {
272 /* Points to following fixup. */
273 struct goto_fixup *next;
274 /* Points to the insn before the jump insn.
275 If more code must be inserted, it goes after this insn. */
276 rtx before_jump;
277 /* The LABEL_DECL that this jump is jumping to, or 0
278 for break, continue or return. */
279 tree target;
280 /* The BLOCK for the place where this goto was found. */
281 tree context;
282 /* The CODE_LABEL rtx that this is jumping to. */
283 rtx target_rtl;
284 /* Number of binding contours started in current function
285 before the label reference. */
286 int block_start_count;
287 /* The outermost stack level that should be restored for this jump.
288 Each time a binding contour that resets the stack is exited,
289 if the target label is *not* yet defined, this slot is updated. */
290 rtx stack_level;
291 /* List of lists of cleanup expressions to be run by this goto.
292 There is one element for each block that this goto is within.
293 The tail of this list can be 0,
294 if all remaining elements would be empty.
295 The TREE_VALUE contains the cleanup list of that block as of the
296 time this goto was seen.
297 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
298 tree cleanup_list_list;
299 };
300
301 /* Within any binding contour that must restore a stack level,
302 all labels are recorded with a chain of these structures. */
303
304 struct label_chain GTY(())
305 {
306 /* Points to following fixup. */
307 struct label_chain *next;
308 tree label;
309 };
310
311 struct stmt_status GTY(())
312 {
313 /* Chain of all pending binding contours. */
314 struct nesting * x_block_stack;
315
316 /* If any new stacks are added here, add them to POPSTACKS too. */
317
318 /* Chain of all pending binding contours that restore stack levels
319 or have cleanups. */
320 struct nesting * x_stack_block_stack;
321
322 /* Chain of all pending conditional statements. */
323 struct nesting * x_cond_stack;
324
325 /* Chain of all pending case or switch statements. */
326 struct nesting * x_case_stack;
327
328 /* Separate chain including all of the above,
329 chained through the `all' field. */
330 struct nesting * x_nesting_stack;
331
332 /* Number of entries on nesting_stack now. */
333 int x_nesting_depth;
334
335 /* Number of binding contours started so far in this function. */
336 int x_block_start_count;
337
338 /* Each time we expand an expression-statement,
339 record the expr's type and its RTL value here. */
340 tree x_last_expr_type;
341 rtx x_last_expr_value;
342 rtx x_last_expr_alt_rtl;
343
344 /* Nonzero if within a ({...}) grouping, in which case we must
345 always compute a value for each expr-stmt in case it is the last one. */
346 int x_expr_stmts_for_value;
347
348 /* Location of last line-number note, whether we actually
349 emitted it or not. */
350 location_t x_emit_locus;
351
352 struct goto_fixup *x_goto_fixup_chain;
353 };
354
355 #define block_stack (cfun->stmt->x_block_stack)
356 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
357 #define cond_stack (cfun->stmt->x_cond_stack)
358 #define case_stack (cfun->stmt->x_case_stack)
359 #define nesting_stack (cfun->stmt->x_nesting_stack)
360 #define nesting_depth (cfun->stmt->x_nesting_depth)
361 #define current_block_start_count (cfun->stmt->x_block_start_count)
362 #define last_expr_type (cfun->stmt->x_last_expr_type)
363 #define last_expr_value (cfun->stmt->x_last_expr_value)
364 #define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
365 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
366 #define emit_locus (cfun->stmt->x_emit_locus)
367 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
368
369 /* Nonzero if we are using EH to handle cleanups. */
370 int using_eh_for_cleanups_p = 0;
371
372 static int n_occurrences (int, const char *);
373 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
374 static void expand_goto_internal (tree, rtx, rtx);
375 static int expand_fixup (tree, rtx, rtx);
376 static void expand_nl_goto_receiver (void);
377 static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
378 static bool check_operand_nalternatives (tree, tree);
379 static bool check_unique_operand_names (tree, tree);
380 static char *resolve_operand_name_1 (char *, tree, tree);
381 static void expand_null_return_1 (rtx);
382 static enum br_predictor return_prediction (rtx);
383 static rtx shift_return_value (rtx);
384 static void expand_value_return (rtx);
385 static void expand_cleanups (tree, int, int);
386 static void check_seenlabel (void);
387 static void do_jump_if_equal (rtx, rtx, rtx, int);
388 static int estimate_case_costs (case_node_ptr);
389 static bool same_case_target_p (rtx, rtx);
390 static void strip_default_case_nodes (case_node_ptr *, rtx);
391 static bool lshift_cheap_p (void);
392 static int case_bit_test_cmp (const void *, const void *);
393 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
394 static void group_case_nodes (case_node_ptr);
395 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
396 static int node_has_low_bound (case_node_ptr, tree);
397 static int node_has_high_bound (case_node_ptr, tree);
398 static int node_is_bounded (case_node_ptr, tree);
399 static void emit_jump_if_reachable (rtx);
400 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
401 static struct case_node *case_tree2list (case_node *, case_node *);
402 \f
403 void
404 using_eh_for_cleanups (void)
405 {
406 using_eh_for_cleanups_p = 1;
407 }
408
409 void
410 init_stmt_for_function (void)
411 {
412 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
413 }
414 \f
415 /* Record the current file and line. Called from emit_line_note. */
416
417 void
418 set_file_and_line_for_stmt (location_t location)
419 {
420 /* If we're outputting an inline function, and we add a line note,
421 there may be no CFUN->STMT information. So, there's no need to
422 update it. */
423 if (cfun->stmt)
424 emit_locus = location;
425 }
426
427 /* Emit a no-op instruction. */
428
429 void
430 emit_nop (void)
431 {
432 rtx last_insn;
433
434 last_insn = get_last_insn ();
435 if (!optimize
436 && (GET_CODE (last_insn) == CODE_LABEL
437 || (GET_CODE (last_insn) == NOTE
438 && prev_real_insn (last_insn) == 0)))
439 emit_insn (gen_nop ());
440 }
441 \f
442 /* Return the rtx-label that corresponds to a LABEL_DECL,
443 creating it if necessary. */
444
445 rtx
446 label_rtx (tree label)
447 {
448 if (TREE_CODE (label) != LABEL_DECL)
449 abort ();
450
451 if (!DECL_RTL_SET_P (label))
452 {
453 rtx r = gen_label_rtx ();
454 SET_DECL_RTL (label, r);
455 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
456 LABEL_PRESERVE_P (r) = 1;
457 }
458
459 return DECL_RTL (label);
460 }
461
462 /* As above, but also put it on the forced-reference list of the
463 function that contains it. */
464 rtx
465 force_label_rtx (tree label)
466 {
467 rtx ref = label_rtx (label);
468 tree function = decl_function_context (label);
469 struct function *p;
470
471 if (!function)
472 abort ();
473
474 if (function != current_function_decl)
475 p = find_function_data (function);
476 else
477 p = cfun;
478
479 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
480 p->expr->x_forced_labels);
481 return ref;
482 }
483
484 /* Add an unconditional jump to LABEL as the next sequential instruction. */
485
486 void
487 emit_jump (rtx label)
488 {
489 do_pending_stack_adjust ();
490 emit_jump_insn (gen_jump (label));
491 emit_barrier ();
492 }
493
494 /* Emit code to jump to the address
495 specified by the pointer expression EXP. */
496
497 void
498 expand_computed_goto (tree exp)
499 {
500 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
501
502 x = convert_memory_address (Pmode, x);
503
504 emit_queue ();
505 do_pending_stack_adjust ();
506 emit_indirect_jump (x);
507 }
508 \f
509 /* Handle goto statements and the labels that they can go to. */
510
511 /* Specify the location in the RTL code of a label LABEL,
512 which is a LABEL_DECL tree node.
513
514 This is used for the kind of label that the user can jump to with a
515 goto statement, and for alternatives of a switch or case statement.
516 RTL labels generated for loops and conditionals don't go through here;
517 they are generated directly at the RTL level, by other functions below.
518
519 Note that this has nothing to do with defining label *names*.
520 Languages vary in how they do that and what that even means. */
521
522 void
523 expand_label (tree label)
524 {
525 struct label_chain *p;
526 rtx label_r = label_rtx (label);
527
528 do_pending_stack_adjust ();
529 emit_label (label_r);
530 if (DECL_NAME (label))
531 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
532
533 if (DECL_NONLOCAL (label))
534 {
535 expand_nl_goto_receiver ();
536 nonlocal_goto_handler_labels
537 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
538 nonlocal_goto_handler_labels);
539 }
540
541 if (FORCED_LABEL (label))
542 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
543
544 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
545 maybe_set_first_label_num (label_r);
546
547 if (stack_block_stack != 0)
548 {
549 p = ggc_alloc (sizeof (struct label_chain));
550 p->next = stack_block_stack->data.block.label_chain;
551 stack_block_stack->data.block.label_chain = p;
552 p->label = label;
553 }
554 }
555
556 /* Generate RTL code for a `goto' statement with target label LABEL.
557 LABEL should be a LABEL_DECL tree node that was or will later be
558 defined with `expand_label'. */
559
560 void
561 expand_goto (tree label)
562 {
563 #ifdef ENABLE_CHECKING
564 /* Check for a nonlocal goto to a containing function. Should have
565 gotten translated to __builtin_nonlocal_goto. */
566 tree context = decl_function_context (label);
567 if (context != 0 && context != current_function_decl)
568 abort ();
569 #endif
570
571 expand_goto_internal (label, label_rtx (label), NULL_RTX);
572 }
573
574 /* Generate RTL code for a `goto' statement with target label BODY.
575 LABEL should be a CODE_LABEL rtx.
576 LAST_INSN, if non-0, is the rtx we should consider as the last
577 insn emitted (for the purposes of cleaning up a return). */
578
579 static void
580 expand_goto_internal (tree body, rtx label, rtx last_insn)
581 {
582 struct nesting *block;
583 rtx stack_level = 0;
584
585 if (GET_CODE (label) != CODE_LABEL)
586 abort ();
587
588 /* If label has already been defined, we can tell now
589 whether and how we must alter the stack level. */
590
591 if (PREV_INSN (label) != 0)
592 {
593 /* Find the innermost pending block that contains the label.
594 (Check containment by comparing insn-uids.)
595 Then restore the outermost stack level within that block,
596 and do cleanups of all blocks contained in it. */
597 for (block = block_stack; block; block = block->next)
598 {
599 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
600 break;
601 if (block->data.block.stack_level != 0)
602 stack_level = block->data.block.stack_level;
603 /* Execute the cleanups for blocks we are exiting. */
604 if (block->data.block.cleanups != 0)
605 {
606 expand_cleanups (block->data.block.cleanups, 1, 1);
607 do_pending_stack_adjust ();
608 }
609 }
610
611 if (stack_level)
612 {
613 /* Ensure stack adjust isn't done by emit_jump, as this
614 would clobber the stack pointer. This one should be
615 deleted as dead by flow. */
616 clear_pending_stack_adjust ();
617 do_pending_stack_adjust ();
618
619 /* Don't do this adjust if it's to the end label and this function
620 is to return with a depressed stack pointer. */
621 if (label == return_label
622 && (((TREE_CODE (TREE_TYPE (current_function_decl))
623 == FUNCTION_TYPE)
624 && (TYPE_RETURNS_STACK_DEPRESSED
625 (TREE_TYPE (current_function_decl))))))
626 ;
627 else
628 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
629 }
630
631 if (body != 0 && DECL_TOO_LATE (body))
632 error ("jump to `%s' invalidly jumps into binding contour",
633 IDENTIFIER_POINTER (DECL_NAME (body)));
634 }
635 /* Label not yet defined: may need to put this goto
636 on the fixup list. */
637 else if (! expand_fixup (body, label, last_insn))
638 {
639 /* No fixup needed. Record that the label is the target
640 of at least one goto that has no fixup. */
641 if (body != 0)
642 TREE_ADDRESSABLE (body) = 1;
643 }
644
645 emit_jump (label);
646 }
647 \f
648 /* Generate if necessary a fixup for a goto
649 whose target label in tree structure (if any) is TREE_LABEL
650 and whose target in rtl is RTL_LABEL.
651
652 If LAST_INSN is nonzero, we pretend that the jump appears
653 after insn LAST_INSN instead of at the current point in the insn stream.
654
655 The fixup will be used later to insert insns just before the goto.
656 Those insns will restore the stack level as appropriate for the
657 target label, and will (in the case of C++) also invoke any object
658 destructors which have to be invoked when we exit the scopes which
659 are exited by the goto.
660
661 Value is nonzero if a fixup is made. */
662
663 static int
664 expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
665 {
666 struct nesting *block, *end_block;
667
668 /* See if we can recognize which block the label will be output in.
669 This is possible in some very common cases.
670 If we succeed, set END_BLOCK to that block.
671 Otherwise, set it to 0. */
672
673 if (cond_stack
674 && (rtl_label == cond_stack->data.cond.endif_label
675 || rtl_label == cond_stack->data.cond.next_label))
676 end_block = cond_stack;
677 else
678 end_block = 0;
679
680 /* Now set END_BLOCK to the binding level to which we will return. */
681
682 if (end_block)
683 {
684 struct nesting *next_block = end_block->all;
685 block = block_stack;
686
687 /* First see if the END_BLOCK is inside the innermost binding level.
688 If so, then no cleanups or stack levels are relevant. */
689 while (next_block && next_block != block)
690 next_block = next_block->all;
691
692 if (next_block)
693 return 0;
694
695 /* Otherwise, set END_BLOCK to the innermost binding level
696 which is outside the relevant control-structure nesting. */
697 next_block = block_stack->next;
698 for (block = block_stack; block != end_block; block = block->all)
699 if (block == next_block)
700 next_block = next_block->next;
701 end_block = next_block;
702 }
703
704 /* Does any containing block have a stack level or cleanups?
705 If not, no fixup is needed, and that is the normal case
706 (the only case, for standard C). */
707 for (block = block_stack; block != end_block; block = block->next)
708 if (block->data.block.stack_level != 0
709 || block->data.block.cleanups != 0)
710 break;
711
712 if (block != end_block)
713 {
714 /* Ok, a fixup is needed. Add a fixup to the list of such. */
715 struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
716 /* In case an old stack level is restored, make sure that comes
717 after any pending stack adjust. */
718 /* ?? If the fixup isn't to come at the present position,
719 doing the stack adjust here isn't useful. Doing it with our
720 settings at that location isn't useful either. Let's hope
721 someone does it! */
722 if (last_insn == 0)
723 do_pending_stack_adjust ();
724 fixup->target = tree_label;
725 fixup->target_rtl = rtl_label;
726
727 /* Create a BLOCK node and a corresponding matched set of
728 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
729 this point. The notes will encapsulate any and all fixup
730 code which we might later insert at this point in the insn
731 stream. Also, the BLOCK node will be the parent (i.e. the
732 `SUPERBLOCK') of any other BLOCK nodes which we might create
733 later on when we are expanding the fixup code.
734
735 Note that optimization passes might move the *_BLOCK notes away,
736 so we use a NOTE_INSN_DELETED as a placeholder. */
737
738 {
739 rtx original_before_jump
740 = last_insn ? last_insn : get_last_insn ();
741 rtx start;
742 rtx end;
743 tree block;
744
745 block = make_node (BLOCK);
746 TREE_USED (block) = 1;
747
748 if (!cfun->x_whole_function_mode_p)
749 lang_hooks.decls.insert_block (block);
750 else
751 {
752 BLOCK_CHAIN (block)
753 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
754 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
755 = block;
756 }
757
758 start_sequence ();
759 start = emit_note (NOTE_INSN_BLOCK_BEG);
760 if (cfun->x_whole_function_mode_p)
761 NOTE_BLOCK (start) = block;
762 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
763 end = emit_note (NOTE_INSN_BLOCK_END);
764 if (cfun->x_whole_function_mode_p)
765 NOTE_BLOCK (end) = block;
766 fixup->context = block;
767 end_sequence ();
768 emit_insn_after (start, original_before_jump);
769 }
770
771 fixup->block_start_count = current_block_start_count;
772 fixup->stack_level = 0;
773 fixup->cleanup_list_list
774 = ((block->data.block.outer_cleanups
775 || block->data.block.cleanups)
776 ? tree_cons (NULL_TREE, block->data.block.cleanups,
777 block->data.block.outer_cleanups)
778 : 0);
779 fixup->next = goto_fixup_chain;
780 goto_fixup_chain = fixup;
781 }
782
783 return block != 0;
784 }
785 \f
786 /* Expand any needed fixups in the outermost binding level of the
787 function. FIRST_INSN is the first insn in the function. */
788
789 void
790 expand_fixups (rtx first_insn)
791 {
792 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
793 }
794
795 /* When exiting a binding contour, process all pending gotos requiring fixups.
796 THISBLOCK is the structure that describes the block being exited.
797 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
798 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
799 FIRST_INSN is the insn that began this contour.
800
801 Gotos that jump out of this contour must restore the
802 stack level and do the cleanups before actually jumping.
803
804 DONT_JUMP_IN positive means report error if there is a jump into this
805 contour from before the beginning of the contour. This is also done if
806 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
807
808 static void
809 fixup_gotos (struct nesting *thisblock, rtx stack_level,
810 tree cleanup_list, rtx first_insn, int dont_jump_in)
811 {
812 struct goto_fixup *f, *prev;
813
814 /* F is the fixup we are considering; PREV is the previous one. */
815 /* We run this loop in two passes so that cleanups of exited blocks
816 are run first, and blocks that are exited are marked so
817 afterwards. */
818
819 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
820 {
821 /* Test for a fixup that is inactive because it is already handled. */
822 if (f->before_jump == 0)
823 {
824 /* Delete inactive fixup from the chain, if that is easy to do. */
825 if (prev != 0)
826 prev->next = f->next;
827 }
828 /* Has this fixup's target label been defined?
829 If so, we can finalize it. */
830 else if (PREV_INSN (f->target_rtl) != 0)
831 {
832 rtx cleanup_insns;
833
834 /* If this fixup jumped into this contour from before the beginning
835 of this contour, report an error. This code used to use
836 the first non-label insn after f->target_rtl, but that's
837 wrong since such insns can be added by things like put_var_into_stack
838 and can have INSN_UIDs that are out of the range of the block. */
839 /* ??? Bug: this does not detect jumping in through intermediate
840 blocks that have stack levels or cleanups.
841 It detects only a problem with the innermost block
842 around the label. */
843 if (f->target != 0
844 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
845 || cleanup_list)
846 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
847 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
848 && ! DECL_ERROR_ISSUED (f->target))
849 {
850 error ("%Jlabel '%D' used before containing binding contour",
851 f->target, f->target);
852 /* Prevent multiple errors for one label. */
853 DECL_ERROR_ISSUED (f->target) = 1;
854 }
855
856 /* We will expand the cleanups into a sequence of their own and
857 then later on we will attach this new sequence to the insn
858 stream just ahead of the actual jump insn. */
859
860 start_sequence ();
861
862 /* Temporarily restore the lexical context where we will
863 logically be inserting the fixup code. We do this for the
864 sake of getting the debugging information right. */
865
866 lang_hooks.decls.pushlevel (0);
867 lang_hooks.decls.set_block (f->context);
868
869 /* Expand the cleanups for blocks this jump exits. */
870 if (f->cleanup_list_list)
871 {
872 tree lists;
873 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
874 /* Marked elements correspond to blocks that have been closed.
875 Do their cleanups. */
876 if (TREE_ADDRESSABLE (lists)
877 && TREE_VALUE (lists) != 0)
878 {
879 expand_cleanups (TREE_VALUE (lists), 1, 1);
880 /* Pop any pushes done in the cleanups,
881 in case function is about to return. */
882 do_pending_stack_adjust ();
883 }
884 }
885
886 /* Restore stack level for the biggest contour that this
887 jump jumps out of. */
888 if (f->stack_level
889 && ! (f->target_rtl == return_label
890 && ((TREE_CODE (TREE_TYPE (current_function_decl))
891 == FUNCTION_TYPE)
892 && (TYPE_RETURNS_STACK_DEPRESSED
893 (TREE_TYPE (current_function_decl))))))
894 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
895
896 /* Finish up the sequence containing the insns which implement the
897 necessary cleanups, and then attach that whole sequence to the
898 insn stream just ahead of the actual jump insn. Attaching it
899 at that point ensures that any cleanups which are in fact
900 implicit C++ object destructions (which must be executed upon
901 leaving the block) appear (to the debugger) to be taking place
902 in an area of the generated code where the object(s) being
903 destructed are still "in scope". */
904
905 cleanup_insns = get_insns ();
906 lang_hooks.decls.poplevel (1, 0, 0);
907
908 end_sequence ();
909 emit_insn_after (cleanup_insns, f->before_jump);
910
911 f->before_jump = 0;
912 }
913 }
914
915 /* For any still-undefined labels, do the cleanups for this block now.
916 We must do this now since items in the cleanup list may go out
917 of scope when the block ends. */
918 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
919 if (f->before_jump != 0
920 && PREV_INSN (f->target_rtl) == 0
921 /* Label has still not appeared. If we are exiting a block with
922 a stack level to restore, that started before the fixup,
923 mark this stack level as needing restoration
924 when the fixup is later finalized. */
925 && thisblock != 0
926 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
927 means the label is undefined. That's erroneous, but possible. */
928 && (thisblock->data.block.block_start_count
929 <= f->block_start_count))
930 {
931 tree lists = f->cleanup_list_list;
932 rtx cleanup_insns;
933
934 for (; lists; lists = TREE_CHAIN (lists))
935 /* If the following elt. corresponds to our containing block
936 then the elt. must be for this block. */
937 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
938 {
939 start_sequence ();
940 lang_hooks.decls.pushlevel (0);
941 lang_hooks.decls.set_block (f->context);
942 expand_cleanups (TREE_VALUE (lists), 1, 1);
943 do_pending_stack_adjust ();
944 cleanup_insns = get_insns ();
945 lang_hooks.decls.poplevel (1, 0, 0);
946 end_sequence ();
947 if (cleanup_insns != 0)
948 f->before_jump
949 = emit_insn_after (cleanup_insns, f->before_jump);
950
951 f->cleanup_list_list = TREE_CHAIN (lists);
952 }
953
954 if (stack_level)
955 f->stack_level = stack_level;
956 }
957 }
958 \f
959 /* Return the number of times character C occurs in string S. */
960 static int
961 n_occurrences (int c, const char *s)
962 {
963 int n = 0;
964 while (*s)
965 n += (*s++ == c);
966 return n;
967 }
968 \f
969 /* Generate RTL for an asm statement (explicit assembler code).
970 STRING is a STRING_CST node containing the assembler code text,
971 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
972 insn is volatile; don't optimize it. */
973
974 void
975 expand_asm (tree string, int vol)
976 {
977 rtx body;
978
979 if (TREE_CODE (string) == ADDR_EXPR)
980 string = TREE_OPERAND (string, 0);
981
982 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
983
984 MEM_VOLATILE_P (body) = vol;
985
986 emit_insn (body);
987
988 clear_last_expr ();
989 }
990
991 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
992 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
993 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
994 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
995 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
996 constraint allows the use of a register operand. And, *IS_INOUT
997 will be true if the operand is read-write, i.e., if it is used as
998 an input as well as an output. If *CONSTRAINT_P is not in
999 canonical form, it will be made canonical. (Note that `+' will be
1000 replaced with `=' as part of this process.)
1001
1002 Returns TRUE if all went well; FALSE if an error occurred. */
1003
1004 bool
1005 parse_output_constraint (const char **constraint_p, int operand_num,
1006 int ninputs, int noutputs, bool *allows_mem,
1007 bool *allows_reg, bool *is_inout)
1008 {
1009 const char *constraint = *constraint_p;
1010 const char *p;
1011
1012 /* Assume the constraint doesn't allow the use of either a register
1013 or memory. */
1014 *allows_mem = false;
1015 *allows_reg = false;
1016
1017 /* Allow the `=' or `+' to not be at the beginning of the string,
1018 since it wasn't explicitly documented that way, and there is a
1019 large body of code that puts it last. Swap the character to
1020 the front, so as not to uglify any place else. */
1021 p = strchr (constraint, '=');
1022 if (!p)
1023 p = strchr (constraint, '+');
1024
1025 /* If the string doesn't contain an `=', issue an error
1026 message. */
1027 if (!p)
1028 {
1029 error ("output operand constraint lacks `='");
1030 return false;
1031 }
1032
1033 /* If the constraint begins with `+', then the operand is both read
1034 from and written to. */
1035 *is_inout = (*p == '+');
1036
1037 /* Canonicalize the output constraint so that it begins with `='. */
1038 if (p != constraint || is_inout)
1039 {
1040 char *buf;
1041 size_t c_len = strlen (constraint);
1042
1043 if (p != constraint)
1044 warning ("output constraint `%c' for operand %d is not at the beginning",
1045 *p, operand_num);
1046
1047 /* Make a copy of the constraint. */
1048 buf = alloca (c_len + 1);
1049 strcpy (buf, constraint);
1050 /* Swap the first character and the `=' or `+'. */
1051 buf[p - constraint] = buf[0];
1052 /* Make sure the first character is an `='. (Until we do this,
1053 it might be a `+'.) */
1054 buf[0] = '=';
1055 /* Replace the constraint with the canonicalized string. */
1056 *constraint_p = ggc_alloc_string (buf, c_len);
1057 constraint = *constraint_p;
1058 }
1059
1060 /* Loop through the constraint string. */
1061 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1062 switch (*p)
1063 {
1064 case '+':
1065 case '=':
1066 error ("operand constraint contains incorrectly positioned '+' or '='");
1067 return false;
1068
1069 case '%':
1070 if (operand_num + 1 == ninputs + noutputs)
1071 {
1072 error ("`%%' constraint used with last operand");
1073 return false;
1074 }
1075 break;
1076
1077 case 'V': case 'm': case 'o':
1078 *allows_mem = true;
1079 break;
1080
1081 case '?': case '!': case '*': case '&': case '#':
1082 case 'E': case 'F': case 'G': case 'H':
1083 case 's': case 'i': case 'n':
1084 case 'I': case 'J': case 'K': case 'L': case 'M':
1085 case 'N': case 'O': case 'P': case ',':
1086 break;
1087
1088 case '0': case '1': case '2': case '3': case '4':
1089 case '5': case '6': case '7': case '8': case '9':
1090 case '[':
1091 error ("matching constraint not valid in output operand");
1092 return false;
1093
1094 case '<': case '>':
1095 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1096 excepting those that expand_call created. So match memory
1097 and hope. */
1098 *allows_mem = true;
1099 break;
1100
1101 case 'g': case 'X':
1102 *allows_reg = true;
1103 *allows_mem = true;
1104 break;
1105
1106 case 'p': case 'r':
1107 *allows_reg = true;
1108 break;
1109
1110 default:
1111 if (!ISALPHA (*p))
1112 break;
1113 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1114 *allows_reg = true;
1115 #ifdef EXTRA_CONSTRAINT_STR
1116 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1117 *allows_reg = true;
1118 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1119 *allows_mem = true;
1120 else
1121 {
1122 /* Otherwise we can't assume anything about the nature of
1123 the constraint except that it isn't purely registers.
1124 Treat it like "g" and hope for the best. */
1125 *allows_reg = true;
1126 *allows_mem = true;
1127 }
1128 #endif
1129 break;
1130 }
1131
1132 return true;
1133 }
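/* Illustration (not part of the original source): an output constraint
   written "+r" for operand 0 of an asm with one output and one input is
   canonicalized and classified roughly as follows:

     const char *c = "+r";
     bool mem, reg, inout;
     parse_output_constraint (&c, 0, 1, 1, &mem, &reg, &inout);

   afterwards C points to "=r", *allows_reg and *is_inout are true, and
   *allows_mem is false; expand_asm_operands then also counts the operand
   as an input (the ninout handling).  */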
1134
1135 /* Similar, but for input constraints. */
1136
1137 bool
1138 parse_input_constraint (const char **constraint_p, int input_num,
1139 int ninputs, int noutputs, int ninout,
1140 const char * const * constraints,
1141 bool *allows_mem, bool *allows_reg)
1142 {
1143 const char *constraint = *constraint_p;
1144 const char *orig_constraint = constraint;
1145 size_t c_len = strlen (constraint);
1146 size_t j;
1147 bool saw_match = false;
1148
1149 /* Assume the constraint doesn't allow the use of either
1150 a register or memory. */
1151 *allows_mem = false;
1152 *allows_reg = false;
1153
1154 /* Make sure constraint has neither `=', `+', nor `&'. */
1155
1156 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1157 switch (constraint[j])
1158 {
1159 case '+': case '=': case '&':
1160 if (constraint == orig_constraint)
1161 {
1162 error ("input operand constraint contains `%c'", constraint[j]);
1163 return false;
1164 }
1165 break;
1166
1167 case '%':
1168 if (constraint == orig_constraint
1169 && input_num + 1 == ninputs - ninout)
1170 {
1171 error ("`%%' constraint used with last operand");
1172 return false;
1173 }
1174 break;
1175
1176 case 'V': case 'm': case 'o':
1177 *allows_mem = true;
1178 break;
1179
1180 case '<': case '>':
1181 case '?': case '!': case '*': case '#':
1182 case 'E': case 'F': case 'G': case 'H':
1183 case 's': case 'i': case 'n':
1184 case 'I': case 'J': case 'K': case 'L': case 'M':
1185 case 'N': case 'O': case 'P': case ',':
1186 break;
1187
1188 /* Whether or not a numeric constraint allows a register is
1189 decided by the matching constraint, and so there is no need
1190 to do anything special with them. We must handle them in
1191 the default case, so that we don't unnecessarily force
1192 operands to memory. */
1193 case '0': case '1': case '2': case '3': case '4':
1194 case '5': case '6': case '7': case '8': case '9':
1195 {
1196 char *end;
1197 unsigned long match;
1198
1199 saw_match = true;
1200
1201 match = strtoul (constraint + j, &end, 10);
1202 if (match >= (unsigned long) noutputs)
1203 {
1204 error ("matching constraint references invalid operand number");
1205 return false;
1206 }
1207
1208 /* Try and find the real constraint for this dup. Only do this
1209 if the matching constraint is the only alternative. */
1210 if (*end == '\0'
1211 && (j == 0 || (j == 1 && constraint[0] == '%')))
1212 {
1213 constraint = constraints[match];
1214 *constraint_p = constraint;
1215 c_len = strlen (constraint);
1216 j = 0;
1217 /* ??? At the end of the loop, we will skip the first part of
1218 the matched constraint. This assumes not only that the
1219 other constraint is an output constraint, but also that
1220 the '=' or '+' come first. */
1221 break;
1222 }
1223 else
1224 j = end - constraint;
1225 /* Anticipate increment at end of loop. */
1226 j--;
1227 }
1228 /* Fall through. */
1229
1230 case 'p': case 'r':
1231 *allows_reg = true;
1232 break;
1233
1234 case 'g': case 'X':
1235 *allows_reg = true;
1236 *allows_mem = true;
1237 break;
1238
1239 default:
1240 if (! ISALPHA (constraint[j]))
1241 {
1242 error ("invalid punctuation `%c' in constraint", constraint[j]);
1243 return false;
1244 }
1245 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1246 != NO_REGS)
1247 *allows_reg = true;
1248 #ifdef EXTRA_CONSTRAINT_STR
1249 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1250 *allows_reg = true;
1251 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1252 *allows_mem = true;
1253 else
1254 {
1255 /* Otherwise we can't assume anything about the nature of
1256 the constraint except that it isn't purely registers.
1257 Treat it like "g" and hope for the best. */
1258 *allows_reg = true;
1259 *allows_mem = true;
1260 }
1261 #endif
1262 break;
1263 }
1264
1265 if (saw_match && !*allows_reg)
1266 warning ("matching constraint does not allow a register");
1267
1268 return true;
1269 }
1270
1271 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
1272 if it is an operand which must be passed in memory (i.e. an "m"
1273 constraint), false otherwise. */
1274
1275 bool
1276 asm_op_is_mem_input (tree input, tree expr)
1277 {
1278 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
1279 tree outputs = ASM_OUTPUTS (expr);
1280 int noutputs = list_length (outputs);
1281 const char **constraints
1282 = (const char **) alloca ((noutputs) * sizeof (const char *));
1283 int i = 0;
1284 bool allows_mem, allows_reg;
1285 tree t;
1286
1287 /* Collect output constraints. */
1288 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1289 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1290
1291 /* We pass 0 for input_num, ninputs and ninout; they are only used for
1292 error checking which will be done at expand time. */
1293 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
1294 &allows_mem, &allows_reg);
1295 return (!allows_reg && allows_mem);
1296 }
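/* Illustration (not part of the original source): an ASM_EXPR input
   written "m" (x) makes asm_op_is_mem_input return true, since "m" allows
   only memory; "r" (x) or "g" (x) would make it return false.  */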
1297
1298 /* Check for overlap between registers marked in CLOBBERED_REGS and
1299 anything inappropriate in DECL. Emit error and return TRUE for error,
1300 FALSE for ok. */
1301
1302 static bool
1303 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
1304 {
1305 /* Conflicts between asm-declared register variables and the clobber
1306 list are not allowed. */
1307 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1308 && DECL_REGISTER (decl)
1309 && REG_P (DECL_RTL (decl))
1310 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1311 {
1312 rtx reg = DECL_RTL (decl);
1313 unsigned int regno;
1314
1315 for (regno = REGNO (reg);
1316 regno < (REGNO (reg)
1317 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
1318 regno++)
1319 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1320 {
1321 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1322 IDENTIFIER_POINTER (DECL_NAME (decl)));
1323
1324 /* Reset registerness to stop multiple errors emitted for a
1325 single variable. */
1326 DECL_REGISTER (decl) = 0;
1327 return true;
1328 }
1329 }
1330 return false;
1331 }
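/* Illustration (not part of the original source; register names are
   target-specific):

     register int counter asm ("ebx");
     asm volatile ("..." : "=r" (counter) : : "ebx");

   Here the asm-declared hard register of COUNTER also appears in the
   clobber list, so the error above is issued and DECL_REGISTER is
   cleared to suppress duplicate diagnostics.  */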
1332
1333 /* Generate RTL for an asm statement with arguments.
1334 STRING is the instruction template.
1335 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1336 Each output or input has an expression in the TREE_VALUE and
1337 a tree list in TREE_PURPOSE which in turn contains a constraint
1338 name in TREE_VALUE (or NULL_TREE) and a constraint string
1339 in TREE_PURPOSE.
1340 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1341 that is clobbered by this insn.
1342
1343 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1344 Some elements of OUTPUTS may be replaced with trees representing temporary
1345 values. The caller should copy those temporary values to the originally
1346 specified lvalues.
1347
1348 VOL nonzero means the insn is volatile; don't optimize it. */
1349
1350 void
1351 expand_asm_operands (tree string, tree outputs, tree inputs,
1352 tree clobbers, int vol, location_t locus)
1353 {
1354 rtvec argvec, constraintvec;
1355 rtx body;
1356 int ninputs = list_length (inputs);
1357 int noutputs = list_length (outputs);
1358 int ninout;
1359 int nclobbers;
1360 HARD_REG_SET clobbered_regs;
1361 int clobber_conflict_found = 0;
1362 tree tail;
1363 tree t;
1364 int i;
1365 /* Vector of RTX's of evaluated output operands. */
1366 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1367 int *inout_opnum = alloca (noutputs * sizeof (int));
1368 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
1369 enum machine_mode *inout_mode
1370 = alloca (noutputs * sizeof (enum machine_mode));
1371 const char **constraints
1372 = alloca ((noutputs + ninputs) * sizeof (const char *));
1373 int old_generating_concat_p = generating_concat_p;
1374
1375 /* An ASM with no outputs needs to be treated as volatile, for now. */
1376 if (noutputs == 0)
1377 vol = 1;
1378
1379 if (! check_operand_nalternatives (outputs, inputs))
1380 return;
1381
1382 string = resolve_asm_operand_names (string, outputs, inputs);
1383
1384 /* Collect constraints. */
1385 i = 0;
1386 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1387 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1388 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1389 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1390
1391 /* Sometimes we wish to automatically clobber registers across an asm.
1392 Case in point is when the i386 backend moved from cc0 to a hard reg --
1393 maintaining source-level compatibility means automatically clobbering
1394 the flags register. */
1395 clobbers = targetm.md_asm_clobbers (clobbers);
1396
1397 /* Count the number of meaningful clobbered registers, ignoring what
1398 we would ignore later. */
1399 nclobbers = 0;
1400 CLEAR_HARD_REG_SET (clobbered_regs);
1401 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1402 {
1403 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1404
1405 i = decode_reg_name (regname);
1406 if (i >= 0 || i == -4)
1407 ++nclobbers;
1408 else if (i == -2)
1409 error ("unknown register name `%s' in `asm'", regname);
1410
1411 /* Mark clobbered registers. */
1412 if (i >= 0)
1413 {
1414 /* Clobbering the PIC register is an error. */
1415 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1416 {
1417 error ("PIC register `%s' clobbered in `asm'", regname);
1418 return;
1419 }
1420
1421 SET_HARD_REG_BIT (clobbered_regs, i);
1422 }
1423 }
1424
1425 clear_last_expr ();
1426
1427 /* First pass over inputs and outputs checks validity and sets
1428 mark_addressable if needed. */
1429
1430 ninout = 0;
1431 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1432 {
1433 tree val = TREE_VALUE (tail);
1434 tree type = TREE_TYPE (val);
1435 const char *constraint;
1436 bool is_inout;
1437 bool allows_reg;
1438 bool allows_mem;
1439
1440 /* If there's an erroneous arg, emit no insn. */
1441 if (type == error_mark_node)
1442 return;
1443
1444 /* Try to parse the output constraint. If that fails, there's
1445 no point in going further. */
1446 constraint = constraints[i];
1447 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1448 &allows_mem, &allows_reg, &is_inout))
1449 return;
1450
1451 if (! allows_reg
1452 && (allows_mem
1453 || is_inout
1454 || (DECL_P (val)
1455 && REG_P (DECL_RTL (val))
1456 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1457 lang_hooks.mark_addressable (val);
1458
1459 if (is_inout)
1460 ninout++;
1461 }
1462
1463 ninputs += ninout;
1464 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1465 {
1466 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1467 return;
1468 }
1469
1470 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1471 {
1472 bool allows_reg, allows_mem;
1473 const char *constraint;
1474
1475 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1476 would get VOIDmode and that could cause a crash in reload. */
1477 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1478 return;
1479
1480 constraint = constraints[i + noutputs];
1481 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1482 constraints, &allows_mem, &allows_reg))
1483 return;
1484
1485 if (! allows_reg && allows_mem)
1486 lang_hooks.mark_addressable (TREE_VALUE (tail));
1487 }
1488
1489 /* Second pass evaluates arguments. */
1490
1491 ninout = 0;
1492 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1493 {
1494 tree val = TREE_VALUE (tail);
1495 tree type = TREE_TYPE (val);
1496 bool is_inout;
1497 bool allows_reg;
1498 bool allows_mem;
1499 rtx op;
1500
1501 if (!parse_output_constraint (&constraints[i], i, ninputs,
1502 noutputs, &allows_mem, &allows_reg,
1503 &is_inout))
1504 abort ();
1505
1506 /* If an output operand is not a decl or indirect ref and our constraint
1507 allows a register, make a temporary to act as an intermediate.
1508 Make the asm insn write into that, then our caller will copy it to
1509 the real output operand. Likewise for promoted variables. */
1510
1511 generating_concat_p = 0;
1512
1513 real_output_rtx[i] = NULL_RTX;
1514 if ((TREE_CODE (val) == INDIRECT_REF
1515 && allows_mem)
1516 || (DECL_P (val)
1517 && (allows_mem || REG_P (DECL_RTL (val)))
1518 && ! (REG_P (DECL_RTL (val))
1519 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1520 || ! allows_reg
1521 || is_inout)
1522 {
1523 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1524 if (GET_CODE (op) == MEM)
1525 op = validize_mem (op);
1526
1527 if (! allows_reg && GET_CODE (op) != MEM)
1528 error ("output number %d not directly addressable", i);
1529 if ((! allows_mem && GET_CODE (op) == MEM)
1530 || GET_CODE (op) == CONCAT)
1531 {
1532 real_output_rtx[i] = protect_from_queue (op, 1);
1533 op = gen_reg_rtx (GET_MODE (op));
1534 if (is_inout)
1535 emit_move_insn (op, real_output_rtx[i]);
1536 }
1537 }
1538 else
1539 {
1540 op = assign_temp (type, 0, 0, 1);
1541 op = validize_mem (op);
1542 TREE_VALUE (tail) = make_tree (type, op);
1543 }
1544 output_rtx[i] = op;
1545
1546 generating_concat_p = old_generating_concat_p;
1547
1548 if (is_inout)
1549 {
1550 inout_mode[ninout] = TYPE_MODE (type);
1551 inout_opnum[ninout++] = i;
1552 }
1553
1554 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1555 clobber_conflict_found = 1;
1556 }
1557
1558 /* Make vectors for the expression-rtx, constraint strings,
1559 and named operands. */
1560
1561 argvec = rtvec_alloc (ninputs);
1562 constraintvec = rtvec_alloc (ninputs);
1563
1564 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1565 : GET_MODE (output_rtx[0])),
1566 TREE_STRING_POINTER (string),
1567 empty_string, 0, argvec, constraintvec,
1568 locus.file, locus.line);
1569
1570 MEM_VOLATILE_P (body) = vol;
1571
1572 /* Eval the inputs and put them into ARGVEC.
1573 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1574
1575 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1576 {
1577 bool allows_reg, allows_mem;
1578 const char *constraint;
1579 tree val, type;
1580 rtx op;
1581
1582 constraint = constraints[i + noutputs];
1583 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1584 constraints, &allows_mem, &allows_reg))
1585 abort ();
1586
1587 generating_concat_p = 0;
1588
1589 val = TREE_VALUE (tail);
1590 type = TREE_TYPE (val);
1591 op = expand_expr (val, NULL_RTX, VOIDmode,
1592 (allows_mem && !allows_reg
1593 ? EXPAND_MEMORY : EXPAND_NORMAL));
1594
1595 /* Never pass a CONCAT to an ASM. */
1596 if (GET_CODE (op) == CONCAT)
1597 op = force_reg (GET_MODE (op), op);
1598 else if (GET_CODE (op) == MEM)
1599 op = validize_mem (op);
1600
1601 if (asm_operand_ok (op, constraint) <= 0)
1602 {
1603 if (allows_reg)
1604 op = force_reg (TYPE_MODE (type), op);
1605 else if (!allows_mem)
1606 warning ("asm operand %d probably doesn't match constraints",
1607 i + noutputs);
1608 else if (GET_CODE (op) == MEM)
1609 {
1610 /* We won't recognize either volatile memory or memory
1611 with a queued address as a valid memory_operand
1612 at this point. Ignore it: clearly this *is* a memory. */
1613 }
1614 else
1615 {
1616 warning ("use of memory input without lvalue in "
1617 "asm operand %d is deprecated", i + noutputs);
1618
1619 if (CONSTANT_P (op))
1620 {
1621 rtx mem = force_const_mem (TYPE_MODE (type), op);
1622 if (mem)
1623 op = validize_mem (mem);
1624 else
1625 op = force_reg (TYPE_MODE (type), op);
1626 }
1627 if (REG_P (op)
1628 || GET_CODE (op) == SUBREG
1629 || GET_CODE (op) == ADDRESSOF
1630 || GET_CODE (op) == CONCAT)
1631 {
1632 tree qual_type = build_qualified_type (type,
1633 (TYPE_QUALS (type)
1634 | TYPE_QUAL_CONST));
1635 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1636 memloc = validize_mem (memloc);
1637 emit_move_insn (memloc, op);
1638 op = memloc;
1639 }
1640 }
1641 }
1642
1643 generating_concat_p = old_generating_concat_p;
1644 ASM_OPERANDS_INPUT (body, i) = op;
1645
1646 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1647 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1648
1649 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1650 clobber_conflict_found = 1;
1651 }
1652
1653 /* Protect all the operands from the queue now that they have all been
1654 evaluated. */
1655
1656 generating_concat_p = 0;
1657
1658 for (i = 0; i < ninputs - ninout; i++)
1659 ASM_OPERANDS_INPUT (body, i)
1660 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1661
1662 for (i = 0; i < noutputs; i++)
1663 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1664
1665 /* For in-out operands, copy output rtx to input rtx. */
1666 for (i = 0; i < ninout; i++)
1667 {
1668 int j = inout_opnum[i];
1669 char buffer[16];
1670
1671 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1672 = output_rtx[j];
1673
1674 sprintf (buffer, "%d", j);
1675 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1676 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1677 }
1678
1679 generating_concat_p = old_generating_concat_p;
1680
1681 /* Now, for each output, construct an rtx
1682 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1683 ARGVEC CONSTRAINTS OPNAMES))
1684 If there is more than one, put them inside a PARALLEL. */
1685
1686 if (noutputs == 1 && nclobbers == 0)
1687 {
1688 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1689 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1690 }
1691
1692 else if (noutputs == 0 && nclobbers == 0)
1693 {
1694 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1695 emit_insn (body);
1696 }
1697
1698 else
1699 {
1700 rtx obody = body;
1701 int num = noutputs;
1702
1703 if (num == 0)
1704 num = 1;
1705
1706 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1707
1708 /* For each output operand, store a SET. */
1709 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1710 {
1711 XVECEXP (body, 0, i)
1712 = gen_rtx_SET (VOIDmode,
1713 output_rtx[i],
1714 gen_rtx_ASM_OPERANDS
1715 (GET_MODE (output_rtx[i]),
1716 TREE_STRING_POINTER (string),
1717 constraints[i], i, argvec, constraintvec,
1718 locus.file, locus.line));
1719
1720 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1721 }
1722
1723 /* If there are no outputs (but there are some clobbers)
1724 store the bare ASM_OPERANDS into the PARALLEL. */
1725
1726 if (i == 0)
1727 XVECEXP (body, 0, i++) = obody;
1728
1729 /* Store (clobber REG) for each clobbered register specified. */
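/* As an illustration (IA-32 register names assumed), for

     asm volatile ("" : : : "memory", "cc", "eax");

   "cc" is skipped entirely, "memory" becomes a CLOBBER of a BLKmode
   scratch MEM, and "eax" becomes a CLOBBER of the corresponding QImode
   hard register.  */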
1730
1731 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1732 {
1733 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1734 int j = decode_reg_name (regname);
1735 rtx clobbered_reg;
1736
1737 if (j < 0)
1738 {
1739 if (j == -3) /* `cc', which is not a register */
1740 continue;
1741
1742 if (j == -4) /* `memory', don't cache memory across asm */
1743 {
1744 XVECEXP (body, 0, i++)
1745 = gen_rtx_CLOBBER (VOIDmode,
1746 gen_rtx_MEM
1747 (BLKmode,
1748 gen_rtx_SCRATCH (VOIDmode)));
1749 continue;
1750 }
1751
1752 /* Ignore unknown register, error already signaled. */
1753 continue;
1754 }
1755
1756 /* Use QImode since that's guaranteed to clobber just one reg. */
1757 clobbered_reg = gen_rtx_REG (QImode, j);
1758
1759 /* Do a sanity check for any overlap between the clobbers and,
1760 respectively, the inputs and outputs that has not been handled.
1761 Such overlap should have been detected and reported above. */
1762 if (!clobber_conflict_found)
1763 {
1764 int opno;
1765
1766 /* We test the old body (obody) contents to avoid tripping
1767 over the under-construction body. */
1768 for (opno = 0; opno < noutputs; opno++)
1769 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1770 internal_error ("asm clobber conflict with output operand");
1771
1772 for (opno = 0; opno < ninputs - ninout; opno++)
1773 if (reg_overlap_mentioned_p (clobbered_reg,
1774 ASM_OPERANDS_INPUT (obody, opno)))
1775 internal_error ("asm clobber conflict with input operand");
1776 }
1777
1778 XVECEXP (body, 0, i++)
1779 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1780 }
1781
1782 emit_insn (body);
1783 }
1784
1785 /* For any outputs that needed reloading into registers, spill them
1786 back to where they belong. */
1787 for (i = 0; i < noutputs; ++i)
1788 if (real_output_rtx[i])
1789 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1790
1791 free_temp_slots ();
1792 }
1793
1794 void
1795 expand_asm_expr (tree exp)
1796 {
1797 int noutputs, i;
1798 tree outputs, tail;
1799 tree *o;
1800
1801 if (ASM_INPUT_P (exp))
1802 {
1803 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1804 return;
1805 }
1806
1807 outputs = ASM_OUTPUTS (exp);
1808 noutputs = list_length (outputs);
1809 /* o[I] is the place that output number I should be written. */
1810 o = (tree *) alloca (noutputs * sizeof (tree));
1811
1812 /* Record the contents of OUTPUTS before it is modified. */
1813 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1814 o[i] = TREE_VALUE (tail);
1815
1816 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1817 OUTPUTS some trees for where the values were actually stored. */
1818 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1819 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1820 input_location);
1821
1822 /* Copy all the intermediate outputs into the specified outputs. */
1823 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1824 {
1825 if (o[i] != TREE_VALUE (tail))
1826 {
1827 expand_assignment (o[i], TREE_VALUE (tail), 0);
1828 free_temp_slots ();
1829
1830 /* Restore the original value so that it's correct the next
1831 time we expand this function. */
1832 TREE_VALUE (tail) = o[i];
1833 }
1834 }
1835
1836 /* Those MODIFY_EXPRs could do autoincrements. */
1837 emit_queue ();
1838 }
1839
1840 /* A subroutine of expand_asm_operands. Check that all operands have
1841 the same number of alternatives. Return true if so. */
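/* For instance, an asm whose output constraint is "=r,m" and whose input
   constraint is "ri,g" has two alternatives throughout and is accepted,
   whereas pairing "=r,m" with a plain "r" input is rejected below.  */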
1842
1843 static bool
1844 check_operand_nalternatives (tree outputs, tree inputs)
1845 {
1846 if (outputs || inputs)
1847 {
1848 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1849 int nalternatives
1850 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1851 tree next = inputs;
1852
1853 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1854 {
1855 error ("too many alternatives in `asm'");
1856 return false;
1857 }
1858
1859 tmp = outputs;
1860 while (tmp)
1861 {
1862 const char *constraint
1863 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1864
1865 if (n_occurrences (',', constraint) != nalternatives)
1866 {
1867 error ("operand constraints for `asm' differ in number of alternatives");
1868 return false;
1869 }
1870
1871 if (TREE_CHAIN (tmp))
1872 tmp = TREE_CHAIN (tmp);
1873 else
1874 tmp = next, next = 0;
1875 }
1876 }
1877
1878 return true;
1879 }
1880
1881 /* A subroutine of expand_asm_operands. Check that all operand names
1882 are unique. Return true if so. We rely on the fact that these names
1883 are identifiers, and so have been canonicalized by get_identifier,
1884 so all we need are pointer comparisons. */
1885
1886 static bool
1887 check_unique_operand_names (tree outputs, tree inputs)
1888 {
1889 tree i, j;
1890
1891 for (i = outputs; i ; i = TREE_CHAIN (i))
1892 {
1893 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1894 if (! i_name)
1895 continue;
1896
1897 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1898 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1899 goto failure;
1900 }
1901
1902 for (i = inputs; i ; i = TREE_CHAIN (i))
1903 {
1904 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1905 if (! i_name)
1906 continue;
1907
1908 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1909 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1910 goto failure;
1911 for (j = outputs; j ; j = TREE_CHAIN (j))
1912 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1913 goto failure;
1914 }
1915
1916 return true;
1917
1918 failure:
1919 error ("duplicate asm operand name '%s'",
1920 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1921 return false;
1922 }
1923
1924 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1925 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1926 STRING and in the constraints to those numbers. */
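/* For example (hypothetical operands), for

     asm ("movl %[src], %[dst]" : [dst] "=r" (x) : [src] "g" (y));

   "%[dst]" in the template is rewritten to "%0" and "%[src]" to "%1",
   and any "[name]" reference inside an input constraint string is
   likewise replaced by the operand's number.  */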
1927
1928 tree
1929 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1930 {
1931 char *buffer;
1932 char *p;
1933 const char *c;
1934 tree t;
1935
1936 check_unique_operand_names (outputs, inputs);
1937
1938 /* Substitute [<name>] in input constraint strings. There should be no
1939 named operands in output constraints. */
1940 for (t = inputs; t ; t = TREE_CHAIN (t))
1941 {
1942 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1943 if (strchr (c, '[') != NULL)
1944 {
1945 p = buffer = xstrdup (c);
1946 while ((p = strchr (p, '[')) != NULL)
1947 p = resolve_operand_name_1 (p, outputs, inputs);
1948 TREE_VALUE (TREE_PURPOSE (t))
1949 = build_string (strlen (buffer), buffer);
1950 free (buffer);
1951 }
1952 }
1953
1954 /* Now check for any needed substitutions in the template. */
1955 c = TREE_STRING_POINTER (string);
1956 while ((c = strchr (c, '%')) != NULL)
1957 {
1958 if (c[1] == '[')
1959 break;
1960 else if (ISALPHA (c[1]) && c[2] == '[')
1961 break;
1962 else
1963 {
1964 c += 1;
1965 continue;
1966 }
1967 }
1968
1969 if (c)
1970 {
1971 /* OK, we need to make a copy so we can perform the substitutions.
1972 Assume that we will not need extra space--we get to remove '['
1973 and ']', which means we cannot have a problem until we have more
1974 than 999 operands. */
1975 buffer = xstrdup (TREE_STRING_POINTER (string));
1976 p = buffer + (c - TREE_STRING_POINTER (string));
1977
1978 while ((p = strchr (p, '%')) != NULL)
1979 {
1980 if (p[1] == '[')
1981 p += 1;
1982 else if (ISALPHA (p[1]) && p[2] == '[')
1983 p += 2;
1984 else
1985 {
1986 p += 1;
1987 continue;
1988 }
1989
1990 p = resolve_operand_name_1 (p, outputs, inputs);
1991 }
1992
1993 string = build_string (strlen (buffer), buffer);
1994 free (buffer);
1995 }
1996
1997 return string;
1998 }
1999
2000 /* A subroutine of resolve_operand_names. P points to the '[' for a
2001 potential named operand of the form [<name>]. In place, replace
2002 the name and brackets with a number. Return a pointer to the
2003 balance of the string after substitution. */
2004
2005 static char *
2006 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2007 {
2008 char *q;
2009 int op;
2010 tree t;
2011 size_t len;
2012
2013 /* Collect the operand name. */
2014 q = strchr (p, ']');
2015 if (!q)
2016 {
2017 error ("missing close brace for named operand");
2018 return strchr (p, '\0');
2019 }
2020 len = q - p - 1;
2021
2022 /* Resolve the name to a number. */
2023 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2024 {
2025 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2026 if (name)
2027 {
2028 const char *c = TREE_STRING_POINTER (name);
2029 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2030 goto found;
2031 }
2032 }
2033 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2034 {
2035 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2036 if (name)
2037 {
2038 const char *c = TREE_STRING_POINTER (name);
2039 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2040 goto found;
2041 }
2042 }
2043
2044 *q = '\0';
2045 error ("undefined named operand '%s'", p + 1);
2046 op = 0;
2047 found:
2048
2049 /* Replace the name with the number. Unfortunately, not all libraries
2050 get the return value of sprintf correct, so search for the end of the
2051 generated string by hand. */
2052 sprintf (p, "%d", op);
2053 p = strchr (p, '\0');
2054
2055 /* Verify the no extra buffer space assumption. */
2056 if (p > q)
2057 abort ();
2058
2059 /* Shift the rest of the buffer down to fill the gap. */
2060 memmove (p, q + 1, strlen (q + 1) + 1);
2061
2062 return p;
2063 }
2064 \f
2065 /* Generate RTL to evaluate the expression EXP
2066 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2067 Provided just for backward-compatibility. expand_expr_stmt_value()
2068 should be used for new code. */
2069
2070 void
2071 expand_expr_stmt (tree exp)
2072 {
2073 expand_expr_stmt_value (exp, -1, 1);
2074 }
2075
2076 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2077 whether to (1) save the value of the expression, (0) discard it or
2078 (-1) use expr_stmts_for_value to tell. The use of -1 is
2079 deprecated, and retained only for backward compatibility. */
2080
2081 void
2082 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2083 {
2084 rtx value;
2085 tree type;
2086 rtx alt_rtl = NULL;
2087
2088 if (want_value == -1)
2089 want_value = expr_stmts_for_value != 0;
2090
2091 /* If -Wextra, warn about statements with no side effects,
2092 except for an explicit cast to void (e.g. for assert()), and
2093 except for last statement in ({...}) where they may be useful. */
2094 if (! want_value
2095 && (expr_stmts_for_value == 0 || ! maybe_last)
2096 && exp != error_mark_node
2097 && warn_unused_value)
2098 {
2099 if (TREE_SIDE_EFFECTS (exp))
2100 warn_if_unused_value (exp, emit_locus);
2101 else if (!VOID_TYPE_P (TREE_TYPE (exp)) && !TREE_NO_WARNING (exp))
2102 warning ("%Hstatement with no effect", &emit_locus);
2103 }
2104
2105 /* If EXP is of function type and we are expanding statements for
2106 value, convert it to pointer-to-function. */
2107 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2108 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2109
2110 /* The call to `expand_expr' could cause last_expr_type and
2111 last_expr_value to get reset. Therefore, we set last_expr_value
2112 and last_expr_type *after* calling expand_expr. */
2113 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2114 VOIDmode, 0, &alt_rtl);
2115 type = TREE_TYPE (exp);
2116
2117 /* If all we do is reference a volatile value in memory,
2118 copy it to a register to be sure it is actually touched. */
2119 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2120 {
2121 if (TYPE_MODE (type) == VOIDmode)
2122 ;
2123 else if (TYPE_MODE (type) != BLKmode)
2124 value = copy_to_reg (value);
2125 else
2126 {
2127 rtx lab = gen_label_rtx ();
2128
2129 /* Compare the value with itself to reference it. */
2130 emit_cmp_and_jump_insns (value, value, EQ,
2131 expand_expr (TYPE_SIZE (type),
2132 NULL_RTX, VOIDmode, 0),
2133 BLKmode, 0, lab);
2134 emit_label (lab);
2135 }
2136 }
2137
2138 /* If this expression is part of a ({...}) and is in memory, we may have
2139 to preserve temporaries. */
2140 preserve_temp_slots (value);
2141
2142 /* Free any temporaries used to evaluate this expression. Any temporary
2143 used as a result of this expression will already have been preserved
2144 above. */
2145 free_temp_slots ();
2146
2147 if (want_value)
2148 {
2149 last_expr_value = value;
2150 last_expr_alt_rtl = alt_rtl;
2151 last_expr_type = type;
2152 }
2153
2154 emit_queue ();
2155 }
2156
2157 /* Warn if EXP contains any computations whose results are not used.
2158 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
2159 (potential) location of the expression. */
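/* Illustrative inputs (hypothetical expressions) and their outcomes:

     (void) (x + 1)   -> 0, explicit cast to void
     a = b            -> 0, a MODIFY_EXPR counts as a use
     foo (), 0        -> 0, constant second operand of a comma
     a = b, c         -> 1, warns about the unused right-hand operand C  */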
2160
2161 int
2162 warn_if_unused_value (tree exp, location_t locus)
2163 {
2164 restart:
2165 if (TREE_USED (exp))
2166 return 0;
2167
2168 /* Don't warn about void constructs. This includes casting to void,
2169 void function calls, and statement expressions with a final cast
2170 to void. */
2171 if (VOID_TYPE_P (TREE_TYPE (exp)))
2172 return 0;
2173
2174 if (EXPR_LOCUS (exp))
2175 locus = *EXPR_LOCUS (exp);
2176
2177 switch (TREE_CODE (exp))
2178 {
2179 case PREINCREMENT_EXPR:
2180 case POSTINCREMENT_EXPR:
2181 case PREDECREMENT_EXPR:
2182 case POSTDECREMENT_EXPR:
2183 case MODIFY_EXPR:
2184 case INIT_EXPR:
2185 case TARGET_EXPR:
2186 case CALL_EXPR:
2187 case RTL_EXPR:
2188 case TRY_CATCH_EXPR:
2189 case WITH_CLEANUP_EXPR:
2190 case EXIT_EXPR:
2191 return 0;
2192
2193 case BIND_EXPR:
2194 /* For a binding, warn if no side effect within it. */
2195 exp = BIND_EXPR_BODY (exp);
2196 goto restart;
2197
2198 case SAVE_EXPR:
2199 exp = TREE_OPERAND (exp, 0);
2200 goto restart;
2201
2202 case TRUTH_ORIF_EXPR:
2203 case TRUTH_ANDIF_EXPR:
2204 /* In && or ||, warn if 2nd operand has no side effect. */
2205 exp = TREE_OPERAND (exp, 1);
2206 goto restart;
2207
2208 case COMPOUND_EXPR:
2209 if (TREE_NO_WARNING (exp))
2210 return 0;
2211 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
2212 return 1;
2213 /* Let people do `(foo (), 0)' without a warning. */
2214 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2215 return 0;
2216 exp = TREE_OPERAND (exp, 1);
2217 goto restart;
2218
2219 case NOP_EXPR:
2220 case CONVERT_EXPR:
2221 case NON_LVALUE_EXPR:
2222 /* Don't warn about conversions not explicit in the user's program. */
2223 if (TREE_NO_WARNING (exp))
2224 return 0;
2225 /* Assignment to a cast usually results in a cast of a modify.
2226 Don't complain about that. There can be an arbitrary number of
2227 casts before the modify, so we must loop until we find the first
2228 non-cast expression and then test to see if that is a modify. */
2229 {
2230 tree tem = TREE_OPERAND (exp, 0);
2231
2232 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2233 tem = TREE_OPERAND (tem, 0);
2234
2235 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2236 || TREE_CODE (tem) == CALL_EXPR)
2237 return 0;
2238 }
2239 goto maybe_warn;
2240
2241 case INDIRECT_REF:
2242 /* Don't warn about automatic dereferencing of references, since
2243 the user cannot control it. */
2244 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2245 {
2246 exp = TREE_OPERAND (exp, 0);
2247 goto restart;
2248 }
2249 /* Fall through. */
2250
2251 default:
2252 /* Referencing a volatile value is a side effect, so don't warn. */
2253 if ((DECL_P (exp)
2254 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2255 && TREE_THIS_VOLATILE (exp))
2256 return 0;
2257
2258 /* If this is an expression which has no operands, there is no value
2259 to be unused. There are no such language-independent codes,
2260 but front ends may define such. */
2261 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2262 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2263 return 0;
2264
2265 maybe_warn:
2266 /* If this is an expression with side effects, don't warn. */
2267 if (TREE_SIDE_EFFECTS (exp))
2268 return 0;
2269
2270 warning ("%Hvalue computed is not used", &locus);
2271 return 1;
2272 }
2273 }
2274
2275 /* Clear out the memory of the last expression evaluated. */
2276
2277 void
2278 clear_last_expr (void)
2279 {
2280 last_expr_type = NULL_TREE;
2281 last_expr_value = NULL_RTX;
2282 last_expr_alt_rtl = NULL_RTX;
2283 }
2284
2285 /* Begin a statement-expression, i.e., a series of statements which
2286 may return a value. Return the RTL_EXPR for this statement expr.
2287 The caller must save that value and pass it to
2288 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2289 in the statement-expression are deallocated at the end of the
2290 expression. */
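/* For example, a GNU statement expression such as

     z = ({ int t = f (x); t * 2; });

   is bracketed by a call to this function before its body and a call to
   expand_end_stmt_expr afterwards; the RTL_EXPR returned there carries
   the value of the last statement, t * 2.  */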
2291
2292 tree
2293 expand_start_stmt_expr (int has_scope)
2294 {
2295 tree t;
2296
2297 /* Make the RTL_EXPR node temporary, not momentary,
2298 so that rtl_expr_chain doesn't become garbage. */
2299 t = make_node (RTL_EXPR);
2300 do_pending_stack_adjust ();
2301 if (has_scope)
2302 start_sequence_for_rtl_expr (t);
2303 else
2304 start_sequence ();
2305 NO_DEFER_POP;
2306 expr_stmts_for_value++;
2307 return t;
2308 }
2309
2310 /* Restore the previous state at the end of a statement that returns a value.
2311 Returns a tree node representing the statement's value and the
2312 insns to compute the value.
2313
2314 The nodes of that expression have been freed by now, so we cannot use them.
2315 But we don't want to do that anyway; the expression has already been
2316 evaluated and now we just want to use the value. So generate a RTL_EXPR
2317 with the proper type and RTL value.
2318
2319 If the last substatement was not an expression,
2320 return something with type `void'. */
2321
2322 tree
2323 expand_end_stmt_expr (tree t)
2324 {
2325 OK_DEFER_POP;
2326
2327 if (! last_expr_value || ! last_expr_type)
2328 {
2329 last_expr_value = const0_rtx;
2330 last_expr_alt_rtl = NULL_RTX;
2331 last_expr_type = void_type_node;
2332 }
2333 else if (!REG_P (last_expr_value) && ! CONSTANT_P (last_expr_value))
2334 /* Remove any possible QUEUED. */
2335 last_expr_value = protect_from_queue (last_expr_value, 0);
2336
2337 emit_queue ();
2338
2339 TREE_TYPE (t) = last_expr_type;
2340 RTL_EXPR_RTL (t) = last_expr_value;
2341 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2342 RTL_EXPR_SEQUENCE (t) = get_insns ();
2343
2344 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2345
2346 end_sequence ();
2347
2348 /* Don't consider deleting this expr or containing exprs at tree level. */
2349 TREE_SIDE_EFFECTS (t) = 1;
2350 /* Propagate volatility of the actual RTL expr. */
2351 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2352
2353 clear_last_expr ();
2354 expr_stmts_for_value--;
2355
2356 return t;
2357 }
2358 \f
2359 /* Generate RTL for the start of an if-then. COND is the expression
2360 whose truth should be tested.
2361
2362 If EXITFLAG is nonzero, this conditional is visible to
2363 `exit_something'. */
2364
2365 void
2366 expand_start_cond (tree cond, int exitflag)
2367 {
2368 struct nesting *thiscond = ALLOC_NESTING ();
2369
2370 /* Make an entry on cond_stack for the cond we are entering. */
2371
2372 thiscond->desc = COND_NESTING;
2373 thiscond->next = cond_stack;
2374 thiscond->all = nesting_stack;
2375 thiscond->depth = ++nesting_depth;
2376 thiscond->data.cond.next_label = gen_label_rtx ();
2377 /* Before we encounter an `else', we don't need a separate exit label
2378 unless there are supposed to be exit statements
2379 to exit this conditional. */
2380 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2381 thiscond->data.cond.endif_label = thiscond->exit_label;
2382 cond_stack = thiscond;
2383 nesting_stack = thiscond;
2384
2385 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2386 }
2387
2388 /* Generate RTL between then-clause and the elseif-clause
2389 of an if-then-elseif-.... */
2390
2391 void
2392 expand_start_elseif (tree cond)
2393 {
2394 if (cond_stack->data.cond.endif_label == 0)
2395 cond_stack->data.cond.endif_label = gen_label_rtx ();
2396 emit_jump (cond_stack->data.cond.endif_label);
2397 emit_label (cond_stack->data.cond.next_label);
2398 cond_stack->data.cond.next_label = gen_label_rtx ();
2399 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2400 }
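/* A sketch of the full calling sequence for
   `if (a) S1; else if (b) S2; else S3;':

     expand_start_cond (a, 0);    ... expand S1 ...
     expand_start_elseif (b);     ... expand S2 ...
     expand_start_else ();        ... expand S3 ...
     expand_end_cond ();
*/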
2401
2402 /* Generate RTL between the then-clause and the else-clause
2403 of an if-then-else. */
2404
2405 void
2406 expand_start_else (void)
2407 {
2408 if (cond_stack->data.cond.endif_label == 0)
2409 cond_stack->data.cond.endif_label = gen_label_rtx ();
2410
2411 emit_jump (cond_stack->data.cond.endif_label);
2412 emit_label (cond_stack->data.cond.next_label);
2413 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2414 }
2415
2416 /* After calling expand_start_else, turn this "else" into an "else if"
2417 by providing another condition. */
2418
2419 void
2420 expand_elseif (tree cond)
2421 {
2422 cond_stack->data.cond.next_label = gen_label_rtx ();
2423 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2424 }
2425
2426 /* Generate RTL for the end of an if-then.
2427 Pop the record for it off of cond_stack. */
2428
2429 void
2430 expand_end_cond (void)
2431 {
2432 struct nesting *thiscond = cond_stack;
2433
2434 do_pending_stack_adjust ();
2435 if (thiscond->data.cond.next_label)
2436 emit_label (thiscond->data.cond.next_label);
2437 if (thiscond->data.cond.endif_label)
2438 emit_label (thiscond->data.cond.endif_label);
2439
2440 POPSTACK (cond_stack);
2441 clear_last_expr ();
2442 }
2443 \f
2444 /* Return nonzero if we should preserve sub-expressions as separate
2445 pseudos. We never do so if we aren't optimizing. We always do so
2446 if -fexpensive-optimizations. */
2447
2448 int
2449 preserve_subexpressions_p (void)
2450 {
2451 if (flag_expensive_optimizations)
2452 return 1;
2453
2454 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
2455 return 0;
2456
2457 return 1;
2458 }
2459
2460 \f
2461 /* Generate RTL to return from the current function, with no value.
2462 (That is, we do not do anything about returning any value.) */
2463
2464 void
2465 expand_null_return (void)
2466 {
2467 rtx last_insn;
2468
2469 last_insn = get_last_insn ();
2470
2471 /* If this function was declared to return a value, but we
2472 didn't, clobber the return registers so that they are not
2473 propagated live to the rest of the function. */
2474 clobber_return_register ();
2475
2476 expand_null_return_1 (last_insn);
2477 }
2478
2479 /* Generate RTL to return directly from the current function.
2480 (That is, we bypass any return value.) */
2481
2482 void
2483 expand_naked_return (void)
2484 {
2485 rtx last_insn, end_label;
2486
2487 last_insn = get_last_insn ();
2488 end_label = naked_return_label;
2489
2490 clear_pending_stack_adjust ();
2491 do_pending_stack_adjust ();
2492 clear_last_expr ();
2493
2494 if (end_label == 0)
2495 end_label = naked_return_label = gen_label_rtx ();
2496 expand_goto_internal (NULL_TREE, end_label, last_insn);
2497 }
2498
2499 /* Try to guess whether the returned value indicates an error code. */
2500 static enum br_predictor
2501 return_prediction (rtx val)
2502 {
2503 /* Different heuristics for pointers and scalars. */
2504 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2505 {
2506 /* NULL is usually not returned. */
2507 if (val == const0_rtx)
2508 return PRED_NULL_RETURN;
2509 }
2510 else
2511 {
2512 /* Negative return values are often used to indicate
2513 errors. */
2514 if (GET_CODE (val) == CONST_INT
2515 && INTVAL (val) < 0)
2516 return PRED_NEGATIVE_RETURN;
2517 /* Constant return values are also usually errors;
2518 zero and one often mean booleans, so exclude them from the
2519 heuristics. */
2520 if (CONSTANT_P (val)
2521 && (val != const0_rtx && val != const1_rtx))
2522 return PRED_CONST_RETURN;
2523 }
2524 return PRED_NO_PREDICTION;
2525 }
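/* For instance (hypothetical returns): `return NULL;' from a pointer-valued
   function yields PRED_NULL_RETURN, `return -1;' yields PRED_NEGATIVE_RETURN,
   and `return EINVAL;' (a constant other than 0 or 1) yields
   PRED_CONST_RETURN.  */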
2526
2527
2528 /* If the current function returns values in the most significant part
2529 of a register, shift return value VAL appropriately. The mode of
2530 the function's return type is known not to be BLKmode. */
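/* As a worked instance of the computation below: with a 32-bit return
   register and a return type occupying 3 bytes, the shift amount is
   32 - 3 * BITS_PER_UNIT = 8 bits to the left.  */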
2531
2532 static rtx
2533 shift_return_value (rtx val)
2534 {
2535 tree type;
2536
2537 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2538 if (targetm.calls.return_in_msb (type))
2539 {
2540 rtx target;
2541 HOST_WIDE_INT shift;
2542
2543 target = DECL_RTL (DECL_RESULT (current_function_decl));
2544 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2545 - BITS_PER_UNIT * int_size_in_bytes (type));
2546 if (shift > 0)
2547 val = expand_binop (GET_MODE (target), ashl_optab,
2548 gen_lowpart (GET_MODE (target), val),
2549 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2550 }
2551 return val;
2552 }
2553
2554
2555 /* Generate RTL to return from the current function, with value VAL. */
2556
2557 static void
2558 expand_value_return (rtx val)
2559 {
2560 rtx last_insn;
2561 rtx return_reg;
2562 enum br_predictor pred;
2563
2564 if (flag_guess_branch_prob
2565 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2566 {
2567 /* Emit information for branch prediction. */
2568 rtx note;
2569
2570 note = emit_note (NOTE_INSN_PREDICTION);
2571
2572 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2573
2574 }
2575
2576 last_insn = get_last_insn ();
2577 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2578
2579 /* Copy the value to the return location
2580 unless it's already there. */
2581
2582 if (return_reg != val)
2583 {
2584 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2585 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2586 {
2587 int unsignedp = TYPE_UNSIGNED (type);
2588 enum machine_mode old_mode
2589 = DECL_MODE (DECL_RESULT (current_function_decl));
2590 enum machine_mode mode
2591 = promote_mode (type, old_mode, &unsignedp, 1);
2592
2593 if (mode != old_mode)
2594 val = convert_modes (mode, old_mode, val, unsignedp);
2595 }
2596 if (GET_CODE (return_reg) == PARALLEL)
2597 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2598 else
2599 emit_move_insn (return_reg, val);
2600 }
2601
2602 expand_null_return_1 (last_insn);
2603 }
2604
2605 /* Output a return with no value. If LAST_INSN is nonzero,
2606 pretend that the return takes place after LAST_INSN. */
2607
2608 static void
2609 expand_null_return_1 (rtx last_insn)
2610 {
2611 rtx end_label = cleanup_label ? cleanup_label : return_label;
2612
2613 clear_pending_stack_adjust ();
2614 do_pending_stack_adjust ();
2615 clear_last_expr ();
2616
2617 if (end_label == 0)
2618 end_label = return_label = gen_label_rtx ();
2619 expand_goto_internal (NULL_TREE, end_label, last_insn);
2620 }
2621 \f
2622 /* Generate RTL to evaluate the expression RETVAL and return it
2623 from the current function. */
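/* For a C `return a + b;' the front end typically hands us a MODIFY_EXPR of
   the form `RESULT_DECL = a + b', so RETVAL_RHS below is the expression
   `a + b' rather than the assignment itself.  */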
2624
2625 void
2626 expand_return (tree retval)
2627 {
2628 /* If there are any cleanups to be performed, then they will
2629 be inserted following LAST_INSN. It is desirable
2630 that the last_insn, for such purposes, should be the
2631 last insn before computing the return value. Otherwise, cleanups
2632 which call functions can clobber the return value. */
2633 /* ??? rms: I think that is erroneous, because in C++ it would
2634 run destructors on variables that might be used in the subsequent
2635 computation of the return value. */
2636 rtx last_insn = 0;
2637 rtx result_rtl;
2638 rtx val = 0;
2639 tree retval_rhs;
2640
2641 /* If function wants no value, give it none. */
2642 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2643 {
2644 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2645 emit_queue ();
2646 expand_null_return ();
2647 return;
2648 }
2649
2650 if (retval == error_mark_node)
2651 {
2652 /* Treat this like a return of no value from a function that
2653 returns a value. */
2654 expand_null_return ();
2655 return;
2656 }
2657 else if (TREE_CODE (retval) == RESULT_DECL)
2658 retval_rhs = retval;
2659 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2660 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2661 retval_rhs = TREE_OPERAND (retval, 1);
2662 else
2663 retval_rhs = retval;
2664
2665 last_insn = get_last_insn ();
2666
2667 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2668
2669 /* If the result is an aggregate that is being returned in one (or more)
2670 registers, load the registers here. The compiler currently can't handle
2671 copying a BLKmode value into registers. We could put this code in a
2672 more general area (for use by everyone instead of just function
2673 call/return), but until this feature is generally usable it is kept here
2674 (and in expand_call). The value must go into a pseudo in case there
2675 are cleanups that will clobber the real return register. */
2676
2677 if (retval_rhs != 0
2678 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2679 && REG_P (result_rtl))
2680 {
2681 int i;
2682 unsigned HOST_WIDE_INT bitpos, xbitpos;
2683 unsigned HOST_WIDE_INT padding_correction = 0;
2684 unsigned HOST_WIDE_INT bytes
2685 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2686 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2687 unsigned int bitsize
2688 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2689 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2690 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2691 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2692 enum machine_mode tmpmode, result_reg_mode;
2693
2694 if (bytes == 0)
2695 {
2696 expand_null_return ();
2697 return;
2698 }
2699
2700 /* If the structure doesn't take up a whole number of words, see
2701 whether the register value should be padded on the left or on
2702 the right. Set PADDING_CORRECTION to the number of padding
2703 bits needed on the left side.
2704
2705 In most ABIs, the structure will be returned at the least significant
2706 the register, which translates to right padding on little-endian
2707 targets and left padding on big-endian targets. The opposite
2708 holds if the structure is returned at the most significant
2709 end of the register. */
2710 if (bytes % UNITS_PER_WORD != 0
2711 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2712 ? !BYTES_BIG_ENDIAN
2713 : BYTES_BIG_ENDIAN))
2714 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2715 * BITS_PER_UNIT));
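/* E.g. on a 32-bit big-endian target that does not return in the MSB,
   a 6-byte structure gives bytes % UNITS_PER_WORD == 2, so
   PADDING_CORRECTION is 32 - 2 * BITS_PER_UNIT = 16 bits.  */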
2716
2717 /* Copy the structure BITSIZE bits at a time. */
2718 for (bitpos = 0, xbitpos = padding_correction;
2719 bitpos < bytes * BITS_PER_UNIT;
2720 bitpos += bitsize, xbitpos += bitsize)
2721 {
2722 /* We need a new destination pseudo each time xbitpos is
2723 on a word boundary and when xbitpos == padding_correction
2724 (the first time through). */
2725 if (xbitpos % BITS_PER_WORD == 0
2726 || xbitpos == padding_correction)
2727 {
2728 /* Generate an appropriate register. */
2729 dst = gen_reg_rtx (word_mode);
2730 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2731
2732 /* Clear the destination before we move anything into it. */
2733 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2734 }
2735
2736 /* We need a new source operand each time bitpos is on a word
2737 boundary. */
2738 if (bitpos % BITS_PER_WORD == 0)
2739 src = operand_subword_force (result_val,
2740 bitpos / BITS_PER_WORD,
2741 BLKmode);
2742
2743 /* Use bitpos for the source extraction (left justified) and
2744 xbitpos for the destination store (right justified). */
2745 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2746 extract_bit_field (src, bitsize,
2747 bitpos % BITS_PER_WORD, 1,
2748 NULL_RTX, word_mode, word_mode,
2749 BITS_PER_WORD),
2750 BITS_PER_WORD);
2751 }
2752
2753 tmpmode = GET_MODE (result_rtl);
2754 if (tmpmode == BLKmode)
2755 {
2756 /* Find the smallest integer mode large enough to hold the
2757 entire structure and use that mode instead of BLKmode
2758 on the USE insn for the return register. */
2759 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2760 tmpmode != VOIDmode;
2761 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2762 /* Have we found a large enough mode? */
2763 if (GET_MODE_SIZE (tmpmode) >= bytes)
2764 break;
2765
2766 /* No suitable mode found. */
2767 if (tmpmode == VOIDmode)
2768 abort ();
2769
2770 PUT_MODE (result_rtl, tmpmode);
2771 }
2772
2773 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2774 result_reg_mode = word_mode;
2775 else
2776 result_reg_mode = tmpmode;
2777 result_reg = gen_reg_rtx (result_reg_mode);
2778
2779 emit_queue ();
2780 for (i = 0; i < n_regs; i++)
2781 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2782 result_pseudos[i]);
2783
2784 if (tmpmode != result_reg_mode)
2785 result_reg = gen_lowpart (tmpmode, result_reg);
2786
2787 expand_value_return (result_reg);
2788 }
2789 else if (retval_rhs != 0
2790 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2791 && (REG_P (result_rtl)
2792 || (GET_CODE (result_rtl) == PARALLEL)))
2793 {
2794 /* Calculate the return value into a temporary (usually a pseudo
2795 reg). */
2796 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2797 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2798
2799 val = assign_temp (nt, 0, 0, 1);
2800 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2801 val = force_not_mem (val);
2802 emit_queue ();
2803 /* Return the calculated value, doing cleanups first. */
2804 expand_value_return (shift_return_value (val));
2805 }
2806 else
2807 {
2808 /* No cleanups or no hard reg used;
2809 calculate value into hard return reg. */
2810 expand_expr (retval, const0_rtx, VOIDmode, 0);
2811 emit_queue ();
2812 expand_value_return (result_rtl);
2813 }
2814 }
2815 \f
2816 /* Generate the RTL code for entering a binding contour.
2817 The variables are declared one by one, by calls to `expand_decl'.
2818
2819 FLAGS is a bitwise or of the following flags:
2820
2821 1 - Nonzero if this construct should be visible to
2822 `exit_something'.
2823
2824 2 - Nonzero if this contour does not require a
2825 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2826 language-independent code should set this flag because they
2827 will not create corresponding BLOCK nodes. (There should be
2828 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2829 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2830 when expand_end_bindings is called.
2831
2832 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2833 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2834 note. */
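/* For instance, the language-independent helpers later in this file use

     expand_start_bindings (2);
     ...
     expand_end_bindings (NULL_TREE, 0, 0);

   i.e. flag 2 (no BLOCK, no block-begin note) paired with MARK_ENDS of
   zero, exactly as required above.  */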
2835
2836 void
2837 expand_start_bindings_and_block (int flags, tree block)
2838 {
2839 struct nesting *thisblock = ALLOC_NESTING ();
2840 rtx note;
2841 int exit_flag = ((flags & 1) != 0);
2842 int block_flag = ((flags & 2) == 0);
2843
2844 /* If a BLOCK is supplied, then the caller should be requesting a
2845 NOTE_INSN_BLOCK_BEG note. */
2846 if (!block_flag && block)
2847 abort ();
2848
2849 /* Create a note to mark the beginning of the block. */
2850 if (block_flag && !cfun->dont_emit_block_notes)
2851 {
2852 note = emit_note (NOTE_INSN_BLOCK_BEG);
2853 NOTE_BLOCK (note) = block;
2854 }
2855 else
2856 note = emit_note (NOTE_INSN_DELETED);
2857
2858 /* Make an entry on block_stack for the block we are entering. */
2859
2860 thisblock->desc = BLOCK_NESTING;
2861 thisblock->next = block_stack;
2862 thisblock->all = nesting_stack;
2863 thisblock->depth = ++nesting_depth;
2864 thisblock->data.block.stack_level = 0;
2865 thisblock->data.block.cleanups = 0;
2866 thisblock->data.block.exception_region = 0;
2867 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2868
2869 thisblock->data.block.conditional_code = 0;
2870 thisblock->data.block.last_unconditional_cleanup = note;
2871 /* When we insert instructions after the last unconditional cleanup,
2872 we don't adjust last_insn. That means that a later add_insn will
2873 clobber the instructions we've just added. The easiest way to
2874 fix this is to just insert another instruction here, so that the
2875 instructions inserted after the last unconditional cleanup are
2876 never the last instruction. */
2877 emit_note (NOTE_INSN_DELETED);
2878
2879 if (block_stack
2880 && !(block_stack->data.block.cleanups == NULL_TREE
2881 && block_stack->data.block.outer_cleanups == NULL_TREE))
2882 thisblock->data.block.outer_cleanups
2883 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2884 block_stack->data.block.outer_cleanups);
2885 else
2886 thisblock->data.block.outer_cleanups = 0;
2887 thisblock->data.block.label_chain = 0;
2888 thisblock->data.block.innermost_stack_block = stack_block_stack;
2889 thisblock->data.block.first_insn = note;
2890 thisblock->data.block.block_start_count = ++current_block_start_count;
2891 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2892 block_stack = thisblock;
2893 nesting_stack = thisblock;
2894
2895 /* Make a new level for allocating stack slots. */
2896 push_temp_slots ();
2897 }
2898
2899 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2900 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2901 expand_expr are made. After we end the region, we know that all
2902 space for all temporaries that were created by TARGET_EXPRs will be
2903 destroyed and their space freed for reuse. */
2904
2905 void
2906 expand_start_target_temps (void)
2907 {
2908 /* This is so that even if the result is preserved, the space
2909 allocated will be freed, as we know that it is no longer in use. */
2910 push_temp_slots ();
2911
2912 /* Start a new binding layer that will keep track of all cleanup
2913 actions to be performed. */
2914 expand_start_bindings (2);
2915
2916 target_temp_slot_level = temp_slot_level;
2917 }
2918
2919 void
2920 expand_end_target_temps (void)
2921 {
2922 expand_end_bindings (NULL_TREE, 0, 0);
2923
2924 /* This is so that even if the result is preserved, the space
2925 allocated will be freed, as we know that it is no longer in use. */
2926 pop_temp_slots ();
2927 }
2928
2929 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2930 in question represents the outermost pair of curly braces (i.e. the "body
2931 block") of a function or method.
2932
2933 For any BLOCK node representing a "body block" of a function or method, the
2934 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2935 represents the outermost (function) scope for the function or method (i.e.
2936 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2937 *that* node in turn will point to the relevant FUNCTION_DECL node. */
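/* Schematically, the nesting this function tests for is

     FUNCTION_DECL
       BLOCK                (outermost scope, with the formal parameters)
         BLOCK              (the "body block"; return 1 for this one)  */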
2938
2939 int
2940 is_body_block (tree stmt)
2941 {
2942 if (lang_hooks.no_body_blocks)
2943 return 0;
2944
2945 if (TREE_CODE (stmt) == BLOCK)
2946 {
2947 tree parent = BLOCK_SUPERCONTEXT (stmt);
2948
2949 if (parent && TREE_CODE (parent) == BLOCK)
2950 {
2951 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2952
2953 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2954 return 1;
2955 }
2956 }
2957
2958 return 0;
2959 }
2960
2961 /* True if we are currently emitting insns in an area of output code
2962 that is controlled by a conditional expression. This is used by
2963 the cleanup handling code to generate conditional cleanup actions. */
2964
2965 int
2966 conditional_context (void)
2967 {
2968 return block_stack && block_stack->data.block.conditional_code;
2969 }
2970
2971 /* Return an opaque pointer to the current nesting level, so frontend code
2972 can check its own sanity. */
2973
2974 struct nesting *
2975 current_nesting_level (void)
2976 {
2977 return cfun ? block_stack : 0;
2978 }
2979
2980 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2981 handler. */
2982 static void
2983 expand_nl_goto_receiver (void)
2984 {
2985 /* Clobber the FP when we get here, so we have to make sure it's
2986 marked as used by this function. */
2987 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2988
2989 /* Mark the static chain as clobbered here so life information
2990 doesn't get messed up for it. */
2991 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2992
2993 #ifdef HAVE_nonlocal_goto
2994 if (! HAVE_nonlocal_goto)
2995 #endif
2996 /* First adjust our frame pointer to its actual value. It was
2997 previously set to the start of the virtual area corresponding to
2998 the stacked variables when we branched here and now needs to be
2999 adjusted to the actual hardware fp value.
3000
3001 Assignments to virtual registers are converted by
3002 instantiate_virtual_regs into the corresponding assignment
3003 to the underlying register (fp in this case) that makes
3004 the original assignment true.
3005 So the following insn will actually be
3006 decrementing fp by STARTING_FRAME_OFFSET. */
3007 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3008
3009 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3010 if (fixed_regs[ARG_POINTER_REGNUM])
3011 {
3012 #ifdef ELIMINABLE_REGS
3013 /* If the argument pointer can be eliminated in favor of the
3014 frame pointer, we don't need to restore it. We assume here
3015 that if such an elimination is present, it can always be used.
3016 This is the case on all known machines; if we don't make this
3017 assumption, we do unnecessary saving on many machines. */
3018 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3019 size_t i;
3020
3021 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3022 if (elim_regs[i].from == ARG_POINTER_REGNUM
3023 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3024 break;
3025
3026 if (i == ARRAY_SIZE (elim_regs))
3027 #endif
3028 {
3029 /* Now restore our arg pointer from the address at which it
3030 was saved in our stack frame. */
3031 emit_move_insn (virtual_incoming_args_rtx,
3032 copy_to_reg (get_arg_pointer_save_area (cfun)));
3033 }
3034 }
3035 #endif
3036
3037 #ifdef HAVE_nonlocal_goto_receiver
3038 if (HAVE_nonlocal_goto_receiver)
3039 emit_insn (gen_nonlocal_goto_receiver ());
3040 #endif
3041
3042 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3043 insn, but we must not allow the code we just generated to be reordered
3044 by scheduling. Specifically, the update of the frame pointer must
3045 happen immediately, not later. So emit an ASM_INPUT to act as a blockage
3046 insn. */
3047 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3048 }
3049
3050 /* Warn about any unused VARS (which may contain nodes other than
3051 VAR_DECLs, but such nodes are ignored). The nodes are connected
3052 via the TREE_CHAIN field. */
3053
3054 void
3055 warn_about_unused_variables (tree vars)
3056 {
3057 tree decl;
3058
3059 if (warn_unused_variable)
3060 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3061 if (TREE_CODE (decl) == VAR_DECL
3062 && ! TREE_USED (decl)
3063 && ! DECL_IN_SYSTEM_HEADER (decl)
3064 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3065 warning ("%Junused variable '%D'", decl, decl);
3066 }
3067
3068 /* Generate RTL code to terminate a binding contour.
3069
3070 VARS is the chain of VAR_DECL nodes for the variables bound in this
3071 contour. There may actually be other nodes in this chain, but any
3072 nodes other than VAR_DECLS are ignored.
3073
3074 MARK_ENDS is nonzero if we should put a note at the beginning
3075 and end of this binding contour.
3076
3077 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3078 zero if we can jump into this contour only if it does not have a saved
3079 stack level, and negative if we are not to check for invalid use of
3080 labels (because the front end does that). */
3081
3082 void
3083 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3084 {
3085 struct nesting *thisblock = block_stack;
3086
3087 /* If any of the variables in this scope were not used, warn the
3088 user. */
3089 warn_about_unused_variables (vars);
3090
3091 if (thisblock->exit_label)
3092 {
3093 do_pending_stack_adjust ();
3094 emit_label (thisblock->exit_label);
3095 }
3096
3097 /* Don't allow jumping into a block that has a stack level.
3098 Cleanups are allowed, though. */
3099 if (dont_jump_in > 0
3100 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3101 {
3102 struct label_chain *chain;
3103
3104 /* Any labels in this block are no longer valid to go to.
3105 Mark them to cause an error message. */
3106 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3107 {
3108 DECL_TOO_LATE (chain->label) = 1;
3109 /* If any goto without a fixup came to this label,
3110 that must be an error, because gotos without fixups
3111 come from outside all saved stack-levels. */
3112 if (TREE_ADDRESSABLE (chain->label))
3113 error ("%Jlabel '%D' used before containing binding contour",
3114 chain->label, chain->label);
3115 }
3116 }
3117
3118 /* Restore stack level in effect before the block
3119 (only if variable-size objects allocated). */
3120 /* Perform any cleanups associated with the block. */
3121
3122 if (thisblock->data.block.stack_level != 0
3123 || thisblock->data.block.cleanups != 0)
3124 {
3125 int reachable;
3126 rtx insn;
3127
3128 /* Don't let cleanups affect ({...}) constructs. */
3129 int old_expr_stmts_for_value = expr_stmts_for_value;
3130 rtx old_last_expr_value = last_expr_value;
3131 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3132 tree old_last_expr_type = last_expr_type;
3133 expr_stmts_for_value = 0;
3134
3135 /* Only clean up here if this point can actually be reached. */
3136 insn = get_last_insn ();
3137 if (GET_CODE (insn) == NOTE)
3138 insn = prev_nonnote_insn (insn);
3139 reachable = (! insn || GET_CODE (insn) != BARRIER);
3140
3141 /* Do the cleanups. */
3142 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3143 if (reachable)
3144 do_pending_stack_adjust ();
3145
3146 expr_stmts_for_value = old_expr_stmts_for_value;
3147 last_expr_value = old_last_expr_value;
3148 last_expr_alt_rtl = old_last_expr_alt_rtl;
3149 last_expr_type = old_last_expr_type;
3150
3151 /* Restore the stack level. */
3152
3153 if (reachable && thisblock->data.block.stack_level != 0)
3154 {
3155 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3156 thisblock->data.block.stack_level, NULL_RTX);
3157 if (cfun->nonlocal_goto_save_area)
3158 update_nonlocal_goto_save_area ();
3159 }
3160
3161 /* Any gotos out of this block must also do these things.
3162 Also report any gotos with fixups that came to labels in this
3163 level. */
3164 fixup_gotos (thisblock,
3165 thisblock->data.block.stack_level,
3166 thisblock->data.block.cleanups,
3167 thisblock->data.block.first_insn,
3168 dont_jump_in);
3169 }
3170
3171 /* Mark the beginning and end of the scope if requested.
3172 We do this now, after running cleanups on the variables
3173 just going out of scope, so they are in scope for their cleanups. */
3174
3175 if (mark_ends && !cfun->dont_emit_block_notes)
3176 {
3177 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3178 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3179 }
3180 else
3181 /* Get rid of the beginning-mark if we don't make an end-mark. */
3182 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3183
3184 /* Restore the temporary level of TARGET_EXPRs. */
3185 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3186
3187 /* Restore block_stack level for containing block. */
3188
3189 stack_block_stack = thisblock->data.block.innermost_stack_block;
3190 POPSTACK (block_stack);
3191
3192 /* Pop the stack slot nesting and free any slots at this level. */
3193 pop_temp_slots ();
3194 }
3195 \f
3196 /* Generate code to save the stack pointer at the start of the current block
3197 and set up to restore it on exit. */
3198
3199 void
3200 save_stack_pointer (void)
3201 {
3202 struct nesting *thisblock = block_stack;
3203
3204 if (thisblock->data.block.stack_level == 0)
3205 {
3206 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3207 &thisblock->data.block.stack_level,
3208 thisblock->data.block.first_insn);
3209 stack_block_stack = thisblock;
3210 }
3211 }
3212 \f
3213 /* Generate RTL for the automatic variable declaration DECL.
3214 (Other kinds of declarations are simply ignored if seen here.) */
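/* A few representative cases (hypothetical declarations in a function body):

     register int i;   usually becomes a pseudo register;
     char buf[64];     gets a fixed stack slot via assign_temp;
     char vla[n];      gets dynamic stack space from
                       allocate_dynamic_stack_space.  */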
3215
3216 void
3217 expand_decl (tree decl)
3218 {
3219 tree type;
3220
3221 type = TREE_TYPE (decl);
3222
3223 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3224 type in case this node is used in a reference. */
3225 if (TREE_CODE (decl) == CONST_DECL)
3226 {
3227 DECL_MODE (decl) = TYPE_MODE (type);
3228 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3229 DECL_SIZE (decl) = TYPE_SIZE (type);
3230 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3231 return;
3232 }
3233
3234 /* Otherwise, only automatic variables need any expansion done. Static and
3235 external variables, and external functions, will be handled by
3236 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3237 nothing. PARM_DECLs are handled in `assign_parms'. */
3238 if (TREE_CODE (decl) != VAR_DECL)
3239 return;
3240
3241 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3242 return;
3243
3244 /* Create the RTL representation for the variable. */
3245
3246 if (type == error_mark_node)
3247 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3248
3249 else if (DECL_SIZE (decl) == 0)
3250 /* Variable with incomplete type. */
3251 {
3252 rtx x;
3253 if (DECL_INITIAL (decl) == 0)
3254 /* Error message was already done; now avoid a crash. */
3255 x = gen_rtx_MEM (BLKmode, const0_rtx);
3256 else
3257 /* An initializer is going to decide the size of this array.
3258 Until we know the size, represent its address with a reg. */
3259 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3260
3261 set_mem_attributes (x, decl, 1);
3262 SET_DECL_RTL (decl, x);
3263 }
3264 else if (DECL_MODE (decl) != BLKmode
3265 /* If -ffloat-store, don't put explicit float vars
3266 into regs. */
3267 && !(flag_float_store
3268 && TREE_CODE (type) == REAL_TYPE)
3269 && ! TREE_THIS_VOLATILE (decl)
3270 && ! DECL_NONLOCAL (decl)
3271 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3272 {
3273 /* Automatic variable that can go in a register. */
3274 int unsignedp = TYPE_UNSIGNED (type);
3275 enum machine_mode reg_mode
3276 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3277
3278 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3279
3280 /* Note if the object is a user variable. */
3281 if (!DECL_ARTIFICIAL (decl))
3282 {
3283 mark_user_reg (DECL_RTL (decl));
3284
3285 /* Trust user variables which have a pointer type to really
3286 be pointers. Do not trust compiler-generated temporaries:
3287 our type system is totally busted as it relates to
3288 pointer arithmetic, which creates lots of compiler-generated
3289 objects with pointer types that are not really
3290 pointers. */
3291 if (POINTER_TYPE_P (type))
3292 mark_reg_pointer (DECL_RTL (decl),
3293 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3294 }
3295
3296 maybe_set_unchanging (DECL_RTL (decl), decl);
3297
3298 /* If something wants our address, try to use ADDRESSOF. */
3299 if (TREE_ADDRESSABLE (decl))
3300 put_var_into_stack (decl, /*rescan=*/false);
3301 }
3302
3303 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3304 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3305 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3306 STACK_CHECK_MAX_VAR_SIZE)))
3307 {
3308 /* Variable of fixed size that goes on the stack. */
3309 rtx oldaddr = 0;
3310 rtx addr;
3311 rtx x;
3312
3313 /* If we previously made RTL for this decl, it must be an array
3314 whose size was determined by the initializer.
3315 The old address was a register; set that register now
3316 to the proper address. */
3317 if (DECL_RTL_SET_P (decl))
3318 {
3319 if (GET_CODE (DECL_RTL (decl)) != MEM
3320 || !REG_P (XEXP (DECL_RTL (decl), 0)))
3321 abort ();
3322 oldaddr = XEXP (DECL_RTL (decl), 0);
3323 }
3324
3325 /* Set alignment we actually gave this decl. */
3326 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3327 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3328 DECL_USER_ALIGN (decl) = 0;
3329
3330 x = assign_temp (decl, 1, 1, 1);
3331 set_mem_attributes (x, decl, 1);
3332 SET_DECL_RTL (decl, x);
3333
3334 if (oldaddr)
3335 {
3336 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3337 if (addr != oldaddr)
3338 emit_move_insn (oldaddr, addr);
3339 }
3340 }
3341 else
3342 /* Dynamic-size object: must push space on the stack. */
3343 {
3344 rtx address, size, x;
3345
3346 /* Record the stack pointer on entry to the block, if we have
3347 not already done so. */
3348 do_pending_stack_adjust ();
3349 save_stack_pointer ();
3350
3351 /* Compute the variable's size, in bytes. This will expand any
3352 needed SAVE_EXPRs for the first time. */
3353 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3354 free_temp_slots ();
3355
3356 /* Allocate space on the stack for the variable. Note that
3357 DECL_ALIGN says how the variable is to be aligned and we
3358 cannot use it to conclude anything about the alignment of
3359 the size. */
3360 address = allocate_dynamic_stack_space (size, NULL_RTX,
3361 TYPE_ALIGN (TREE_TYPE (decl)));
3362
3363 /* Reference the variable indirectly through that rtx. */
3364 x = gen_rtx_MEM (DECL_MODE (decl), address);
3365 set_mem_attributes (x, decl, 1);
3366 SET_DECL_RTL (decl, x);
3367
3368
3369 /* Indicate the alignment we actually gave this variable. */
3370 #ifdef STACK_BOUNDARY
3371 DECL_ALIGN (decl) = STACK_BOUNDARY;
3372 #else
3373 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3374 #endif
3375 DECL_USER_ALIGN (decl) = 0;
3376 }
3377 }
3378 \f
3379 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3380 void
3381 expand_stack_alloc (tree alloc, tree t_size)
3382 {
3383 rtx address, dest, size;
3384 tree var, type;
3385
3386 if (TREE_CODE (alloc) != ADDR_EXPR)
3387 abort ();
3388 var = TREE_OPERAND (alloc, 0);
3389 if (TREE_CODE (var) != VAR_DECL)
3390 abort ();
3391
3392 type = TREE_TYPE (var);
3393
3394 /* In function-at-a-time mode, variable_size doesn't expand this,
3395 so do it now. */
3396 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3397 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3398 const0_rtx, VOIDmode, 0);
3399
3400 /* Compute the variable's size, in bytes. */
3401 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3402 free_temp_slots ();
3403
3404 /* Allocate space on the stack for the variable. */
3405 address = XEXP (DECL_RTL (var), 0);
3406 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3407 if (dest != address)
3408 emit_move_insn (address, dest);
3409
3410 /* Indicate the alignment we actually gave this variable. */
3411 #ifdef STACK_BOUNDARY
3412 DECL_ALIGN (var) = STACK_BOUNDARY;
3413 #else
3414 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3415 #endif
3416 DECL_USER_ALIGN (var) = 0;
3417 }
3418
3419 /* Emit code to save the current value of the stack pointer. */
3420 rtx
3421 expand_stack_save (void)
3422 {
3423 rtx ret = NULL_RTX;
3424
3425 do_pending_stack_adjust ();
3426 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3427 return ret;
3428 }
3429
3430 /* Emit code to restore the stack pointer from the value saved in VAR. */
3431 void
3432 expand_stack_restore (tree var)
3433 {
3434 rtx sa = DECL_RTL (var);
3435
3436 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3437 }
3438 \f
3439 /* Emit code to perform the initialization of a declaration DECL. */
3440
3441 void
3442 expand_decl_init (tree decl)
3443 {
3444 int was_used = TREE_USED (decl);
3445
3446 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3447 for static decls. */
3448 if (TREE_CODE (decl) == CONST_DECL
3449 || TREE_STATIC (decl))
3450 return;
3451
3452 /* Compute and store the initial value now. */
3453
3454 push_temp_slots ();
3455
3456 if (DECL_INITIAL (decl) == error_mark_node)
3457 {
3458 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3459
3460 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3461 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3462 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3463 0);
3464 emit_queue ();
3465 }
3466 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3467 {
3468 emit_line_note (DECL_SOURCE_LOCATION (decl));
3469 expand_assignment (decl, DECL_INITIAL (decl), 0);
3470 emit_queue ();
3471 }
3472
3473 /* Don't let the initialization count as "using" the variable. */
3474 TREE_USED (decl) = was_used;
3475
3476 /* Free any temporaries we made while initializing the decl. */
3477 preserve_temp_slots (NULL_RTX);
3478 free_temp_slots ();
3479 pop_temp_slots ();
3480 }
3481
3482 /* CLEANUP is an expression to be executed at exit from this binding contour;
3483 for example, in C++, it might call the destructor for this variable.
3484
3485 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3486 CLEANUP multiple times, and have the correct semantics. This
3487 happens in exception handling, and for gotos, returns, and breaks that
3488 leave the current scope.
3489
3490 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3491 that is not associated with any particular variable. */
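
/* As an illustrative sketch (the C++ syntax and the destructor call below
   are hypothetical stand-ins for whatever tree the front end actually
   supplies), consider a declaration inside a conditional context:

     if (p)
       {
         T obj;      // front end registers obj's destructor as CLEANUP
         ...
       }

   Because the declaration is conditional, the code below guards the
   cleanup with a flag register, producing roughly

     flag = 0;                  // emitted at the last unconditional point
     ...
     flag = 1;                  // emitted where the cleanup is registered
     ...
     if (flag) run CLEANUP;     // the COND_EXPR-wrapped cleanup

   so the cleanup only runs on paths where the declaration was reached.  */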
3492
3493 int
3494 expand_decl_cleanup (tree decl, tree cleanup)
3495 {
3496 struct nesting *thisblock;
3497
3498 /* Error if we are not in any block. */
3499 if (cfun == 0 || block_stack == 0)
3500 return 0;
3501
3502 thisblock = block_stack;
3503
3504 /* Record the cleanup if there is one. */
3505
3506 if (cleanup != 0)
3507 {
3508 tree t;
3509 rtx seq;
3510 tree *cleanups = &thisblock->data.block.cleanups;
3511 int cond_context = conditional_context ();
3512
3513 if (cond_context)
3514 {
3515 rtx flag = gen_reg_rtx (word_mode);
3516 rtx set_flag_0;
3517 tree cond;
3518
3519 start_sequence ();
3520 emit_move_insn (flag, const0_rtx);
3521 set_flag_0 = get_insns ();
3522 end_sequence ();
3523
3524 thisblock->data.block.last_unconditional_cleanup
3525 = emit_insn_after (set_flag_0,
3526 thisblock->data.block.last_unconditional_cleanup);
3527
3528 emit_move_insn (flag, const1_rtx);
3529
3530 cond = build_decl (VAR_DECL, NULL_TREE,
3531 lang_hooks.types.type_for_mode (word_mode, 1));
3532 SET_DECL_RTL (cond, flag);
3533
3534 /* Conditionalize the cleanup. */
3535 cleanup = build (COND_EXPR, void_type_node,
3536 lang_hooks.truthvalue_conversion (cond),
3537 cleanup, integer_zero_node);
3538 cleanup = fold (cleanup);
3539
3540 cleanups = &thisblock->data.block.cleanups;
3541 }
3542
3543 cleanup = unsave_expr (cleanup);
3544
3545 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
3546
3547 if (! cond_context)
3548 /* If this block has a cleanup, it belongs in stack_block_stack. */
3549 stack_block_stack = thisblock;
3550
3551 if (cond_context)
3552 {
3553 start_sequence ();
3554 }
3555
3556 if (! using_eh_for_cleanups_p)
3557 TREE_ADDRESSABLE (t) = 1;
3558 else
3559 expand_eh_region_start ();
3560
3561 if (cond_context)
3562 {
3563 seq = get_insns ();
3564 end_sequence ();
3565 if (seq)
3566 thisblock->data.block.last_unconditional_cleanup
3567 = emit_insn_after (seq,
3568 thisblock->data.block.last_unconditional_cleanup);
3569 }
3570 else
3571 {
3572 thisblock->data.block.last_unconditional_cleanup
3573 = get_last_insn ();
3574 /* When we insert instructions after the last unconditional cleanup,
3575 we don't adjust last_insn. That means that a later add_insn will
3576 clobber the instructions we've just added. The easiest way to
3577 fix this is to just insert another instruction here, so that the
3578 instructions inserted after the last unconditional cleanup are
3579 never the last instruction. */
3580 emit_note (NOTE_INSN_DELETED);
3581 }
3582 }
3583 return 1;
3584 }
3585
3586 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3587 is thrown. */
3588
3589 int
3590 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
3591 {
3592 int ret = expand_decl_cleanup (decl, cleanup);
3593 if (cleanup && ret)
3594 {
3595 tree node = block_stack->data.block.cleanups;
3596 CLEANUP_EH_ONLY (node) = eh_only;
3597 }
3598 return ret;
3599 }
3600 \f
3601 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3602 DECL_ELTS is the list of elements that belong to DECL's type.
3603 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3604
3605 void
3606 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
3607 {
3608 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
3609 rtx x;
3610 tree t;
3611
3612 /* If any of the elements are addressable, so is the entire union. */
3613 for (t = decl_elts; t; t = TREE_CHAIN (t))
3614 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3615 {
3616 TREE_ADDRESSABLE (decl) = 1;
3617 break;
3618 }
3619
3620 expand_decl (decl);
3621 expand_decl_cleanup (decl, cleanup);
3622 x = DECL_RTL (decl);
3623
3624 /* Go through the elements, assigning RTL to each. */
3625 for (t = decl_elts; t; t = TREE_CHAIN (t))
3626 {
3627 tree decl_elt = TREE_VALUE (t);
3628 tree cleanup_elt = TREE_PURPOSE (t);
3629 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3630
3631 /* If any of the elements are used, so is the entire
3632 union. */
3633 if (TREE_USED (decl_elt))
3634 TREE_USED (decl) = 1;
3635
3636 /* Propagate the union's alignment to the elements. */
3637 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3638 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
3639
3640 /* If the element has BLKmode and the union doesn't, the union is
3641 aligned such that the element doesn't need to have BLKmode, so
3642 change the element's mode to the appropriate one for its size. */
3643 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3644 DECL_MODE (decl_elt) = mode
3645 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
3646
3647 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3648 instead create a new MEM rtx with the proper mode. */
3649 if (GET_CODE (x) == MEM)
3650 {
3651 if (mode == GET_MODE (x))
3652 SET_DECL_RTL (decl_elt, x);
3653 else
3654 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
3655 }
3656 else if (REG_P (x))
3657 {
3658 if (mode == GET_MODE (x))
3659 SET_DECL_RTL (decl_elt, x);
3660 else
3661 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
3662 }
3663 else
3664 abort ();
3665
3666 /* Record the cleanup if there is one. */
3667
3668 if (cleanup != 0)
3669 thisblock->data.block.cleanups
3670 = tree_cons (decl_elt, cleanup_elt,
3671 thisblock->data.block.cleanups);
3672 }
3673 }
3674 \f
3675 /* Expand a list of cleanups LIST.
3676 Elements may be expressions or may be nested lists.
3677
3678 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3679 goto and handle protection regions specially in that case.
3680
3681 If REACHABLE, we emit code, otherwise just inform the exception handling
3682 code about this finalization. */
3683
3684 static void
3685 expand_cleanups (tree list, int in_fixup, int reachable)
3686 {
3687 tree tail;
3688 for (tail = list; tail; tail = TREE_CHAIN (tail))
3689 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3690 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
3691 else
3692 {
3693 if (! in_fixup && using_eh_for_cleanups_p)
3694 expand_eh_region_end_cleanup (TREE_VALUE (tail));
3695
3696 if (reachable && !CLEANUP_EH_ONLY (tail))
3697 {
3698 /* Cleanups may be run multiple times. For example,
3699 when exiting a binding contour, we expand the
3700 cleanups associated with that contour. When a goto
3701 within that binding contour has a target outside that
3702 contour, it will expand all cleanups from its scope to
3703 the target. Though the cleanups are expanded multiple
3704 times, the control paths are non-overlapping so the
3705 cleanups will not be executed twice. */
3706
3707 /* We may need to protect from outer cleanups. */
3708 if (in_fixup && using_eh_for_cleanups_p)
3709 {
3710 expand_eh_region_start ();
3711
3712 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3713
3714 expand_eh_region_end_fixup (TREE_VALUE (tail));
3715 }
3716 else
3717 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3718
3719 free_temp_slots ();
3720 }
3721 }
3722 }
3723
3724 /* Mark the context we are emitting RTL for as a conditional
3725 context, so that any cleanup actions we register with
3726 expand_decl_cleanup will be properly conditionalized when those
3727 cleanup actions are later performed. Must be called before any
3728 expression (tree) is expanded that is within a conditional context. */
3729
3730 void
3731 start_cleanup_deferral (void)
3732 {
3733 /* block_stack can be NULL if we are inside the parameter list. It is
3734 OK to do nothing, because cleanups aren't possible here. */
3735 if (block_stack)
3736 ++block_stack->data.block.conditional_code;
3737 }
3738
3739 /* Mark the end of a conditional region of code. Because cleanup
3740 deferrals may be nested, we may still be in a conditional region
3741 after we end the currently deferred cleanups; only after we end all
3742 deferred cleanups are we back in unconditional code. */
3743
3744 void
3745 end_cleanup_deferral (void)
3746 {
3747 /* block_stack can be NULL if we are inside the parameter list. It is
3748 OK to do nothing, because cleanups aren't possible here. */
3749 if (block_stack)
3750 --block_stack->data.block.conditional_code;
3751 }
3752
3753 tree
3754 last_cleanup_this_contour (void)
3755 {
3756 if (block_stack == 0)
3757 return 0;
3758
3759 return block_stack->data.block.cleanups;
3760 }
3761
3762
3763 /* Return nonzero if any containing block has a stack level or
3764 cleanups. */
3765
3766 int
3767 containing_blocks_have_cleanups_or_stack_level (void)
3768 {
3769 struct nesting *block;
3770
3771 for (block = block_stack; block; block = block->next)
3772 if (block->data.block.stack_level != 0
3773 || block->data.block.cleanups != 0)
3774 return 1;
3775
3776 return 0;
3777 }
3778
3779 /* Return 1 if there are any pending cleanups at this point.
3780 Check the current contour as well as contours that enclose
3781 the current contour. */
3782
3783 int
3784 any_pending_cleanups (void)
3785 {
3786 struct nesting *block;
3787
3788 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
3789 return 0;
3790
3791 if (block_stack->data.block.cleanups != NULL)
3792 return 1;
3793
3794 if (block_stack->data.block.outer_cleanups == 0)
3795 return 0;
3796
3797 for (block = block_stack->next; block; block = block->next)
3798 if (block->data.block.cleanups != 0)
3799 return 1;
3800
3801 return 0;
3802 }
3803 \f
3804 /* Enter a case (Pascal) or switch (C) statement.
3805 Push a block onto case_stack and nesting_stack
3806 to accumulate the case-labels that are seen
3807 and to record the labels generated for the statement.
3808
3809 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3810 Otherwise, this construct is transparent for `exit_something'.
3811
3812 EXPR is the index-expression to be dispatched on.
3813 TYPE is its nominal type. We could simply convert EXPR to this type,
3814 but instead we take short cuts. */
3815
3816 void
3817 expand_start_case (int exit_flag, tree expr, tree type,
3818 const char *printname)
3819 {
3820 struct nesting *thiscase = ALLOC_NESTING ();
3821
3822 /* Make an entry on case_stack for the case we are entering. */
3823
3824 thiscase->desc = CASE_NESTING;
3825 thiscase->next = case_stack;
3826 thiscase->all = nesting_stack;
3827 thiscase->depth = ++nesting_depth;
3828 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3829 thiscase->data.case_stmt.case_list = 0;
3830 thiscase->data.case_stmt.index_expr = expr;
3831 thiscase->data.case_stmt.nominal_type = type;
3832 thiscase->data.case_stmt.default_label = 0;
3833 thiscase->data.case_stmt.printname = printname;
3834 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3835 case_stack = thiscase;
3836 nesting_stack = thiscase;
3837
3838 do_pending_stack_adjust ();
3839 emit_queue ();
3840
3841 /* Make sure case_stmt.start points to something that won't
3842 need any transformation before expand_end_case. */
3843 if (GET_CODE (get_last_insn ()) != NOTE)
3844 emit_note (NOTE_INSN_DELETED);
3845
3846 thiscase->data.case_stmt.start = get_last_insn ();
3847
3848 start_cleanup_deferral ();
3849 }
3850 \f
3851 static void
3852 check_seenlabel (void)
3853 {
3854 /* If this is the first label, warn if any insns have been emitted. */
3855 if (case_stack->data.case_stmt.line_number_status >= 0)
3856 {
3857 rtx insn;
3858
3859 restore_line_number_status
3860 (case_stack->data.case_stmt.line_number_status);
3861 case_stack->data.case_stmt.line_number_status = -1;
3862
3863 for (insn = case_stack->data.case_stmt.start;
3864 insn;
3865 insn = NEXT_INSN (insn))
3866 {
3867 if (GET_CODE (insn) == CODE_LABEL)
3868 break;
3869 if (GET_CODE (insn) != NOTE
3870 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3871 {
3872 do
3873 insn = PREV_INSN (insn);
3874 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
3875
3876 /* If insn is zero, then there must have been a syntax error. */
3877 if (insn)
3878 {
3879 location_t locus;
3880 locus.file = NOTE_SOURCE_FILE (insn);
3881 locus.line = NOTE_LINE_NUMBER (insn);
3882 warning ("%Hunreachable code at beginning of %s", &locus,
3883 case_stack->data.case_stmt.printname);
3884 }
3885 break;
3886 }
3887 }
3888 }
3889 }
3890
3891 /* Accumulate one case or default label inside a case or switch statement.
3892 VALUE is the value of the case (a null pointer, for a default label).
3893 The function CONVERTER, when applied to arguments T and V,
3894 converts the value V to the type T.
3895
3896 If not currently inside a case or switch statement, return 1 and do
3897 nothing. The caller will print a language-specific error message.
3898 If VALUE is a duplicate or overlaps, return 2 and do nothing
3899 except store the (first) duplicate node in *DUPLICATE.
3900 If VALUE is out of range, return 3 and do nothing.
3901 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3902 Return 0 on success.
3903
3904 Extended to handle range statements. */
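
/* A hypothetical front-end caller might use the return-code protocol
   above roughly like this (the diagnostic wording is illustrative only;
   each language front end supplies its own messages and converter):

     tree duplicate;
     switch (pushcase (value, converter, label, &duplicate))
       {
       case 1: error ("case label not within a switch statement"); break;
       case 2: error ("duplicate case value");                     break;
       case 3: error ("case value out of range");                  break;
       case 5: error ("case label within scope of cleanup or variable array"); break;
       }  */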
3905
3906 int
3907 pushcase (tree value, tree (*converter) (tree, tree), tree label,
3908 tree *duplicate)
3909 {
3910 tree index_type;
3911 tree nominal_type;
3912
3913 /* Fail if not inside a real case statement. */
3914 if (! (case_stack && case_stack->data.case_stmt.start))
3915 return 1;
3916
3917 if (stack_block_stack
3918 && stack_block_stack->depth > case_stack->depth)
3919 return 5;
3920
3921 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3922 nominal_type = case_stack->data.case_stmt.nominal_type;
3923
3924 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3925 if (index_type == error_mark_node)
3926 return 0;
3927
3928 /* Convert VALUE to the type in which the comparisons are nominally done. */
3929 if (value != 0)
3930 value = (*converter) (nominal_type, value);
3931
3932 check_seenlabel ();
3933
3934 /* Fail if this value is out of range for the actual type of the index
3935 (which may be narrower than NOMINAL_TYPE). */
3936 if (value != 0
3937 && (TREE_CONSTANT_OVERFLOW (value)
3938 || ! int_fits_type_p (value, index_type)))
3939 return 3;
3940
3941 return add_case_node (value, value, label, duplicate, false);
3942 }
3943
3944 /* Like pushcase but this case applies to all values between VALUE1 and
3945 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3946 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3947 starts at VALUE1 and ends at the highest value of the index type.
3948 If both are NULL, this case applies to all values.
3949
3950 The return value is the same as that of pushcase but there is one
3951 additional error code: 4 means the specified range was empty. */
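
/* For example, a front end expanding the GNU C case-range extension
   "case 1 ... 5:" might call this function with VALUE1 == 1 and
   VALUE2 == 5 (this is only an illustration; the actual caller and
   conversion routine depend on the language front end).  */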
3952
3953 int
3954 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
3955 tree label, tree *duplicate)
3956 {
3957 tree index_type;
3958 tree nominal_type;
3959
3960 /* Fail if not inside a real case statement. */
3961 if (! (case_stack && case_stack->data.case_stmt.start))
3962 return 1;
3963
3964 if (stack_block_stack
3965 && stack_block_stack->depth > case_stack->depth)
3966 return 5;
3967
3968 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3969 nominal_type = case_stack->data.case_stmt.nominal_type;
3970
3971 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3972 if (index_type == error_mark_node)
3973 return 0;
3974
3975 check_seenlabel ();
3976
3977 /* Convert VALUEs to type in which the comparisons are nominally done
3978 and replace any unspecified value with the corresponding bound. */
3979 if (value1 == 0)
3980 value1 = TYPE_MIN_VALUE (index_type);
3981 if (value2 == 0)
3982 value2 = TYPE_MAX_VALUE (index_type);
3983
3984 /* Fail if the range is empty. Do this before any conversion since
3985 we want to allow out-of-range empty ranges. */
3986 if (value2 != 0 && tree_int_cst_lt (value2, value1))
3987 return 4;
3988
3989 /* If the max was unbounded, use the max of the nominal_type we are
3990 converting to. Do this after the < check above to suppress false
3991 positives. */
3992 if (value2 == 0)
3993 value2 = TYPE_MAX_VALUE (nominal_type);
3994
3995 value1 = (*converter) (nominal_type, value1);
3996 value2 = (*converter) (nominal_type, value2);
3997
3998 /* Fail if these values are out of range. */
3999 if (TREE_CONSTANT_OVERFLOW (value1)
4000 || ! int_fits_type_p (value1, index_type))
4001 return 3;
4002
4003 if (TREE_CONSTANT_OVERFLOW (value2)
4004 || ! int_fits_type_p (value2, index_type))
4005 return 3;
4006
4007 return add_case_node (value1, value2, label, duplicate, false);
4008 }
4009
4010 /* Do the actual insertion of a case label for pushcase and pushcase_range
4011 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4012 slowdown for large switch statements. */
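
/* A brief sketch of the rebalancing done below (it is a standard AVL
   insertion; BALANCE holds the height of the right subtree minus the
   height of the left subtree, kept in the range -1 .. +1).  For example,
   the simple R-rotation applied when P has become left-heavy and its
   left child R is itself left-heavy is

            P                      R
           / \                    / \
          R   c      ==>         A   P
         / \                        / \
        A   S                      S   c

   The LR- and RL-rotations handle the zig-zag cases analogously,
   promoting the grandchild T to be the new root of the subtree.  */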
4013
4014 int
4015 add_case_node (tree low, tree high, tree label, tree *duplicate,
4016 bool dont_expand_label)
4017 {
4018 struct case_node *p, **q, *r;
4019
4020 /* If there's no HIGH value, then this is not a case range; it's
4021 just a simple case label. But that's just a degenerate case
4022 range. */
4023 if (!high)
4024 high = low;
4025
4026 /* Handle default labels specially. */
4027 if (!high && !low)
4028 {
4029 if (case_stack->data.case_stmt.default_label != 0)
4030 {
4031 *duplicate = case_stack->data.case_stmt.default_label;
4032 return 2;
4033 }
4034 case_stack->data.case_stmt.default_label = label;
4035 if (!dont_expand_label)
4036 expand_label (label);
4037 return 0;
4038 }
4039
4040 q = &case_stack->data.case_stmt.case_list;
4041 p = *q;
4042
4043 while ((r = *q))
4044 {
4045 p = r;
4046
4047 /* Keep going past elements distinctly greater than HIGH. */
4048 if (tree_int_cst_lt (high, p->low))
4049 q = &p->left;
4050
4051 /* or distinctly less than LOW. */
4052 else if (tree_int_cst_lt (p->high, low))
4053 q = &p->right;
4054
4055 else
4056 {
4057 /* We have an overlap; this is an error. */
4058 *duplicate = p->code_label;
4059 return 2;
4060 }
4061 }
4062
4063 /* Add this label to the chain, and succeed. */
4064
4065 r = ggc_alloc (sizeof (struct case_node));
4066 r->low = low;
4067
4068 /* If the bounds are equal, turn this into the one-value case. */
4069 if (tree_int_cst_equal (low, high))
4070 r->high = r->low;
4071 else
4072 r->high = high;
4073
4074 r->code_label = label;
4075 if (!dont_expand_label)
4076 expand_label (label);
4077
4078 *q = r;
4079 r->parent = p;
4080 r->left = 0;
4081 r->right = 0;
4082 r->balance = 0;
4083
4084 while (p)
4085 {
4086 struct case_node *s;
4087
4088 if (r == p->left)
4089 {
4090 int b;
4091
4092 if (! (b = p->balance))
4093 /* Growth propagation from left side. */
4094 p->balance = -1;
4095 else if (b < 0)
4096 {
4097 if (r->balance < 0)
4098 {
4099 /* R-Rotation */
4100 if ((p->left = s = r->right))
4101 s->parent = p;
4102
4103 r->right = p;
4104 p->balance = 0;
4105 r->balance = 0;
4106 s = p->parent;
4107 p->parent = r;
4108
4109 if ((r->parent = s))
4110 {
4111 if (s->left == p)
4112 s->left = r;
4113 else
4114 s->right = r;
4115 }
4116 else
4117 case_stack->data.case_stmt.case_list = r;
4118 }
4119 else
4120 /* r->balance == +1 */
4121 {
4122 /* LR-Rotation */
4123
4124 int b2;
4125 struct case_node *t = r->right;
4126
4127 if ((p->left = s = t->right))
4128 s->parent = p;
4129
4130 t->right = p;
4131 if ((r->right = s = t->left))
4132 s->parent = r;
4133
4134 t->left = r;
4135 b = t->balance;
4136 b2 = b < 0;
4137 p->balance = b2;
4138 b2 = -b2 - b;
4139 r->balance = b2;
4140 t->balance = 0;
4141 s = p->parent;
4142 p->parent = t;
4143 r->parent = t;
4144
4145 if ((t->parent = s))
4146 {
4147 if (s->left == p)
4148 s->left = t;
4149 else
4150 s->right = t;
4151 }
4152 else
4153 case_stack->data.case_stmt.case_list = t;
4154 }
4155 break;
4156 }
4157
4158 else
4159 {
4160 /* p->balance == +1; growth of left side balances the node. */
4161 p->balance = 0;
4162 break;
4163 }
4164 }
4165 else
4166 /* r == p->right */
4167 {
4168 int b;
4169
4170 if (! (b = p->balance))
4171 /* Growth propagation from right side. */
4172 p->balance++;
4173 else if (b > 0)
4174 {
4175 if (r->balance > 0)
4176 {
4177 /* L-Rotation */
4178
4179 if ((p->right = s = r->left))
4180 s->parent = p;
4181
4182 r->left = p;
4183 p->balance = 0;
4184 r->balance = 0;
4185 s = p->parent;
4186 p->parent = r;
4187 if ((r->parent = s))
4188 {
4189 if (s->left == p)
4190 s->left = r;
4191 else
4192 s->right = r;
4193 }
4194
4195 else
4196 case_stack->data.case_stmt.case_list = r;
4197 }
4198
4199 else
4200 /* r->balance == -1 */
4201 {
4202 /* RL-Rotation */
4203 int b2;
4204 struct case_node *t = r->left;
4205
4206 if ((p->right = s = t->left))
4207 s->parent = p;
4208
4209 t->left = p;
4210
4211 if ((r->left = s = t->right))
4212 s->parent = r;
4213
4214 t->right = r;
4215 b = t->balance;
4216 b2 = b < 0;
4217 r->balance = b2;
4218 b2 = -b2 - b;
4219 p->balance = b2;
4220 t->balance = 0;
4221 s = p->parent;
4222 p->parent = t;
4223 r->parent = t;
4224
4225 if ((t->parent = s))
4226 {
4227 if (s->left == p)
4228 s->left = t;
4229 else
4230 s->right = t;
4231 }
4232
4233 else
4234 case_stack->data.case_stmt.case_list = t;
4235 }
4236 break;
4237 }
4238 else
4239 {
4240 /* p->balance == -1; growth of right side balances the node. */
4241 p->balance = 0;
4242 break;
4243 }
4244 }
4245
4246 r = p;
4247 p = p->parent;
4248 }
4249
4250 return 0;
4251 }
4252 \f
4253 /* Maximum number of case bit tests. */
4254 #define MAX_CASE_BIT_TESTS 3
4255
4256 /* By default, enable case bit tests on targets with ashlsi3. */
4257 #ifndef CASE_USE_BIT_TESTS
4258 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4259 != CODE_FOR_nothing)
4260 #endif
4261
4262
4263 /* A case_bit_test represents a set of case nodes that may be
4264 selected from using a bit-wise comparison. HI and LO hold
4265 the integer to be tested against, LABEL contains the label
4266 to jump to upon success and BITS counts the number of case
4267 nodes handled by this test, typically the number of bits
4268 set in HI:LO. */
4269
4270 struct case_bit_test
4271 {
4272 HOST_WIDE_INT hi;
4273 HOST_WIDE_INT lo;
4274 rtx label;
4275 int bits;
4276 };
4277
4278 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4279
4280 static
4281 bool lshift_cheap_p (void)
4282 {
4283 static bool init = false;
4284 static bool cheap = true;
4285
4286 if (!init)
4287 {
4288 rtx reg = gen_rtx_REG (word_mode, 10000);
4289 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4290 cheap = cost < COSTS_N_INSNS (3);
4291 init = true;
4292 }
4293
4294 return cheap;
4295 }
4296
4297 /* Comparison function for qsort to order bit tests by decreasing
4298 number of case nodes, i.e. the node with the most cases gets
4299 tested first. */
4300
4301 static int
4302 case_bit_test_cmp (const void *p1, const void *p2)
4303 {
4304 const struct case_bit_test *d1 = p1;
4305 const struct case_bit_test *d2 = p2;
4306
4307 return d2->bits - d1->bits;
4308 }
4309
4310 /* Expand a switch statement by a short sequence of bit-wise
4311 comparisons. "switch(x)" is effectively converted into
4312 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4313 integer constants.
4314
4315 INDEX_EXPR is the value being switched on, which is of
4316 type INDEX_TYPE. MINVAL is the lowest case value in
4317 the case nodes, of INDEX_TYPE type, and RANGE is the highest
4318 value minus MINVAL, also of type INDEX_TYPE. NODES is
4319 the set of case nodes, and DEFAULT_LABEL is the label to
4320 branch to should none of the cases match.
4321
4322 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
4323 node targets. */
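
/* As a hypothetical example, a switch whose cases are 0, 3, 5 and 6,
   all branching to the same label L, could be expanded as roughly

     if ((unsigned) (x - MINVAL) > RANGE)   // here MINVAL == 0, RANGE == 6
       goto default_label;
     if ((1 << (x - MINVAL)) & 0x69)        // 0x69 == bits 0, 3, 5 and 6
       goto L;
     goto default_label;

   Each case_bit_test entry contributes one mask test of this form,
   emitted in decreasing order of the number of cases it covers.  */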
4324
4325 static void
4326 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4327 tree range, case_node_ptr nodes, rtx default_label)
4328 {
4329 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4330 enum machine_mode mode;
4331 rtx expr, index, label;
4332 unsigned int i,j,lo,hi;
4333 struct case_node *n;
4334 unsigned int count;
4335
4336 count = 0;
4337 for (n = nodes; n; n = n->right)
4338 {
4339 label = label_rtx (n->code_label);
4340 for (i = 0; i < count; i++)
4341 if (same_case_target_p (label, test[i].label))
4342 break;
4343
4344 if (i == count)
4345 {
4346 if (count >= MAX_CASE_BIT_TESTS)
4347 abort ();
4348 test[i].hi = 0;
4349 test[i].lo = 0;
4350 test[i].label = label;
4351 test[i].bits = 1;
4352 count++;
4353 }
4354 else
4355 test[i].bits++;
4356
4357 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4358 n->low, minval)), 1);
4359 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4360 n->high, minval)), 1);
4361 for (j = lo; j <= hi; j++)
4362 if (j >= HOST_BITS_PER_WIDE_INT)
4363 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
4364 else
4365 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4366 }
4367
4368 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4369
4370 index_expr = fold (build (MINUS_EXPR, index_type,
4371 convert (index_type, index_expr),
4372 convert (index_type, minval)));
4373 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4374 emit_queue ();
4375 index = protect_from_queue (index, 0);
4376 do_pending_stack_adjust ();
4377
4378 mode = TYPE_MODE (index_type);
4379 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4380 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4381 default_label);
4382
4383 index = convert_to_mode (word_mode, index, 0);
4384 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4385 index, NULL_RTX, 1, OPTAB_WIDEN);
4386
4387 for (i = 0; i < count; i++)
4388 {
4389 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4390 expr = expand_binop (word_mode, and_optab, index, expr,
4391 NULL_RTX, 1, OPTAB_WIDEN);
4392 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4393 word_mode, 1, test[i].label);
4394 }
4395
4396 emit_jump (default_label);
4397 }
4398
4399 #ifndef HAVE_casesi
4400 #define HAVE_casesi 0
4401 #endif
4402
4403 #ifndef HAVE_tablejump
4404 #define HAVE_tablejump 0
4405 #endif
4406
4407 /* Terminate a case (Pascal) or switch (C) statement
4408 in which ORIG_INDEX is the expression to be tested.
4409 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
4410 type as given in the source before any compiler conversions.
4411 Generate the code to test it and jump to the right place. */
4412
4413 void
4414 expand_end_case_type (tree orig_index, tree orig_type)
4415 {
4416 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
4417 rtx default_label = 0;
4418 struct case_node *n, *m;
4419 unsigned int count, uniq;
4420 rtx index;
4421 rtx table_label;
4422 int ncases;
4423 rtx *labelvec;
4424 int i;
4425 rtx before_case, end, lab;
4426 struct nesting *thiscase = case_stack;
4427 tree index_expr, index_type;
4428 bool exit_done = false;
4429 int unsignedp;
4430
4431 /* Don't crash due to previous errors. */
4432 if (thiscase == NULL)
4433 return;
4434
4435 index_expr = thiscase->data.case_stmt.index_expr;
4436 index_type = TREE_TYPE (index_expr);
4437 unsignedp = TYPE_UNSIGNED (index_type);
4438 if (orig_type == NULL)
4439 orig_type = TREE_TYPE (orig_index);
4440
4441 do_pending_stack_adjust ();
4442
4443 /* This might get a spurious warning in the presence of a syntax error;
4444 it could be fixed by moving the call to check_seenlabel after the
4445 check for error_mark_node, and copying the code of check_seenlabel that
4446 deals with case_stack->data.case_stmt.line_number_status /
4447 restore_line_number_status in front of the call to end_cleanup_deferral.
4448 However, this might miss some useful warnings in the presence of
4449 non-syntax errors. */
4450 check_seenlabel ();
4451
4452 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4453 if (index_type != error_mark_node)
4454 {
4455 /* If we don't have a default-label, create one here,
4456 after the body of the switch. */
4457 if (thiscase->data.case_stmt.default_label == 0)
4458 {
4459 thiscase->data.case_stmt.default_label
4460 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4461 /* Share the exit label if possible. */
4462 if (thiscase->exit_label)
4463 {
4464 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
4465 thiscase->exit_label);
4466 exit_done = true;
4467 }
4468 expand_label (thiscase->data.case_stmt.default_label);
4469 }
4470 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4471
4472 before_case = get_last_insn ();
4473
4474 if (thiscase->data.case_stmt.case_list
4475 && thiscase->data.case_stmt.case_list->left)
4476 thiscase->data.case_stmt.case_list
4477 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4478
4479 /* Simplify the case-list before we count it. */
4480 group_case_nodes (thiscase->data.case_stmt.case_list);
4481 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
4482 default_label);
4483
4484 /* Get upper and lower bounds of case values.
4485 Also convert all the case values to the index expr's data type. */
4486
4487 uniq = 0;
4488 count = 0;
4489 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4490 {
4491 /* Check low and high label values are integers. */
4492 if (TREE_CODE (n->low) != INTEGER_CST)
4493 abort ();
4494 if (TREE_CODE (n->high) != INTEGER_CST)
4495 abort ();
4496
4497 n->low = convert (index_type, n->low);
4498 n->high = convert (index_type, n->high);
4499
4500 /* Count the elements and track the largest and smallest
4501 of them (treating them as signed even if they are not). */
4502 if (count++ == 0)
4503 {
4504 minval = n->low;
4505 maxval = n->high;
4506 }
4507 else
4508 {
4509 if (INT_CST_LT (n->low, minval))
4510 minval = n->low;
4511 if (INT_CST_LT (maxval, n->high))
4512 maxval = n->high;
4513 }
4514 /* A range counts double, since it requires two compares. */
4515 if (! tree_int_cst_equal (n->low, n->high))
4516 count++;
4517
4518 /* Count the number of unique case node targets. */
4519 uniq++;
4520 lab = label_rtx (n->code_label);
4521 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
4522 if (same_case_target_p (label_rtx (m->code_label), lab))
4523 {
4524 uniq--;
4525 break;
4526 }
4527 }
4528
4529 /* Compute span of values. */
4530 if (count != 0)
4531 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4532
4533 end_cleanup_deferral ();
4534
4535 if (count == 0)
4536 {
4537 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4538 emit_queue ();
4539 emit_jump (default_label);
4540 }
4541
4542 /* Try implementing this switch statement by a short sequence of
4543 bit-wise comparisons. However, we let the binary-tree case
4544 below handle constant index expressions. */
4545 else if (CASE_USE_BIT_TESTS
4546 && ! TREE_CONSTANT (index_expr)
4547 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
4548 && compare_tree_int (range, 0) > 0
4549 && lshift_cheap_p ()
4550 && ((uniq == 1 && count >= 3)
4551 || (uniq == 2 && count >= 5)
4552 || (uniq == 3 && count >= 6)))
4553 {
4554 /* Optimize the case where all the case values fit in a
4555 word without having to subtract MINVAL. In this case,
4556 we can optimize away the subtraction. */
4557 if (compare_tree_int (minval, 0) > 0
4558 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
4559 {
4560 minval = integer_zero_node;
4561 range = maxval;
4562 }
4563 emit_case_bit_tests (index_type, index_expr, minval, range,
4564 thiscase->data.case_stmt.case_list,
4565 default_label);
4566 }
4567
4568 /* If range of values is much bigger than number of values,
4569 make a sequence of conditional branches instead of a dispatch.
4570 If the switch-index is a constant, do it this way
4571 because we can optimize it. */
4572
4573 else if (count < case_values_threshold ()
4574 || compare_tree_int (range,
4575 (optimize_size ? 3 : 10) * count) > 0
4576 /* RANGE may be signed, and really large ranges will show up
4577 as negative numbers. */
4578 || compare_tree_int (range, 0) < 0
4579 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4580 || flag_pic
4581 #endif
4582 || TREE_CONSTANT (index_expr)
4583 /* If neither casesi nor tablejump is available, we can
4584 only go this way. */
4585 || (!HAVE_casesi && !HAVE_tablejump))
4586 {
4587 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4588
4589 /* If the index is a short or char for which we do not have
4590 an insn to handle comparisons directly, convert it to
4591 a full integer now, rather than letting each comparison
4592 generate the conversion. */
4593
4594 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4595 && ! have_insn_for (COMPARE, GET_MODE (index)))
4596 {
4597 enum machine_mode wider_mode;
4598 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4599 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4600 if (have_insn_for (COMPARE, wider_mode))
4601 {
4602 index = convert_to_mode (wider_mode, index, unsignedp);
4603 break;
4604 }
4605 }
4606
4607 emit_queue ();
4608 do_pending_stack_adjust ();
4609
4610 index = protect_from_queue (index, 0);
4611 if (GET_CODE (index) == MEM)
4612 index = copy_to_reg (index);
4613 if (GET_CODE (index) == CONST_INT
4614 || TREE_CODE (index_expr) == INTEGER_CST)
4615 {
4616 /* Make a tree node with the proper constant value
4617 if we don't already have one. */
4618 if (TREE_CODE (index_expr) != INTEGER_CST)
4619 {
4620 index_expr
4621 = build_int_2 (INTVAL (index),
4622 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4623 index_expr = convert (index_type, index_expr);
4624 }
4625
4626 /* For constant index expressions we need only
4627 issue an unconditional branch to the appropriate
4628 target code. The job of removing any unreachable
4629 code is left to the optimization phase if the
4630 "-O" option is specified. */
4631 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4632 if (! tree_int_cst_lt (index_expr, n->low)
4633 && ! tree_int_cst_lt (n->high, index_expr))
4634 break;
4635
4636 if (n)
4637 emit_jump (label_rtx (n->code_label));
4638 else
4639 emit_jump (default_label);
4640 }
4641 else
4642 {
4643 /* If the index expression is not constant we generate
4644 a binary decision tree to select the appropriate
4645 target code. This is done as follows:
4646
4647 The list of cases is rearranged into a binary tree,
4648 nearly optimal assuming equal probability for each case.
4649
4650 The tree is transformed into RTL, eliminating
4651 redundant test conditions at the same time.
4652
4653 If program flow could reach the end of the
4654 decision tree an unconditional jump to the
4655 default code is emitted. */
4656
4657 use_cost_table
4658 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
4659 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4660 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
4661 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4662 default_label, index_type);
4663 emit_jump_if_reachable (default_label);
4664 }
4665 }
4666 else
4667 {
4668 table_label = gen_label_rtx ();
4669 if (! try_casesi (index_type, index_expr, minval, range,
4670 table_label, default_label))
4671 {
4672 index_type = thiscase->data.case_stmt.nominal_type;
4673
4674 /* Index jumptables from zero for suitable values of
4675 minval to avoid a subtraction. */
4676 if (! optimize_size
4677 && compare_tree_int (minval, 0) > 0
4678 && compare_tree_int (minval, 3) < 0)
4679 {
4680 minval = integer_zero_node;
4681 range = maxval;
4682 }
4683
4684 if (! try_tablejump (index_type, index_expr, minval, range,
4685 table_label, default_label))
4686 abort ();
4687 }
4688
4689 /* Get table of labels to jump to, in order of case index. */
4690
4691 ncases = tree_low_cst (range, 0) + 1;
4692 labelvec = alloca (ncases * sizeof (rtx));
4693 memset (labelvec, 0, ncases * sizeof (rtx));
4694
4695 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4696 {
4697 /* Compute the low and high bounds relative to the minimum
4698 value since that should fit in a HOST_WIDE_INT while the
4699 actual values may not. */
4700 HOST_WIDE_INT i_low
4701 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4702 n->low, minval)), 1);
4703 HOST_WIDE_INT i_high
4704 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4705 n->high, minval)), 1);
4706 HOST_WIDE_INT i;
4707
4708 for (i = i_low; i <= i_high; i ++)
4709 labelvec[i]
4710 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4711 }
4712
4713 /* Fill in the gaps with the default. */
4714 for (i = 0; i < ncases; i++)
4715 if (labelvec[i] == 0)
4716 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4717
4718 /* Output the table. */
4719 emit_label (table_label);
4720
4721 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4722 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4723 gen_rtx_LABEL_REF (Pmode, table_label),
4724 gen_rtvec_v (ncases, labelvec),
4725 const0_rtx, const0_rtx));
4726 else
4727 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4728 gen_rtvec_v (ncases, labelvec)));
4729
4730 /* If the case insn drops through the table,
4731 after the table we must jump to the default-label.
4732 Otherwise record no drop-through after the table. */
4733 #ifdef CASE_DROPS_THROUGH
4734 emit_jump (default_label);
4735 #else
4736 emit_barrier ();
4737 #endif
4738 }
4739
4740 before_case = NEXT_INSN (before_case);
4741 end = get_last_insn ();
4742 if (squeeze_notes (&before_case, &end))
4743 abort ();
4744 reorder_insns (before_case, end,
4745 thiscase->data.case_stmt.start);
4746 }
4747 else
4748 end_cleanup_deferral ();
4749
4750 if (thiscase->exit_label && !exit_done)
4751 emit_label (thiscase->exit_label);
4752
4753 POPSTACK (case_stack);
4754
4755 free_temp_slots ();
4756 }
4757
4758 /* Convert the tree NODE into a list linked by the right field, with the left
4759 field zeroed. RIGHT is used for recursion; it is a list to be placed
4760 rightmost in the resulting list. */
4761
4762 static struct case_node *
4763 case_tree2list (struct case_node *node, struct case_node *right)
4764 {
4765 struct case_node *left;
4766
4767 if (node->right)
4768 right = case_tree2list (node->right, right);
4769
4770 node->right = right;
4771 if ((left = node->left))
4772 {
4773 node->left = 0;
4774 return case_tree2list (left, node);
4775 }
4776
4777 return node;
4778 }
4779
4780 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4781
4782 static void
4783 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
4784 {
4785 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4786 {
4787 if (op1 == op2)
4788 emit_jump (label);
4789 }
4790 else
4791 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4792 (GET_MODE (op1) == VOIDmode
4793 ? GET_MODE (op2) : GET_MODE (op1)),
4794 unsignedp, label);
4795 }
4796 \f
4797 /* Not all case values are encountered equally. This function
4798 uses a heuristic to weight case labels, in cases where that
4799 looks like a reasonable thing to do.
4800
4801 Right now, all we try to guess is text, and we establish the
4802 following weights:
4803
4804 chars above space: 16
4805 digits: 16
4806 default: 12
4807 space, punct: 8
4808 tab: 4
4809 newline: 2
4810 other "\" chars: 1
4811 remaining chars: 0
4812
4813 If we find any cases in the switch that are not either -1 or in the range
4814 of valid ASCII characters, or are control characters other than those
4815 commonly used with "\", don't treat this switch as scanning text.
4816
4817 Return 1 if these nodes are suitable for cost estimation, otherwise
4818 return 0. */
4819
4820 static int
4821 estimate_case_costs (case_node_ptr node)
4822 {
4823 tree min_ascii = integer_minus_one_node;
4824 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4825 case_node_ptr n;
4826 int i;
4827
4828 /* If we haven't already made the cost table, make it now. Note that the
4829 lower bound of the table is -1, not zero. */
4830
4831 if (! cost_table_initialized)
4832 {
4833 cost_table_initialized = 1;
4834
4835 for (i = 0; i < 128; i++)
4836 {
4837 if (ISALNUM (i))
4838 COST_TABLE (i) = 16;
4839 else if (ISPUNCT (i))
4840 COST_TABLE (i) = 8;
4841 else if (ISCNTRL (i))
4842 COST_TABLE (i) = -1;
4843 }
4844
4845 COST_TABLE (' ') = 8;
4846 COST_TABLE ('\t') = 4;
4847 COST_TABLE ('\0') = 4;
4848 COST_TABLE ('\n') = 2;
4849 COST_TABLE ('\f') = 1;
4850 COST_TABLE ('\v') = 1;
4851 COST_TABLE ('\b') = 1;
4852 }
4853
4854 /* See if all the case expressions look like text. It is text if the
4855 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4856 as signed arithmetic since we don't want to ever access cost_table with a
4857 value less than -1. Also check that none of the constants in a range
4858 are strange control characters. */
4859
4860 for (n = node; n; n = n->right)
4861 {
4862 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4863 return 0;
4864
4865 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
4866 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
4867 if (COST_TABLE (i) < 0)
4868 return 0;
4869 }
4870
4871 /* All interesting values are within the range of interesting
4872 ASCII characters. */
4873 return 1;
4874 }
4875
4876 /* Determine whether two case labels branch to the same target. */
4877
4878 static bool
4879 same_case_target_p (rtx l1, rtx l2)
4880 {
4881 #if 0
4882 rtx i1, i2;
4883
4884 if (l1 == l2)
4885 return true;
4886
4887 i1 = next_real_insn (l1);
4888 i2 = next_real_insn (l2);
4889 if (i1 == i2)
4890 return true;
4891
4892 if (i1 && simplejump_p (i1))
4893 {
4894 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
4895 }
4896
4897 if (i2 && simplejump_p (i2))
4898 {
4899 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
4900 }
4901 #endif
4902 /* When coming from gimple, we usually won't have emitted either
4903 the labels or the body of the switch statement. The job being
4904 done here should be done via jump threading at the tree level.
4905 Cases that go the same place should have the same label. */
4906 return l1 == l2;
4907 }
4908
4909 /* Delete nodes that branch to the default label from a list of
4910 case nodes. Eg. case 5: default: becomes just default: */
4911
4912 static void
4913 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
4914 {
4915 case_node_ptr ptr;
4916
4917 while (*prev)
4918 {
4919 ptr = *prev;
4920 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
4921 *prev = ptr->right;
4922 else
4923 prev = &ptr->right;
4924 }
4925 }
4926
4927 /* Scan an ordered list of case nodes
4928 combining those with consecutive values or ranges.
4929
4930 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4931
4932 static void
4933 group_case_nodes (case_node_ptr head)
4934 {
4935 case_node_ptr node = head;
4936
4937 while (node)
4938 {
4939 rtx lab;
4940 case_node_ptr np = node;
4941
4942 lab = label_rtx (node->code_label);
4943
4944 /* Try to group the successors of NODE with NODE. */
4945 while (((np = np->right) != 0)
4946 /* Do they jump to the same place? */
4947 && same_case_target_p (label_rtx (np->code_label), lab)
4948 /* Are their ranges consecutive? */
4949 && tree_int_cst_equal (np->low,
4950 fold (build (PLUS_EXPR,
4951 TREE_TYPE (node->high),
4952 node->high,
4953 integer_one_node)))
4954 /* An overflow is not consecutive. */
4955 && tree_int_cst_lt (node->high,
4956 fold (build (PLUS_EXPR,
4957 TREE_TYPE (node->high),
4958 node->high,
4959 integer_one_node))))
4960 {
4961 node->high = np->high;
4962 }
4963 /* NP is the first node after NODE which can't be grouped with it.
4964 Delete the nodes in between, and move on to that node. */
4965 node->right = np;
4966 node = np;
4967 }
4968 }
4969
4970 /* Take an ordered list of case nodes
4971 and transform them into a near optimal binary tree,
4972 on the assumption that any target code selection value is as
4973 likely as any other.
4974
4975 The transformation is performed by splitting the ordered
4976 list into two equal sections plus a pivot. The parts are
4977 then attached to the pivot as left and right branches. Each
4978 branch is then transformed recursively. */
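
/* For example (assuming equal costs for all labels), the ordered list

     1: -> 2: -> 3: -> 4: -> 5:

   is split at the pivot 3:, giving

            3:
           /  \
         1:    4:
           \     \
            2:    5:

   where each half is left as a short chain, since it has no more than
   two entries.  */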
4979
4980 static void
4981 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
4982 {
4983 case_node_ptr np;
4984
4985 np = *head;
4986 if (np)
4987 {
4988 int cost = 0;
4989 int i = 0;
4990 int ranges = 0;
4991 case_node_ptr *npp;
4992 case_node_ptr left;
4993
4994 /* Count the number of entries on branch. Also count the ranges. */
4995
4996 while (np)
4997 {
4998 if (!tree_int_cst_equal (np->low, np->high))
4999 {
5000 ranges++;
5001 if (use_cost_table)
5002 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5003 }
5004
5005 if (use_cost_table)
5006 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5007
5008 i++;
5009 np = np->right;
5010 }
5011
5012 if (i > 2)
5013 {
5014 /* Split this list if it is long enough for that to help. */
5015 npp = head;
5016 left = *npp;
5017 if (use_cost_table)
5018 {
5019 /* Find the place in the list that bisects the list's total cost.
5020 Here I gets half the total cost. */
5021 int n_moved = 0;
5022 i = (cost + 1) / 2;
5023 while (1)
5024 {
5025 /* Skip nodes while their cost does not reach that amount. */
5026 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5027 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5028 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5029 if (i <= 0)
5030 break;
5031 npp = &(*npp)->right;
5032 n_moved += 1;
5033 }
5034 if (n_moved == 0)
5035 {
5036 /* Leave this branch lopsided, but optimize left-hand
5037 side and fill in `parent' fields for right-hand side. */
5038 np = *head;
5039 np->parent = parent;
5040 balance_case_nodes (&np->left, np);
5041 for (; np->right; np = np->right)
5042 np->right->parent = np;
5043 return;
5044 }
5045 }
5046 /* If there are just three nodes, split at the middle one. */
5047 else if (i == 3)
5048 npp = &(*npp)->right;
5049 else
5050 {
5051 /* Find the place in the list that bisects the list's total cost,
5052 where ranges count as 2.
5053 Here I gets half the total cost. */
5054 i = (i + ranges + 1) / 2;
5055 while (1)
5056 {
5057 /* Skip nodes while their cost does not reach that amount. */
5058 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5059 i--;
5060 i--;
5061 if (i <= 0)
5062 break;
5063 npp = &(*npp)->right;
5064 }
5065 }
5066 *head = np = *npp;
5067 *npp = 0;
5068 np->parent = parent;
5069 np->left = left;
5070
5071 /* Optimize each of the two split parts. */
5072 balance_case_nodes (&np->left, np);
5073 balance_case_nodes (&np->right, np);
5074 }
5075 else
5076 {
5077 /* Else leave this branch as one level,
5078 but fill in `parent' fields. */
5079 np = *head;
5080 np->parent = parent;
5081 for (; np->right; np = np->right)
5082 np->right->parent = np;
5083 }
5084 }
5085 }
5086 \f
5087 /* Search the parent sections of the case node tree
5088 to see if a test for the lower bound of NODE would be redundant.
5089 INDEX_TYPE is the type of the index expression.
5090
5091 The instructions to generate the case decision tree are
5092 output in the same order as nodes are processed so it is
5093 known that if a parent node checks the range of the current
5094 node minus one, the current node is bounded at its lower
5095 span. Thus the test would be redundant. */
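
/* For instance, if this node covers the range 5 .. 7 and some ancestor
   node's HIGH value is 4 (our LOW minus one), then by the time control
   reaches the code for this node every value <= 4 has already been
   dispatched, so the "index >= 5" test can be omitted.  */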
5096
5097 static int
5098 node_has_low_bound (case_node_ptr node, tree index_type)
5099 {
5100 tree low_minus_one;
5101 case_node_ptr pnode;
5102
5103 /* If the lower bound of this node is the lowest value in the index type,
5104 we need not test it. */
5105
5106 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5107 return 1;
5108
5109 /* If this node has a left branch, the value at the left must be less
5110 than that at this node, so it cannot be bounded at the bottom and
5111 we need not bother testing any further. */
5112
5113 if (node->left)
5114 return 0;
5115
5116 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5117 node->low, integer_one_node));
5118
5119 /* If the subtraction above overflowed, we can't verify anything.
5120 Otherwise, look for a parent that tests our value - 1. */
5121
5122 if (! tree_int_cst_lt (low_minus_one, node->low))
5123 return 0;
5124
5125 for (pnode = node->parent; pnode; pnode = pnode->parent)
5126 if (tree_int_cst_equal (low_minus_one, pnode->high))
5127 return 1;
5128
5129 return 0;
5130 }
5131
5132 /* Search the parent sections of the case node tree
5133 to see if a test for the upper bound of NODE would be redundant.
5134 INDEX_TYPE is the type of the index expression.
5135
5136 The instructions to generate the case decision tree are
5137 output in the same order as nodes are processed so it is
5138 known that if a parent node checks the range of the current
5139 node plus one, the current node is bounded at its upper
5140 span. Thus the test would be redundant. */
5141
5142 static int
5143 node_has_high_bound (case_node_ptr node, tree index_type)
5144 {
5145 tree high_plus_one;
5146 case_node_ptr pnode;
5147
5148 /* If there is no upper bound, obviously no test is needed. */
5149
5150 if (TYPE_MAX_VALUE (index_type) == NULL)
5151 return 1;
5152
5153 /* If the upper bound of this node is the highest value in the type
5154 of the index expression, we need not test against it. */
5155
5156 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5157 return 1;
5158
5159 /* If this node has a right branch, the value at the right must be greater
5160 than that at this node, so it cannot be bounded at the top and
5161 we need not bother testing any further. */
5162
5163 if (node->right)
5164 return 0;
5165
5166 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5167 node->high, integer_one_node));
5168
5169 /* If the addition above overflowed, we can't verify anything.
5170 Otherwise, look for a parent that tests our value + 1. */
5171
5172 if (! tree_int_cst_lt (node->high, high_plus_one))
5173 return 0;
5174
5175 for (pnode = node->parent; pnode; pnode = pnode->parent)
5176 if (tree_int_cst_equal (high_plus_one, pnode->low))
5177 return 1;
5178
5179 return 0;
5180 }
5181
5182 /* Search the parent sections of the
5183 case node tree to see if both tests for the upper and lower
5184 bounds of NODE would be redundant. */
5185
5186 static int
5187 node_is_bounded (case_node_ptr node, tree index_type)
5188 {
5189 return (node_has_low_bound (node, index_type)
5190 && node_has_high_bound (node, index_type));
5191 }
5192
5193 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5194
5195 static void
5196 emit_jump_if_reachable (rtx label)
5197 {
5198 if (GET_CODE (get_last_insn ()) != BARRIER)
5199 emit_jump (label);
5200 }
5201 \f
5202 /* Emit step-by-step code to select a case for the value of INDEX.
5203 The thus generated decision tree follows the form of the
5204 case-node binary tree NODE, whose nodes represent test conditions.
5205 INDEX_TYPE is the type of the index of the switch.
5206
5207 Care is taken to prune redundant tests from the decision tree
5208 by detecting any boundary conditions already checked by
5209 emitted rtx. (See node_has_high_bound, node_has_low_bound
5210 and node_is_bounded, above.)
5211
5212 Where the test conditions can be shown to be redundant we emit
5213 an unconditional jump to the target code. As a further
5214 optimization, the subordinates of a tree node are examined to
5215 check for bounded nodes. In this case conditional and/or
5216 unconditional jumps as a result of the boundary check for the
5217 current node are arranged to target the subordinates' associated
5218 code for out-of-bound conditions on the current node.
5219
5220 We can assume that when control reaches the code generated here,
5221 the index value has already been compared with the parents
5222 of this node, and determined to be on the same side of each parent
5223 as this node is. Thus, if this node tests for the value 51,
5224 and a parent tested for 52, we don't need to consider
5225 the possibility of a value greater than 51. If another parent
5226 tests for the value 50, then this node need not test anything. */
5227
5228 static void
5229 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
5230 tree index_type)
5231 {
5232 /* If INDEX has an unsigned type, we must make unsigned branches. */
5233 int unsignedp = TYPE_UNSIGNED (index_type);
5234 enum machine_mode mode = GET_MODE (index);
5235 enum machine_mode imode = TYPE_MODE (index_type);
5236
5237 /* See if our parents have already tested everything for us.
5238 If they have, emit an unconditional jump for this node. */
5239 if (node_is_bounded (node, index_type))
5240 emit_jump (label_rtx (node->code_label));
5241
5242 else if (tree_int_cst_equal (node->low, node->high))
5243 {
5244 /* Node is single valued. First see if the index expression matches
5245 this node and then check our children, if any. */
5246
5247 do_jump_if_equal (index,
5248 convert_modes (mode, imode,
5249 expand_expr (node->low, NULL_RTX,
5250 VOIDmode, 0),
5251 unsignedp),
5252 label_rtx (node->code_label), unsignedp);
5253
5254 if (node->right != 0 && node->left != 0)
5255 {
5256 /* This node has children on both sides.
5257 Dispatch to one side or the other
5258 by comparing the index value with this node's value.
5259 If one subtree is bounded, check that one first,
5260 so we can avoid real branches in the tree. */
5261
5262 if (node_is_bounded (node->right, index_type))
5263 {
5264 emit_cmp_and_jump_insns (index,
5265 convert_modes
5266 (mode, imode,
5267 expand_expr (node->high, NULL_RTX,
5268 VOIDmode, 0),
5269 unsignedp),
5270 GT, NULL_RTX, mode, unsignedp,
5271 label_rtx (node->right->code_label));
5272 emit_case_nodes (index, node->left, default_label, index_type);
5273 }
5274
5275 else if (node_is_bounded (node->left, index_type))
5276 {
5277 emit_cmp_and_jump_insns (index,
5278 convert_modes
5279 (mode, imode,
5280 expand_expr (node->high, NULL_RTX,
5281 VOIDmode, 0),
5282 unsignedp),
5283 LT, NULL_RTX, mode, unsignedp,
5284 label_rtx (node->left->code_label));
5285 emit_case_nodes (index, node->right, default_label, index_type);
5286 }
5287
5288 /* If both children are single-valued cases with no
5289 children, finish up all the work. This way, we can save
5290 one ordered comparison. */
5291 else if (tree_int_cst_equal (node->right->low, node->right->high)
5292 && node->right->left == 0
5293 && node->right->right == 0
5294 && tree_int_cst_equal (node->left->low, node->left->high)
5295 && node->left->left == 0
5296 && node->left->right == 0)
5297 {
5298 /* Neither subtree is bounded, but both children are single-valued
5299 leaves, so just emit an equality test for each child's value. */
5300
5301 /* See if the value matches what the right hand side
5302 wants. */
5303 do_jump_if_equal (index,
5304 convert_modes (mode, imode,
5305 expand_expr (node->right->low,
5306 NULL_RTX,
5307 VOIDmode, 0),
5308 unsignedp),
5309 label_rtx (node->right->code_label),
5310 unsignedp);
5311
5312 /* See if the value matches what the left hand side
5313 wants. */
5314 do_jump_if_equal (index,
5315 convert_modes (mode, imode,
5316 expand_expr (node->left->low,
5317 NULL_RTX,
5318 VOIDmode, 0),
5319 unsignedp),
5320 label_rtx (node->left->code_label),
5321 unsignedp);
5322 }
5323
5324 else
5325 {
5326 /* Neither node is bounded. First distinguish the two sides;
5327 then emit the code for one side at a time. */
5328
5329 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5330
5331 /* See if the value is on the right. */
5332 emit_cmp_and_jump_insns (index,
5333 convert_modes
5334 (mode, imode,
5335 expand_expr (node->high, NULL_RTX,
5336 VOIDmode, 0),
5337 unsignedp),
5338 GT, NULL_RTX, mode, unsignedp,
5339 label_rtx (test_label));
5340
5341 /* Value must be on the left.
5342 Handle the left-hand subtree. */
5343 emit_case_nodes (index, node->left, default_label, index_type);
5344 /* If nothing in the left-hand subtree matched,
5345 go to the default label. */
5346 emit_jump_if_reachable (default_label);
5347
5348 /* Code branches here for the right-hand subtree. */
5349 expand_label (test_label);
5350 emit_case_nodes (index, node->right, default_label, index_type);
5351 }
5352 }
5353
5354 else if (node->right != 0 && node->left == 0)
5355 {
5356 /* Here we have a right child but no left, so we issue a conditional
5357 branch to default and then process the right child.
5358
5359 Omit the conditional branch to default when the right child is just
5360 a single-valued leaf; it costs too much space to save so little time. */
5361
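/* For example, if this node is 5 and its right subtree covers 7 ... 30,
   we first send index values below 5 to the default label (unless a
   parent already ruled them out) and then emit the tests for the right
   subtree; if the right child were just the single value 7, we would
   instead emit one equality test for 7 and nothing else.  */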
5362 if (node->right->right || node->right->left
5363 || !tree_int_cst_equal (node->right->low, node->right->high))
5364 {
5365 if (!node_has_low_bound (node, index_type))
5366 {
5367 emit_cmp_and_jump_insns (index,
5368 convert_modes
5369 (mode, imode,
5370 expand_expr (node->high, NULL_RTX,
5371 VOIDmode, 0),
5372 unsignedp),
5373 LT, NULL_RTX, mode, unsignedp,
5374 default_label);
5375 }
5376
5377 emit_case_nodes (index, node->right, default_label, index_type);
5378 }
5379 else
5380 /* We cannot process node->right normally
5381 since we haven't ruled out the numbers less than
5382 this node's value. So handle node->right explicitly. */
5383 do_jump_if_equal (index,
5384 convert_modes
5385 (mode, imode,
5386 expand_expr (node->right->low, NULL_RTX,
5387 VOIDmode, 0),
5388 unsignedp),
5389 label_rtx (node->right->code_label), unsignedp);
5390 }
5391
5392 else if (node->right == 0 && node->left != 0)
5393 {
5394 /* Just one subtree, on the left. */
5395 if (node->left->left || node->left->right
5396 || !tree_int_cst_equal (node->left->low, node->left->high))
5397 {
5398 if (!node_has_high_bound (node, index_type))
5399 {
5400 emit_cmp_and_jump_insns (index,
5401 convert_modes
5402 (mode, imode,
5403 expand_expr (node->high, NULL_RTX,
5404 VOIDmode, 0),
5405 unsignedp),
5406 GT, NULL_RTX, mode, unsignedp,
5407 default_label);
5408 }
5409
5410 emit_case_nodes (index, node->left, default_label, index_type);
5411 }
5412 else
5413 /* We cannot process node->left normally
5414 since we haven't ruled out the numbers greater than
5415 this node's value. So handle node->left explicitly. */
5416 do_jump_if_equal (index,
5417 convert_modes
5418 (mode, imode,
5419 expand_expr (node->left->low, NULL_RTX,
5420 VOIDmode, 0),
5421 unsignedp),
5422 label_rtx (node->left->code_label), unsignedp);
5423 }
5424 }
5425 else
5426 {
5427 /* Node is a range. These cases are very similar to those for a single
5428 value, except that we do not start by testing whether this node
5429 is the one to branch to. */
5430
5431 if (node->right != 0 && node->left != 0)
5432 {
5433 /* Node has subtrees on both sides.
5434 If the right-hand subtree is bounded,
5435 test for it first, since we can go straight there.
5436 Otherwise, we need to make a branch in the control structure,
5437 then handle the two subtrees. */
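/* For example, for a range 10 ... 20 with subtrees on both sides, the
   code below amounts to:

	if (index > 20) goto right-hand tests (or straight to their code);
	if (index >= 10) goto this case's code;
	... tests for the left-hand subtree ...  */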
5438 tree test_label = 0;
5439
5440 if (node_is_bounded (node->right, index_type))
5441 /* Right hand node is fully bounded so we can eliminate any
5442 testing and branch directly to the target code. */
5443 emit_cmp_and_jump_insns (index,
5444 convert_modes
5445 (mode, imode,
5446 expand_expr (node->high, NULL_RTX,
5447 VOIDmode, 0),
5448 unsignedp),
5449 GT, NULL_RTX, mode, unsignedp,
5450 label_rtx (node->right->code_label));
5451 else
5452 {
5453 /* Right hand node requires testing.
5454 Branch to a label where we will handle it later. */
5455
5456 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5457 emit_cmp_and_jump_insns (index,
5458 convert_modes
5459 (mode, imode,
5460 expand_expr (node->high, NULL_RTX,
5461 VOIDmode, 0),
5462 unsignedp),
5463 GT, NULL_RTX, mode, unsignedp,
5464 label_rtx (test_label));
5465 }
5466
5467 /* Value belongs to this node or to the left-hand subtree. */
5468
5469 emit_cmp_and_jump_insns (index,
5470 convert_modes
5471 (mode, imode,
5472 expand_expr (node->low, NULL_RTX,
5473 VOIDmode, 0),
5474 unsignedp),
5475 GE, NULL_RTX, mode, unsignedp,
5476 label_rtx (node->code_label));
5477
5478 /* Handle the left-hand subtree. */
5479 emit_case_nodes (index, node->left, default_label, index_type);
5480
5481 /* If right node had to be handled later, do that now. */
5482
5483 if (test_label)
5484 {
5485 /* If the left-hand subtree fell through,
5486 don't let it fall into the right-hand subtree. */
5487 emit_jump_if_reachable (default_label);
5488
5489 expand_label (test_label);
5490 emit_case_nodes (index, node->right, default_label, index_type);
5491 }
5492 }
5493
5494 else if (node->right != 0 && node->left == 0)
5495 {
5496 /* Deal with values to the left of this node,
5497 if they are possible. */
5498 if (!node_has_low_bound (node, index_type))
5499 {
5500 emit_cmp_and_jump_insns (index,
5501 convert_modes
5502 (mode, imode,
5503 expand_expr (node->low, NULL_RTX,
5504 VOIDmode, 0),
5505 unsignedp),
5506 LT, NULL_RTX, mode, unsignedp,
5507 default_label);
5508 }
5509
5510 /* Value belongs to this node or to the right-hand subtree. */
5511
5512 emit_cmp_and_jump_insns (index,
5513 convert_modes
5514 (mode, imode,
5515 expand_expr (node->high, NULL_RTX,
5516 VOIDmode, 0),
5517 unsignedp),
5518 LE, NULL_RTX, mode, unsignedp,
5519 label_rtx (node->code_label));
5520
5521 emit_case_nodes (index, node->right, default_label, index_type);
5522 }
5523
5524 else if (node->right == 0 && node->left != 0)
5525 {
5526 /* Deal with values to the right of this node,
5527 if they are possible. */
5528 if (!node_has_high_bound (node, index_type))
5529 {
5530 emit_cmp_and_jump_insns (index,
5531 convert_modes
5532 (mode, imode,
5533 expand_expr (node->high, NULL_RTX,
5534 VOIDmode, 0),
5535 unsignedp),
5536 GT, NULL_RTX, mode, unsignedp,
5537 default_label);
5538 }
5539
5540 /* Value belongs to this node or to the left-hand subtree. */
5541
5542 emit_cmp_and_jump_insns (index,
5543 convert_modes
5544 (mode, imode,
5545 expand_expr (node->low, NULL_RTX,
5546 VOIDmode, 0),
5547 unsignedp),
5548 GE, NULL_RTX, mode, unsignedp,
5549 label_rtx (node->code_label));
5550
5551 emit_case_nodes (index, node->left, default_label, index_type);
5552 }
5553
5554 else
5555 {
5556 /* Node has no children so we check low and high bounds to remove
5557 redundant tests. At most one of the bounds can hold, since
5558 otherwise this node would be bounded--a case tested already. */
5559 int high_bound = node_has_high_bound (node, index_type);
5560 int low_bound = node_has_low_bound (node, index_type);
5561
5562 if (!high_bound && low_bound)
5563 {
5564 emit_cmp_and_jump_insns (index,
5565 convert_modes
5566 (mode, imode,
5567 expand_expr (node->high, NULL_RTX,
5568 VOIDmode, 0),
5569 unsignedp),
5570 GT, NULL_RTX, mode, unsignedp,
5571 default_label);
5572 }
5573
5574 else if (!low_bound && high_bound)
5575 {
5576 emit_cmp_and_jump_insns (index,
5577 convert_modes
5578 (mode, imode,
5579 expand_expr (node->low, NULL_RTX,
5580 VOIDmode, 0),
5581 unsignedp),
5582 LT, NULL_RTX, mode, unsignedp,
5583 default_label);
5584 }
5585 else if (!low_bound && !high_bound)
5586 {
5587 /* Widen LOW and HIGH to the same width as INDEX. */
5588 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
5589 tree low = build1 (CONVERT_EXPR, type, node->low);
5590 tree high = build1 (CONVERT_EXPR, type, node->high);
5591 rtx low_rtx, new_index, new_bound;
5592
5593 /* Instead of doing two branches, emit one unsigned branch for
5594 (index-low) > (high-low). */
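/* For a range of 10 ... 20, say, this emits the equivalent of
   "if ((unsigned) (index - 10) > 10) goto default;": values below 10
   wrap around to large unsigned numbers, so a single unsigned
   comparison rejects both out-of-range directions.  */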
5595 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
5596 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
5597 NULL_RTX, unsignedp,
5598 OPTAB_WIDEN);
5599 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
5600 high, low)),
5601 NULL_RTX, mode, 0);
5602
5603 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
5604 mode, 1, default_label);
5605 }
5606
5607 emit_jump (label_rtx (node->code_label));
5608 }
5609 }
5610 }
5611
5612 #include "gt-stmt.h"