/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

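
/* Illustrative sketch (not part of the original file): a C front end
   expanding `if (cond) then_stmt;' would drive the functions defined
   below roughly as

	expand_start_cond (cond, 0);	-- emit the test, push a cond nesting
	... expand RTL for then_stmt ...
	expand_end_cond ();		-- emit the ending label, pop

   See the actual definitions later in this file for the exact
   interfaces.  */
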
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

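
/* Illustrative sketch (not part of the original file): once
   expand_end_case has flattened the AVL tree into a chain through the
   RIGHT fields, the labels can be visited in increasing case-value
   order like this:

	case_node_ptr n;
	for (n = case_list; n; n = n->right)
	  ... the range n->low .. n->high dispatches to n->code_label ...

   Here `case_list' stands for the head of the converted list.  */
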
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int n_function_calls;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups; it must be at the end of, or
             after the start of, the last unconditional cleanup, and
             before any conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
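
/* Illustrative sketch (not part of the original file): an
   `expand_start_WHATEVER' function typically pushes its construct
   with code along these lines (compare the expand_start_* functions
   later in the file):

	struct nesting *thiscase = ALLOC_NESTING ();
	thiscase->depth = ++nesting_depth;
	thiscase->next = case_stack;
	thiscase->all = nesting_stack;
	case_stack = thiscase;
	nesting_stack = thiscase;  */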

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                     \
do { struct nesting *target = STACK;                       \
     struct nesting *this;                                  \
     do { this = nesting_stack;                             \
          if (loop_stack == this)                           \
            loop_stack = loop_stack->next;                  \
          if (cond_stack == this)                           \
            cond_stack = cond_stack->next;                  \
          if (block_stack == this)                          \
            block_stack = block_stack->next;                \
          if (stack_block_stack == this)                    \
            stack_block_stack = stack_block_stack->next;    \
          if (case_stack == this)                           \
            case_stack = case_stack->next;                  \
          nesting_depth = nesting_stack->depth - 1;         \
          nesting_stack = this->all;                        \
          obstack_free (&stmt_obstack, this); }             \
     while (this != target); } while (0)
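
/* Illustrative sketch (not part of the original file): the matching
   `expand_end_WHATEVER' function pops the construct again, e.g.

	struct nesting *thiscase = case_stack;
	... emit the final labels, dispatch code, etc. ...
	POPSTACK (case_stack);

   POPSTACK also unlinks the entry from `nesting_stack' and from every
   other per-construct stack it appears on, and frees it.  */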
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Character strings, each containing a single decimal digit.  */
static char *digit_strings[10];

static int n_occurrences		PARAMS ((int, const char *));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
						 rtx, int));
static void expand_null_return_1	PARAMS ((rtx, int));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
						 case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting		PARAMS ((struct nesting *));
static void mark_loop_nesting		PARAMS ((struct nesting *));
static void mark_block_nesting		PARAMS ((struct nesting *));
static void mark_case_nesting		PARAMS ((struct nesting *));
static void mark_case_node		PARAMS ((struct case_node *));
static void mark_goto_fixup		PARAMS ((struct goto_fixup *));
static void free_case_nodes		PARAMS ((case_node_ptr));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
        {
          ggc_mark (l);
          ggc_mark_tree (l->label);
        }

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  int i;

  gcc_obstack_init (&stmt_obstack);

  for (i = 0; i < 10; i++)
    {
      digit_strings[i] = ggc_alloc_string (NULL, 1);
      digit_strings[i][0] = '0' + i;
    }
  ggc_add_string_root (digit_strings, 10);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
\f
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
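
/* Illustrative example (not part of the original file): GNU C's
   computed goto, as in

	void *labels[] = { &&l0, &&l1 };
	goto *labels[i];

   reaches expand_computed_goto above with EXP being the pointer
   expression `labels[i]'.  */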
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
                                               virtual_stack_vars_rtx,
                                               static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
        save_area = replace_rtx (copy_rtx (save_area),
                                 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
                                      save_area, label_ref));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, static_chain);
          emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency;
             not clear if really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (handler_slot);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
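
/* Illustrative example (not part of the original file): the nonlocal
   branch of expand_goto above handles GNU C nested functions such as

	void f (void)
	{
	  __label__ out;
	  void g (void) { goto out; }
	  g ();
	out:;
	}

   where the `goto out' in g must restore f's frame and stack context
   before jumping.  */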

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                    == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          insert_block (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
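
/* Illustrative example (not part of the original file): a fixup is
   needed for C code such as

	{
	  char buf[n];		-- variable size: the block has a stack level
	  goto done;		-- `done' not yet defined at this point
	}
     done:;

   expand_fixup records the goto; once the label's position is known,
   fixup_gotos (below) inserts the stack-restoring insns and any
   cleanups just before the jump.  */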
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such insns can be added by things like
             put_var_into_stack and can have INSN_UIDs out of the range
             of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point ensures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following element corresponds to our containing block,
             then this element must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
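
/* For example (illustrative note): n_occurrences (',', "=r,m") is 1,
   matching the single comma that separates the two alternatives of the
   constraint "=r,m"; expand_asm_operands below uses this to check that
   every operand has the same number of alternatives.  */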
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
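
/* Illustrative example (not part of the original file): a bare

	asm ("nop");

   arrives here with BODY the STRING_CST "nop" and is emitted as a
   single ASM_INPUT rtx wrapping the template text.  */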

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

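/* Illustrative example (not part of the original file): for the GNU C
   statement

	asm ("addl %1,%0" : "+r" (sum) : "r" (inc));

   this function receives one output with constraint "+r" and one input
   with constraint "r".  The code below rewrites the `+' to `=' and
   appends the output as an extra input with the matching constraint
   "0", so the insn effectively has output "=r" (sum) and inputs
   "r" (inc) and "0" (sum).  */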
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return;
        }

      tmp = outputs;
      while (tmp)
        {
          const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return;
            }

          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
         since it wasn't explicitly documented that way, and there is a
         large body of code that puts it last.  Swap the character to
         the front, so as not to uglify any place else.  */
      switch (c_len)
        {
        default:
          if ((p = strchr (constraint, '=')) != NULL)
            break;
          if ((p = strchr (constraint, '+')) != NULL)
            break;
        case 0:
          error ("output operand constraint lacks `='");
          return;
        }

      if (p != constraint)
        {
          j = *p;
          bcopy (constraint, constraint+1, p-constraint);
          *constraint = j;

          warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
        }

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
        {
          error ("output operand constraint %d contains `+'", i);
          return;
        }

      for (j = 1; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':
          case '=':
            error ("operand constraint contains '+' or '=' at illegal position.");
            return;

          case '%':
            if (i + 1 == ninputs + noutputs)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case '?':  case '!':  case '*':  case '&':  case '#':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
            break;

          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            error ("matching constraint not valid in output operand");
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
            /* ??? Before flow, auto inc/dec insns are not supposed to exist,
               excepting those that expand_call created.  So match memory
               and hope.  */
            allows_mem = 1;
            break;

          case 'g':  case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;

          case 'p':  case 'r':
            allows_reg = 1;
            break;

          default:
            if (! ISALPHA (constraint[j]))
              {
                error ("invalid punctuation `%c' in constraint",
                       constraint[j]);
                return;
              }
            if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
              allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
            else
              {
                /* Otherwise we can't assume anything about the nature of
                   the constraint except that it isn't purely registers.
                   Treat it like "g" and hope for the best.  */
                allows_reg = 1;
                allows_mem = 1;
              }
#endif
            break;
          }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
              || GET_CODE (output_rtx[i]) == CONCAT)
            {
              real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
              output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
              if (is_inout)
                emit_move_insn (output_rtx[i], real_output_rtx[i]);
            }
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 1);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               TREE_STRING_POINTER (string),
                               empty_string, 0, argvec, constraints,
                               filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense?  */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)));
          return;
        }

      c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':  case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'",
                       constraint[j]);
                return;
              }
            break;

          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
          case '?':  case '!':  case '*':  case '#':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            if (constraint[j] >= '0' + noutputs)
              {
                error
                  ("matching constraint references invalid operand number");
                return;
              }

            /* Try and find the real constraint for this dup.  */
            if ((j == 0 && c_len == 1)
                || (j == 1 && c_len == 2 && constraint[0] == '%'))
              {
                tree o = outputs;

                for (j = constraint[j] - '0'; j > 0; --j)
                  o = TREE_CHAIN (o);

                c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
                constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
                j = 0;
                break;
              }

            /* Fall through.  */

          case 'p':  case 'r':
            allows_reg = 1;
            break;

          case 'g':  case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;

          default:
            if (! ISALPHA (constraint[j]))
              {
                error ("invalid punctuation `%c' in constraint",
                       constraint[j]);
                return;
              }
            if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
              allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
            else
              {
                /* Otherwise we can't assume anything about the nature of
                   the constraint except that it isn't purely registers.
                   Treat it like "g" and hope for the best.  */
                allows_reg = 1;
                allows_mem = 1;
              }
#endif
            break;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      /* Never pass a CONCAT to an ASM.  */
      generating_concat_p = 0;
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints", i);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              rtx memloc = assign_temp (type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }

          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1764 /* We won't recognize volatile memory as a memory_operand
1765 at this point. Ignore it. */
1766 ;
1767 else if (queued_subexp_p (op))
1768 ;
1769 else
1770 /* ??? Leave this only until we have experience with what
1771 happens in combine and elsewhere when constraints are
1772 not satisfied. */
1773 warning ("asm operand %d probably doesn't match constraints", i);
1774 }
1775 generating_concat_p = old_generating_concat_p;
1776 ASM_OPERANDS_INPUT (body, i) = op;
1777
1778 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1779 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1780 orig_constraint);
1781 i++;
1782 }
1783
1784 /* Protect all the operands from the queue now that they have all been
1785 evaluated. */
1786
1787 generating_concat_p = 0;
1788
1789 for (i = 0; i < ninputs - ninout; i++)
1790 ASM_OPERANDS_INPUT (body, i)
1791 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1792
1793 for (i = 0; i < noutputs; i++)
1794 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1795
1796 /* For in-out operands, copy output rtx to input rtx. */
1797 for (i = 0; i < ninout; i++)
1798 {
1799 int j = inout_opnum[i];
1800
1801 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1802 = output_rtx[j];
1803 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1804 = gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
1805 }
1806
1807 generating_concat_p = old_generating_concat_p;
1808
1809 /* Now, for each output, construct an rtx
1810 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1811 ARGVEC CONSTRAINTS))
1812 If there is more than one, put them inside a PARALLEL. */
1813
1814 if (noutputs == 1 && nclobbers == 0)
1815 {
1816 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1817 = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1818 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1819 }
1820
1821 else if (noutputs == 0 && nclobbers == 0)
1822 {
1823 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1824 insn = emit_insn (body);
1825 }
1826
1827 else
1828 {
1829 rtx obody = body;
1830 int num = noutputs;
1831
1832 if (num == 0)
1833 num = 1;
1834
1835 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1836
1837 /* For each output operand, store a SET. */
1838 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1839 {
1840 XVECEXP (body, 0, i)
1841 = gen_rtx_SET (VOIDmode,
1842 output_rtx[i],
1843 gen_rtx_ASM_OPERANDS
1844 (GET_MODE (output_rtx[i]),
1845 TREE_STRING_POINTER (string),
1846 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1847 i, argvec, constraints,
1848 filename, line));
1849
1850 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1851 }
1852
1853 /* If there are no outputs (but there are some clobbers)
1854 store the bare ASM_OPERANDS into the PARALLEL. */
1855
1856 if (i == 0)
1857 XVECEXP (body, 0, i++) = obody;
1858
1859 /* Store (clobber REG) for each clobbered register specified. */
1860
1861 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1862 {
1863 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1864 int j = decode_reg_name (regname);
1865
1866 if (j < 0)
1867 {
1868 if (j == -3) /* `cc', which is not a register */
1869 continue;
1870
1871 if (j == -4) /* `memory', don't cache memory across asm */
1872 {
1873 XVECEXP (body, 0, i++)
1874 = gen_rtx_CLOBBER (VOIDmode,
1875 gen_rtx_MEM
1876 (BLKmode,
1877 gen_rtx_SCRATCH (VOIDmode)));
1878 continue;
1879 }
1880
1881 /* Ignore unknown register, error already signaled. */
1882 continue;
1883 }
1884
1885 /* Use QImode since that's guaranteed to clobber just one reg. */
1886 XVECEXP (body, 0, i++)
1887 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1888 }
1889
1890 insn = emit_insn (body);
1891 }
1892
1893 /* For any outputs that needed reloading into registers, spill them
1894 back to where they belong. */
1895 for (i = 0; i < noutputs; ++i)
1896 if (real_output_rtx[i])
1897 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1898
1899 free_temp_slots ();
1900 }
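/* Illustrative sketch (an addition, not part of the original source):
   given an extended asm such as

	asm volatile ("insn %0,%1" : "=r" (x) : "m" (y) : "memory");

   the code above builds a PARALLEL containing a SET of X from the
   ASM_OPERANDS plus (clobber (mem:BLK (scratch))) for the `memory'
   clobber; a `cc' clobber (decode_reg_name returning -3) is simply
   skipped.  */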
1901 \f
1902 /* Generate RTL to evaluate the expression EXP
1903 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1904
1905 void
1906 expand_expr_stmt (exp)
1907 tree exp;
1908 {
1909 /* If -W, warn about statements with no side effects,
1910 except for an explicit cast to void (e.g. for assert()), and
1911 except inside a ({...}) where they may be useful. */
1912 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1913 {
1914 if (! TREE_SIDE_EFFECTS (exp))
1915 {
1916 if ((extra_warnings || warn_unused_value)
1917 && !(TREE_CODE (exp) == CONVERT_EXPR
1918 && VOID_TYPE_P (TREE_TYPE (exp))))
1919 warning_with_file_and_line (emit_filename, emit_lineno,
1920 "statement with no effect");
1921 }
1922 else if (warn_unused_value)
1923 warn_if_unused_value (exp);
1924 }
1925
1926 /* If EXP is of function type and we are expanding statements for
1927 value, convert it to pointer-to-function. */
1928 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1929 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1930
1931 /* The call to `expand_expr' could cause last_expr_type and
1932 last_expr_value to get reset. Therefore, we set last_expr_value
1933 and last_expr_type *after* calling expand_expr. */
1934 last_expr_value = expand_expr (exp,
1935 (expr_stmts_for_value
1936 ? NULL_RTX : const0_rtx),
1937 VOIDmode, 0);
1938 last_expr_type = TREE_TYPE (exp);
1939
1940 /* If all we do is reference a volatile value in memory,
1941 copy it to a register to be sure it is actually touched. */
1942 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1943 && TREE_THIS_VOLATILE (exp))
1944 {
1945 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1946 ;
1947 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1948 copy_to_reg (last_expr_value);
1949 else
1950 {
1951 rtx lab = gen_label_rtx ();
1952
1953 /* Compare the value with itself to reference it. */
1954 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1955 expand_expr (TYPE_SIZE (last_expr_type),
1956 NULL_RTX, VOIDmode, 0),
1957 BLKmode, 0,
1958 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1959 lab);
1960 emit_label (lab);
1961 }
1962 }
1963
1964 /* If this expression is part of a ({...}) and is in memory, we may have
1965 to preserve temporaries. */
1966 preserve_temp_slots (last_expr_value);
1967
1968 /* Free any temporaries used to evaluate this expression. Any temporary
1969 used as a result of this expression will already have been preserved
1970 above. */
1971 free_temp_slots ();
1972
1973 emit_queue ();
1974 }
1975
1976 /* Warn if EXP contains any computations whose results are not used.
1977 Return 1 if a warning is printed; 0 otherwise. */
1978
1979 int
1980 warn_if_unused_value (exp)
1981 tree exp;
1982 {
1983 if (TREE_USED (exp))
1984 return 0;
1985
1986 /* Don't warn about void constructs. This includes casting to void,
1987 void function calls, and statement expressions with a final cast
1988 to void. */
1989 if (VOID_TYPE_P (TREE_TYPE (exp)))
1990 return 0;
1991
1992 switch (TREE_CODE (exp))
1993 {
1994 case PREINCREMENT_EXPR:
1995 case POSTINCREMENT_EXPR:
1996 case PREDECREMENT_EXPR:
1997 case POSTDECREMENT_EXPR:
1998 case MODIFY_EXPR:
1999 case INIT_EXPR:
2000 case TARGET_EXPR:
2001 case CALL_EXPR:
2002 case METHOD_CALL_EXPR:
2003 case RTL_EXPR:
2004 case TRY_CATCH_EXPR:
2005 case WITH_CLEANUP_EXPR:
2006 case EXIT_EXPR:
2007 /* We don't warn about COND_EXPR because it may be a useful
2008 construct if either arm contains a side effect. */
2009 case COND_EXPR:
2010 return 0;
2011
2012 case BIND_EXPR:
2013 /* For a binding, warn if no side effect within it. */
2014 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2015
2016 case SAVE_EXPR:
2017 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2018
2019 case TRUTH_ORIF_EXPR:
2020 case TRUTH_ANDIF_EXPR:
2021 /* In && or ||, warn if 2nd operand has no side effect. */
2022 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2023
2024 case COMPOUND_EXPR:
2025 if (TREE_NO_UNUSED_WARNING (exp))
2026 return 0;
2027 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2028 return 1;
2029 /* Let people do `(foo (), 0)' without a warning. */
2030 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2031 return 0;
2032 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2033
2034 case NOP_EXPR:
2035 case CONVERT_EXPR:
2036 case NON_LVALUE_EXPR:
2037 /* Don't warn about conversions not explicit in the user's program. */
2038 if (TREE_NO_UNUSED_WARNING (exp))
2039 return 0;
2040 /* Assignment to a cast usually results in a cast of a modify.
2041 Don't complain about that. There can be an arbitrary number of
2042 casts before the modify, so we must loop until we find the first
2043 non-cast expression and then test to see if that is a modify. */
2044 {
2045 tree tem = TREE_OPERAND (exp, 0);
2046
2047 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2048 tem = TREE_OPERAND (tem, 0);
2049
2050 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2051 || TREE_CODE (tem) == CALL_EXPR)
2052 return 0;
2053 }
2054 goto warn;
2055
2056 case INDIRECT_REF:
2057 /* Don't warn about automatic dereferencing of references, since
2058 the user cannot control it. */
2059 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2060 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2061 /* Fall through. */
2062
2063 default:
2064 /* Referencing a volatile value is a side effect, so don't warn. */
2065 if ((DECL_P (exp)
2066 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2067 && TREE_THIS_VOLATILE (exp))
2068 return 0;
2069
2070 /* If this is an expression which has no operands, there is no value
2071 to be unused. There are no such language-independent codes,
2072 but front ends may define such. */
2073 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2074 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2075 return 0;
2076
2077 warn:
2078 warning_with_file_and_line (emit_filename, emit_lineno,
2079 "value computed is not used");
2080 return 1;
2081 }
2082 }
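/* Illustrative examples (an addition, not part of the original
   source): a statement such as `x + 1;' reaches the default case
   above and draws "value computed is not used"; `(void) (x + 1);' is
   silenced by the VOID_TYPE_P check, and `(foo (), 0);' by the
   COMPOUND_EXPR rule for a constant second operand.  */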
2083
2084 /* Clear out the memory of the last expression evaluated. */
2085
2086 void
2087 clear_last_expr ()
2088 {
2089 last_expr_type = 0;
2090 }
2091
2092 /* Begin a statement which will return a value.
2093 Return the RTL_EXPR for this statement expr.
2094 The caller must save that value and pass it to expand_end_stmt_expr. */
2095
2096 tree
2097 expand_start_stmt_expr ()
2098 {
2099 tree t;
2100
2101 /* Make the RTL_EXPR node temporary, not momentary,
2102 so that rtl_expr_chain doesn't become garbage. */
2103 t = make_node (RTL_EXPR);
2104 do_pending_stack_adjust ();
2105 start_sequence_for_rtl_expr (t);
2106 NO_DEFER_POP;
2107 expr_stmts_for_value++;
2108 return t;
2109 }
2110
2111 /* Restore the previous state at the end of a statement that returns a value.
2112 Returns a tree node representing the statement's value and the
2113 insns to compute the value.
2114
2115 The nodes of that expression have been freed by now, so we cannot use them.
2116 But we don't want to do that anyway; the expression has already been
2117 evaluated and now we just want to use the value. So generate a RTL_EXPR
2118 with the proper type and RTL value.
2119
2120 If the last substatement was not an expression,
2121 return something with type `void'. */
2122
2123 tree
2124 expand_end_stmt_expr (t)
2125 tree t;
2126 {
2127 OK_DEFER_POP;
2128
2129 if (last_expr_type == 0)
2130 {
2131 last_expr_type = void_type_node;
2132 last_expr_value = const0_rtx;
2133 }
2134 else if (last_expr_value == 0)
2135 /* There are some cases where this can happen, such as when the
2136 statement is of void type. */
2137 last_expr_value = const0_rtx;
2138 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2139 /* Remove any possible QUEUED. */
2140 last_expr_value = protect_from_queue (last_expr_value, 0);
2141
2142 emit_queue ();
2143
2144 TREE_TYPE (t) = last_expr_type;
2145 RTL_EXPR_RTL (t) = last_expr_value;
2146 RTL_EXPR_SEQUENCE (t) = get_insns ();
2147
2148 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2149
2150 end_sequence ();
2151
2152 /* Don't consider deleting this expr or containing exprs at tree level. */
2153 TREE_SIDE_EFFECTS (t) = 1;
2154 /* Propagate volatility of the actual RTL expr. */
2155 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2156
2157 last_expr_type = 0;
2158 expr_stmts_for_value--;
2159
2160 return t;
2161 }
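/* Usage sketch (an addition, not part of the original source): a
   front end expanding the statement expression `({ foo (); x; })'
   would pair the two functions above roughly as

	tree t = expand_start_stmt_expr ();
	... expand each inner statement with expand_expr_stmt,
	    so that `x' becomes last_expr_value ...
	t = expand_end_stmt_expr (t);

   leaving T an RTL_EXPR carrying the value and the insns that
   compute it.  */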
2162 \f
2163 /* Generate RTL for the start of an if-then. COND is the expression
2164 whose truth should be tested.
2165
2166 If EXITFLAG is nonzero, this conditional is visible to
2167 `exit_something'. */
2168
2169 void
2170 expand_start_cond (cond, exitflag)
2171 tree cond;
2172 int exitflag;
2173 {
2174 struct nesting *thiscond = ALLOC_NESTING ();
2175
2176 /* Make an entry on cond_stack for the cond we are entering. */
2177
2178 thiscond->next = cond_stack;
2179 thiscond->all = nesting_stack;
2180 thiscond->depth = ++nesting_depth;
2181 thiscond->data.cond.next_label = gen_label_rtx ();
2182 /* Before we encounter an `else', we don't need a separate exit label
2183 unless there are supposed to be exit statements
2184 to exit this conditional. */
2185 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2186 thiscond->data.cond.endif_label = thiscond->exit_label;
2187 cond_stack = thiscond;
2188 nesting_stack = thiscond;
2189
2190 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2191 }
2192
2193 /* Generate RTL between the then-clause and the elseif-clause
2194 of an if-then-elseif-.... */
2195
2196 void
2197 expand_start_elseif (cond)
2198 tree cond;
2199 {
2200 if (cond_stack->data.cond.endif_label == 0)
2201 cond_stack->data.cond.endif_label = gen_label_rtx ();
2202 emit_jump (cond_stack->data.cond.endif_label);
2203 emit_label (cond_stack->data.cond.next_label);
2204 cond_stack->data.cond.next_label = gen_label_rtx ();
2205 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2206 }
2207
2208 /* Generate RTL between the then-clause and the else-clause
2209 of an if-then-else. */
2210
2211 void
2212 expand_start_else ()
2213 {
2214 if (cond_stack->data.cond.endif_label == 0)
2215 cond_stack->data.cond.endif_label = gen_label_rtx ();
2216
2217 emit_jump (cond_stack->data.cond.endif_label);
2218 emit_label (cond_stack->data.cond.next_label);
2219 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2220 }
2221
2222 /* After calling expand_start_else, turn this "else" into an "else if"
2223 by providing another condition. */
2224
2225 void
2226 expand_elseif (cond)
2227 tree cond;
2228 {
2229 cond_stack->data.cond.next_label = gen_label_rtx ();
2230 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2231 }
2232
2233 /* Generate RTL for the end of an if-then.
2234 Pop the record for it off of cond_stack. */
2235
2236 void
2237 expand_end_cond ()
2238 {
2239 struct nesting *thiscond = cond_stack;
2240
2241 do_pending_stack_adjust ();
2242 if (thiscond->data.cond.next_label)
2243 emit_label (thiscond->data.cond.next_label);
2244 if (thiscond->data.cond.endif_label)
2245 emit_label (thiscond->data.cond.endif_label);
2246
2247 POPSTACK (cond_stack);
2248 last_expr_type = 0;
2249 }
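/* Call-sequence sketch (an addition, not part of the original
   source): expanding `if (a) s1; else if (b) s2; else s3;' uses the
   functions above roughly as

	expand_start_cond (a, 0);
	... expand s1 ...
	expand_start_elseif (b);
	... expand s2 ...
	expand_start_else ();
	... expand s3 ...
	expand_end_cond ();

   with each false condition jumping to the next_label emitted by the
   following clause.  */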
2250 \f
2251 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2252 loop should be exited by `exit_something'. This is a loop for which
2253 `expand_continue' will jump to the top of the loop.
2254
2255 Make an entry on loop_stack to record the labels associated with
2256 this loop. */
2257
2258 struct nesting *
2259 expand_start_loop (exit_flag)
2260 int exit_flag;
2261 {
2262 register struct nesting *thisloop = ALLOC_NESTING ();
2263
2264 /* Make an entry on loop_stack for the loop we are entering. */
2265
2266 thisloop->next = loop_stack;
2267 thisloop->all = nesting_stack;
2268 thisloop->depth = ++nesting_depth;
2269 thisloop->data.loop.start_label = gen_label_rtx ();
2270 thisloop->data.loop.end_label = gen_label_rtx ();
2271 thisloop->data.loop.alt_end_label = 0;
2272 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2273 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2274 loop_stack = thisloop;
2275 nesting_stack = thisloop;
2276
2277 do_pending_stack_adjust ();
2278 emit_queue ();
2279 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2280 emit_label (thisloop->data.loop.start_label);
2281
2282 return thisloop;
2283 }
2284
2285 /* Like expand_start_loop but for a loop where the continuation point
2286 (for expand_continue_loop) will be specified explicitly. */
2287
2288 struct nesting *
2289 expand_start_loop_continue_elsewhere (exit_flag)
2290 int exit_flag;
2291 {
2292 struct nesting *thisloop = expand_start_loop (exit_flag);
2293 loop_stack->data.loop.continue_label = gen_label_rtx ();
2294 return thisloop;
2295 }
2296
2297 /* Begin a null, aka do { } while (0) "loop". But since the contents
2298 of said loop can still contain a break, we must frob the loop nest. */
2299
2300 struct nesting *
2301 expand_start_null_loop ()
2302 {
2303 register struct nesting *thisloop = ALLOC_NESTING ();
2304
2305 /* Make an entry on loop_stack for the loop we are entering. */
2306
2307 thisloop->next = loop_stack;
2308 thisloop->all = nesting_stack;
2309 thisloop->depth = ++nesting_depth;
2310 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2311 thisloop->data.loop.end_label = gen_label_rtx ();
2312 thisloop->data.loop.alt_end_label = NULL_RTX;
2313 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2314 thisloop->exit_label = thisloop->data.loop.end_label;
2315 loop_stack = thisloop;
2316 nesting_stack = thisloop;
2317
2318 return thisloop;
2319 }
2320
2321 /* Specify the continuation point for a loop started with
2322 expand_start_loop_continue_elsewhere.
2323 Use this at the point in the code to which a continue statement
2324 should jump. */
2325
2326 void
2327 expand_loop_continue_here ()
2328 {
2329 do_pending_stack_adjust ();
2330 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2331 emit_label (loop_stack->data.loop.continue_label);
2332 }
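/* Call-sequence sketch (an addition, not part of the original
   source): expanding `for (init; cond; incr) body' might go roughly

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_loop_continue_here ();
	... expand incr ...
	expand_end_loop ();

   so that `continue' jumps to the increment code and `break' exits
   through the loop's exit label.  */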
2333
2334 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2335 Pop the block off of loop_stack. */
2336
2337 void
2338 expand_end_loop ()
2339 {
2340 rtx start_label = loop_stack->data.loop.start_label;
2341 rtx insn = get_last_insn ();
2342 int needs_end_jump = 1;
2343
2344 /* Mark the continue-point at the top of the loop if none elsewhere. */
2345 if (start_label == loop_stack->data.loop.continue_label)
2346 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2347
2348 do_pending_stack_adjust ();
2349
2350 /* If optimizing, perhaps reorder the loop.
2351 First, try to use a condjump near the end.
2352 expand_exit_loop_if_false ends loops with unconditional jumps,
2353 like this:
2354
2355 if (test) goto label;
2356 optional: cleanup
2357 goto loop_stack->data.loop.end_label
2358 barrier
2359 label:
2360
2361 If we find such a pattern, we can end the loop earlier. */
2362
2363 if (optimize
2364 && GET_CODE (insn) == CODE_LABEL
2365 && LABEL_NAME (insn) == NULL
2366 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2367 {
2368 rtx label = insn;
2369 rtx jump = PREV_INSN (PREV_INSN (label));
2370
2371 if (GET_CODE (jump) == JUMP_INSN
2372 && GET_CODE (PATTERN (jump)) == SET
2373 && SET_DEST (PATTERN (jump)) == pc_rtx
2374 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2375 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2376 == loop_stack->data.loop.end_label))
2377 {
2378 rtx prev;
2379
2380 /* The test might be complex and reference LABEL multiple times,
2381 like the loop in loop_iterations to set vtop. To handle this,
2382 we move LABEL. */
2383 insn = PREV_INSN (label);
2384 reorder_insns (label, label, start_label);
2385
2386 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2387 {
2388 /* We ignore line number notes, but if we see any other note,
2389 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2390 NOTE_INSN_LOOP_*, we disable this optimization. */
2391 if (GET_CODE (prev) == NOTE)
2392 {
2393 if (NOTE_LINE_NUMBER (prev) < 0)
2394 break;
2395 continue;
2396 }
2397 if (GET_CODE (prev) == CODE_LABEL)
2398 break;
2399 if (GET_CODE (prev) == JUMP_INSN)
2400 {
2401 if (GET_CODE (PATTERN (prev)) == SET
2402 && SET_DEST (PATTERN (prev)) == pc_rtx
2403 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2404 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2405 == LABEL_REF)
2406 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2407 {
2408 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2409 = start_label;
2410 emit_note_after (NOTE_INSN_LOOP_END, prev);
2411 needs_end_jump = 0;
2412 }
2413 break;
2414 }
2415 }
2416 }
2417 }
2418
2419 /* If the loop starts with a loop exit, roll that to the end where
2420 it will optimize together with the jump back.
2421
2422 We look for the conditional branch to the exit, except that once
2423 we find such a branch, we don't look past 30 instructions.
2424
2425 In more detail, if the loop presently looks like this (in pseudo-C):
2426
2427 start_label:
2428 if (test) goto end_label;
2429 body;
2430 goto start_label;
2431 end_label:
2432
2433 transform it to look like:
2434
2435 goto start_label;
2436 newstart_label:
2437 body;
2438 start_label:
2439 if (test) goto end_label;
2440 goto newstart_label;
2441 end_label:
2442
2443 Here, the `test' may actually consist of some reasonably complex
2444 code, terminating in a test. */
2445
2446 if (optimize
2447 && needs_end_jump
2448 &&
2449 ! (GET_CODE (insn) == JUMP_INSN
2450 && GET_CODE (PATTERN (insn)) == SET
2451 && SET_DEST (PATTERN (insn)) == pc_rtx
2452 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2453 {
2454 int eh_regions = 0;
2455 int num_insns = 0;
2456 rtx last_test_insn = NULL_RTX;
2457
2458 /* Scan insns from the top of the loop looking for a qualified
2459 conditional exit. */
2460 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2461 insn = NEXT_INSN (insn))
2462 {
2463 if (GET_CODE (insn) == NOTE)
2464 {
2465 if (optimize < 2
2466 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2467 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2468 /* The code that actually moves the exit test will
2469 carefully leave BLOCK notes in their original
2470 location. That means, however, that we can't debug
2471 the exit test itself. So, we refuse to move code
2472 containing BLOCK notes at low optimization levels. */
2473 break;
2474
2475 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2476 ++eh_regions;
2477 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2478 {
2479 --eh_regions;
2480 if (eh_regions < 0)
2481 /* We've come to the end of an EH region, but
2482 never saw the beginning of that region. That
2483 means that an EH region begins before the top
2484 of the loop, and ends in the middle of it. The
2485 existence of such a situation violates a basic
2486 assumption in this code, since that would imply
2487 that even when EH_REGIONS is zero, we might
2488 move code out of an exception region. */
2489 abort ();
2490 }
2491
2492 /* We must not walk into a nested loop. */
2493 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2494 break;
2495
2496 /* We already know this INSN is a NOTE, so there's no
2497 point in looking at it to see if it's a JUMP. */
2498 continue;
2499 }
2500
2501 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2502 num_insns++;
2503
2504 if (last_test_insn && num_insns > 30)
2505 break;
2506
2507 if (eh_regions > 0)
2508 /* We don't want to move a partial EH region. Consider:
2509
2510 while ( ( { try {
2511 if (cond ()) 0;
2512 else {
2513 bar();
2514 1;
2515 }
2516 } catch (...) {
2517 1;
2518 } )) {
2519 body;
2520 }
2521
2522 This isn't legal C++, but here's what it's supposed to
2523 mean: if cond() is true, stop looping. Otherwise,
2524 call bar, and keep looping. In addition, if cond
2525 throws an exception, catch it and keep looping. Such
2526 constructs are certainly legal in LISP.
2527
2528 We should not move the `if (cond()) 0' test since then
2529 the EH-region for the try-block would be broken up.
2530 (In this case we would move the EH_BEG note for the `try'
2531 and `if cond()' but not the call to bar() or the
2532 EH_END note.)
2533
2534 So we don't look for tests within an EH region. */
2535 continue;
2536
2537 if (GET_CODE (insn) == JUMP_INSN
2538 && GET_CODE (PATTERN (insn)) == SET
2539 && SET_DEST (PATTERN (insn)) == pc_rtx)
2540 {
2541 /* This is indeed a jump. */
2542 rtx dest1 = NULL_RTX;
2543 rtx dest2 = NULL_RTX;
2544 rtx potential_last_test;
2545 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2546 {
2547 /* A conditional jump. */
2548 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2549 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2550 potential_last_test = insn;
2551 }
2552 else
2553 {
2554 /* An unconditional jump. */
2555 dest1 = SET_SRC (PATTERN (insn));
2556 /* Include the BARRIER after the JUMP. */
2557 potential_last_test = NEXT_INSN (insn);
2558 }
2559
2560 do {
2561 if (dest1 && GET_CODE (dest1) == LABEL_REF
2562 && ((XEXP (dest1, 0)
2563 == loop_stack->data.loop.alt_end_label)
2564 || (XEXP (dest1, 0)
2565 == loop_stack->data.loop.end_label)))
2566 {
2567 last_test_insn = potential_last_test;
2568 break;
2569 }
2570
2571 /* If this was a conditional jump, there may be
2572 another label at which we should look. */
2573 dest1 = dest2;
2574 dest2 = NULL_RTX;
2575 } while (dest1);
2576 }
2577 }
2578
2579 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2580 {
2581 /* We found one. Move everything from there up
2582 to the end of the loop, and add a jump into the loop
2583 to jump to there. */
2584 register rtx newstart_label = gen_label_rtx ();
2585 register rtx start_move = start_label;
2586 rtx next_insn;
2587
2588 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2589 then we want to move this note also. */
2590 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2591 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2592 == NOTE_INSN_LOOP_CONT))
2593 start_move = PREV_INSN (start_move);
2594
2595 emit_label_after (newstart_label, PREV_INSN (start_move));
2596
2597 /* Actually move the insns. Start at the beginning, and
2598 keep copying insns until we've copied the
2599 last_test_insn. */
2600 for (insn = start_move; insn; insn = next_insn)
2601 {
2602 /* Figure out which insn comes after this one. We have
2603 to do this before we move INSN. */
2604 if (insn == last_test_insn)
2605 /* We've moved all the insns. */
2606 next_insn = NULL_RTX;
2607 else
2608 next_insn = NEXT_INSN (insn);
2609
2610 if (GET_CODE (insn) == NOTE
2611 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2612 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2613 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2614 NOTE_INSN_BLOCK_ENDs because the correct generation
2615 of debugging information depends on these appearing
2616 in the same order in the RTL and in the tree
2617 structure, where they are represented as BLOCKs.
2618 So, we don't move block notes. Of course, moving
2619 the code inside the block is likely to make it
2620 impossible to debug the instructions in the exit
2621 test, but such is the price of optimization. */
2622 continue;
2623
2624 /* Move the INSN. */
2625 reorder_insns (insn, insn, get_last_insn ());
2626 }
2627
2628 emit_jump_insn_after (gen_jump (start_label),
2629 PREV_INSN (newstart_label));
2630 emit_barrier_after (PREV_INSN (newstart_label));
2631 start_label = newstart_label;
2632 }
2633 }
2634
2635 if (needs_end_jump)
2636 {
2637 emit_jump (start_label);
2638 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2639 }
2640 emit_label (loop_stack->data.loop.end_label);
2641
2642 POPSTACK (loop_stack);
2643
2644 last_expr_type = 0;
2645 }
2646
2647 /* Finish a null loop, aka do { } while (0). */
2648
2649 void
2650 expand_end_null_loop ()
2651 {
2652 do_pending_stack_adjust ();
2653 emit_label (loop_stack->data.loop.end_label);
2654
2655 POPSTACK (loop_stack);
2656
2657 last_expr_type = 0;
2658 }
2659
2660 /* Generate a jump to the current loop's continue-point.
2661 This is usually the top of the loop, but may be specified
2662 explicitly elsewhere. If not currently inside a loop,
2663 return 0 and do nothing; caller will print an error message. */
2664
2665 int
2666 expand_continue_loop (whichloop)
2667 struct nesting *whichloop;
2668 {
2669 last_expr_type = 0;
2670 if (whichloop == 0)
2671 whichloop = loop_stack;
2672 if (whichloop == 0)
2673 return 0;
2674 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2675 NULL_RTX);
2676 return 1;
2677 }
2678
2679 /* Generate a jump to exit the current loop. If not currently inside a loop,
2680 return 0 and do nothing; caller will print an error message. */
2681
2682 int
2683 expand_exit_loop (whichloop)
2684 struct nesting *whichloop;
2685 {
2686 last_expr_type = 0;
2687 if (whichloop == 0)
2688 whichloop = loop_stack;
2689 if (whichloop == 0)
2690 return 0;
2691 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2692 return 1;
2693 }
2694
2695 /* Generate a conditional jump to exit the current loop if COND
2696 evaluates to zero. If not currently inside a loop,
2697 return 0 and do nothing; caller will print an error message. */
2698
2699 int
2700 expand_exit_loop_if_false (whichloop, cond)
2701 struct nesting *whichloop;
2702 tree cond;
2703 {
2704 rtx label = gen_label_rtx ();
2705 rtx last_insn;
2706 last_expr_type = 0;
2707
2708 if (whichloop == 0)
2709 whichloop = loop_stack;
2710 if (whichloop == 0)
2711 return 0;
2712 /* In order to handle fixups, we actually create a conditional jump
2713 around an unconditional branch to exit the loop. If fixups are
2714 necessary, they go before the unconditional branch. */
2715
2716 do_jump (cond, NULL_RTX, label);
2717 last_insn = get_last_insn ();
2718 if (GET_CODE (last_insn) == CODE_LABEL)
2719 whichloop->data.loop.alt_end_label = last_insn;
2720 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2721 NULL_RTX);
2722 emit_label (label);
2723
2724 return 1;
2725 }
2726
2727 /* Return nonzero if the loop nest is empty. Else return zero. */
2728
2729 int
2730 stmt_loop_nest_empty ()
2731 {
2732 /* cfun->stmt can be NULL if we are building a call to get the
2733 EH context for a setjmp/longjmp EH target and the current
2734 function was a deferred inline function. */
2735 return (cfun->stmt == NULL || loop_stack == NULL);
2736 }
2737
2738 /* Return non-zero if we should preserve sub-expressions as separate
2739 pseudos. We never do so if we aren't optimizing. We always do so
2740 if -fexpensive-optimizations.
2741
2742 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2743 the loop may still be a small one. */
2744
2745 int
2746 preserve_subexpressions_p ()
2747 {
2748 rtx insn;
2749
2750 if (flag_expensive_optimizations)
2751 return 1;
2752
2753 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2754 return 0;
2755
2756 insn = get_last_insn_anywhere ();
2757
2758 return (insn
2759 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2760 < n_non_fixed_regs * 3));
2762 }
2763
2764 /* Generate a jump to exit the current loop, conditional, binding contour
2765 or case statement. Not all such constructs are visible to this function,
2766 only those started with EXIT_FLAG nonzero. Individual languages use
2767 the EXIT_FLAG parameter to control which kinds of constructs you can
2768 exit this way.
2769
2770 If not currently inside anything that can be exited,
2771 return 0 and do nothing; caller will print an error message. */
2772
2773 int
2774 expand_exit_something ()
2775 {
2776 struct nesting *n;
2777 last_expr_type = 0;
2778 for (n = nesting_stack; n; n = n->all)
2779 if (n->exit_label != 0)
2780 {
2781 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2782 return 1;
2783 }
2784
2785 return 0;
2786 }
2787 \f
2788 /* Generate RTL to return from the current function, with no value.
2789 (That is, we do not do anything about returning any value.) */
2790
2791 void
2792 expand_null_return ()
2793 {
2794 struct nesting *block = block_stack;
2795 rtx last_insn = get_last_insn ();
2796
2797 /* If this function was declared to return a value, but we
2798 didn't, clobber the return registers so that they are not
2799 propagated live to the rest of the function. */
2800 clobber_return_register ();
2801
2802 /* Does any pending block have cleanups? */
2803 while (block && block->data.block.cleanups == 0)
2804 block = block->next;
2805
2806 /* If yes, use a goto to return, since that runs cleanups. */
2807
2808 expand_null_return_1 (last_insn, block != 0);
2809 }
2810
2811 /* Generate RTL to return from the current function, with value VAL. */
2812
2813 static void
2814 expand_value_return (val)
2815 rtx val;
2816 {
2817 struct nesting *block = block_stack;
2818 rtx last_insn = get_last_insn ();
2819 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2820
2821 /* Copy the value to the return location
2822 unless it's already there. */
2823
2824 if (return_reg != val)
2825 {
2826 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2827 #ifdef PROMOTE_FUNCTION_RETURN
2828 int unsignedp = TREE_UNSIGNED (type);
2829 enum machine_mode old_mode
2830 = DECL_MODE (DECL_RESULT (current_function_decl));
2831 enum machine_mode mode
2832 = promote_mode (type, old_mode, &unsignedp, 1);
2833
2834 if (mode != old_mode)
2835 val = convert_modes (mode, old_mode, val, unsignedp);
2836 #endif
2837 if (GET_CODE (return_reg) == PARALLEL)
2838 emit_group_load (return_reg, val, int_size_in_bytes (type),
2839 TYPE_ALIGN (type));
2840 else
2841 emit_move_insn (return_reg, val);
2842 }
2843
2844 /* Does any pending block have cleanups? */
2845
2846 while (block && block->data.block.cleanups == 0)
2847 block = block->next;
2848
2849 /* If yes, use a goto to return, since that runs cleanups.
2850 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2851
2852 expand_null_return_1 (last_insn, block != 0);
2853 }
2854
2855 /* Output a return with no value. If LAST_INSN is nonzero,
2856 pretend that the return takes place after LAST_INSN.
2857 If USE_GOTO is nonzero then don't use a return instruction;
2858 go to the return label instead. This causes any cleanups
2859 of pending blocks to be executed normally. */
2860
2861 static void
2862 expand_null_return_1 (last_insn, use_goto)
2863 rtx last_insn;
2864 int use_goto;
2865 {
2866 rtx end_label = cleanup_label ? cleanup_label : return_label;
2867
2868 clear_pending_stack_adjust ();
2869 do_pending_stack_adjust ();
2870 last_expr_type = 0;
2871
2872 /* PCC-struct return always uses an epilogue. */
2873 if (current_function_returns_pcc_struct || use_goto)
2874 {
2875 if (end_label == 0)
2876 end_label = return_label = gen_label_rtx ();
2877 expand_goto_internal (NULL_TREE, end_label, last_insn);
2878 return;
2879 }
2880
2881 /* Otherwise output a simple return-insn if one is available,
2882 unless it won't do the job. */
2883 #ifdef HAVE_return
2884 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2885 {
2886 emit_jump_insn (gen_return ());
2887 emit_barrier ();
2888 return;
2889 }
2890 #endif
2891
2892 /* Otherwise jump to the epilogue. */
2893 expand_goto_internal (NULL_TREE, end_label, last_insn);
2894 }
2895 \f
2896 /* Generate RTL to evaluate the expression RETVAL and return it
2897 from the current function. */
2898
2899 void
2900 expand_return (retval)
2901 tree retval;
2902 {
2903 /* If there are any cleanups to be performed, then they will
2904 be inserted following LAST_INSN. It is desirable
2905 that the last_insn, for such purposes, should be the
2906 last insn before computing the return value. Otherwise, cleanups
2907 which call functions can clobber the return value. */
2908 /* ??? rms: I think that is erroneous, because in C++ it would
2909 run destructors on variables that might be used in the subsequent
2910 computation of the return value. */
2911 rtx last_insn = 0;
2912 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2913 register rtx val = 0;
2914 tree retval_rhs;
2915 int cleanups;
2916
2917 /* If function wants no value, give it none. */
2918 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2919 {
2920 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2921 emit_queue ();
2922 expand_null_return ();
2923 return;
2924 }
2925
2926 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2927 /* This is not sufficient. We also need to watch for cleanups of the
2928 expression we are about to expand. Unfortunately, we cannot know
2929 if it has cleanups until we expand it, and we want to change how we
2930 expand it depending upon whether we need cleanups. We can't win. */
2931 #if 0
2932 cleanups = any_pending_cleanups (1);
2933 #else
2934 cleanups = 1;
2935 #endif
2936
2937 if (retval == error_mark_node)
2938 retval_rhs = NULL_TREE;
2939 else if (TREE_CODE (retval) == RESULT_DECL)
2940 retval_rhs = retval;
2941 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2942 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2943 retval_rhs = TREE_OPERAND (retval, 1);
2944 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2945 /* Recognize tail-recursive call to void function. */
2946 retval_rhs = retval;
2947 else
2948 retval_rhs = NULL_TREE;
2949
2950 /* Only use `last_insn' if there are cleanups which must be run. */
2951 if (cleanups || cleanup_label != 0)
2952 last_insn = get_last_insn ();
2953
2954 /* Distribute return down conditional expr if either of the sides
2955 may involve tail recursion (see test below). This enhances the number
2956 of tail recursions we see. Don't do this always since it can produce
2957 sub-optimal code in some cases and we distribute assignments into
2958 conditional expressions when it would help. */
2959
2960 if (optimize && retval_rhs != 0
2961 && frame_offset == 0
2962 && TREE_CODE (retval_rhs) == COND_EXPR
2963 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2964 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2965 {
2966 rtx label = gen_label_rtx ();
2967 tree expr;
2968
2969 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2970 start_cleanup_deferral ();
2971 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2972 DECL_RESULT (current_function_decl),
2973 TREE_OPERAND (retval_rhs, 1));
2974 TREE_SIDE_EFFECTS (expr) = 1;
2975 expand_return (expr);
2976 emit_label (label);
2977
2978 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2979 DECL_RESULT (current_function_decl),
2980 TREE_OPERAND (retval_rhs, 2));
2981 TREE_SIDE_EFFECTS (expr) = 1;
2982 expand_return (expr);
2983 end_cleanup_deferral ();
2984 return;
2985 }
2986
2987 /* If the result is an aggregate that is being returned in one (or more)
2988 registers, load the registers here. The compiler currently can't handle
2989 copying a BLKmode value into registers. We could put this code in a
2990 more general area (for use by everyone instead of just function
2991 call/return), but until this feature is generally usable it is kept here
2992 (and in expand_call). The value must go into a pseudo in case there
2993 are cleanups that will clobber the real return register. */
2994
2995 if (retval_rhs != 0
2996 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2997 && GET_CODE (result_rtl) == REG)
2998 {
2999 int i;
3000 unsigned HOST_WIDE_INT bitpos, xbitpos;
3001 unsigned HOST_WIDE_INT big_endian_correction = 0;
3002 unsigned HOST_WIDE_INT bytes
3003 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3004 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3005 unsigned int bitsize
3006 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3007 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3008 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3009 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3010 enum machine_mode tmpmode, result_reg_mode;
3011
3012 /* Structures whose size is not a multiple of a word are aligned
3013 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3014 machine, this means we must skip the empty high order bytes when
3015 calculating the bit offset. */
3016 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3017 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3018 * BITS_PER_UNIT));
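/* Worked example (an addition, not part of the original source): for
   a 6-byte structure on a 32-bit big-endian target, bytes %
   UNITS_PER_WORD == 2, so big_endian_correction == 32 - 2*8 == 16;
   the copy below then skips the empty high 16 bits of the first
   destination word.  */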
3019
3020 /* Copy the structure BITSIZE bits at a time. */
3021 for (bitpos = 0, xbitpos = big_endian_correction;
3022 bitpos < bytes * BITS_PER_UNIT;
3023 bitpos += bitsize, xbitpos += bitsize)
3024 {
3025 /* We need a new destination pseudo each time xbitpos is
3026 on a word boundary and when xbitpos == big_endian_correction
3027 (the first time through). */
3028 if (xbitpos % BITS_PER_WORD == 0
3029 || xbitpos == big_endian_correction)
3030 {
3031 /* Generate an appropriate register. */
3032 dst = gen_reg_rtx (word_mode);
3033 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3034
3035 /* Clobber the destination before we move anything into it. */
3036 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3037 }
3038
3039 /* We need a new source operand each time bitpos is on a word
3040 boundary. */
3041 if (bitpos % BITS_PER_WORD == 0)
3042 src = operand_subword_force (result_val,
3043 bitpos / BITS_PER_WORD,
3044 BLKmode);
3045
3046 /* Use bitpos for the source extraction (left justified) and
3047 xbitpos for the destination store (right justified). */
3048 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3049 extract_bit_field (src, bitsize,
3050 bitpos % BITS_PER_WORD, 1,
3051 NULL_RTX, word_mode, word_mode,
3052 bitsize, BITS_PER_WORD),
3053 bitsize, BITS_PER_WORD);
3054 }
3055
3056 /* Find the smallest integer mode large enough to hold the
3057 entire structure and use that mode instead of BLKmode
3058 on the USE insn for the return register. */
3059 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3060 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3061 tmpmode != VOIDmode;
3062 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3063 {
3064 /* Have we found a large enough mode? */
3065 if (GET_MODE_SIZE (tmpmode) >= bytes)
3066 break;
3067 }
3068
3069 /* No suitable mode found. */
3070 if (tmpmode == VOIDmode)
3071 abort ();
3072
3073 PUT_MODE (result_rtl, tmpmode);
3074
3075 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3076 result_reg_mode = word_mode;
3077 else
3078 result_reg_mode = tmpmode;
3079 result_reg = gen_reg_rtx (result_reg_mode);
3080
3081 emit_queue ();
3082 for (i = 0; i < n_regs; i++)
3083 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3084 result_pseudos[i]);
3085
3086 if (tmpmode != result_reg_mode)
3087 result_reg = gen_lowpart (tmpmode, result_reg);
3088
3089 expand_value_return (result_reg);
3090 }
3091 else if (cleanups
3092 && retval_rhs != 0
3093 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3094 && (GET_CODE (result_rtl) == REG
3095 || (GET_CODE (result_rtl) == PARALLEL)))
3096 {
3097 /* Calculate the return value into a temporary (usually a pseudo
3098 reg). */
3099 val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
3100 0, 0, 1);
3101 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3102 val = force_not_mem (val);
3103 emit_queue ();
3104 /* Return the calculated value, doing cleanups first. */
3105 expand_value_return (val);
3106 }
3107 else
3108 {
3109 /* No cleanups or no hard reg used;
3110 calculate value into hard return reg. */
3111 expand_expr (retval, const0_rtx, VOIDmode, 0);
3112 emit_queue ();
3113 expand_value_return (result_rtl);
3114 }
3115 }
3116
3117 /* Return 1 if the end of the generated RTX is not a barrier.
3118 This means code already compiled can drop through. */
3119
3120 int
3121 drop_through_at_end_p ()
3122 {
3123 rtx insn = get_last_insn ();
3124 while (insn && GET_CODE (insn) == NOTE)
3125 insn = PREV_INSN (insn);
3126 return insn && GET_CODE (insn) != BARRIER;
3127 }
3128 \f
3129 /* Attempt to optimize a potential tail recursion call into a goto.
3130 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3131 where to place the jump to the tail recursion label.
3132
3133 Return TRUE if the call was optimized into a goto. */
3134
3135 int
3136 optimize_tail_recursion (arguments, last_insn)
3137 tree arguments;
3138 rtx last_insn;
3139 {
3140 /* Finish checking validity, and if valid emit code to set the
3141 argument variables for the new call. */
3142 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3143 {
3144 if (tail_recursion_label == 0)
3145 {
3146 tail_recursion_label = gen_label_rtx ();
3147 emit_label_after (tail_recursion_label,
3148 tail_recursion_reentry);
3149 }
3150 emit_queue ();
3151 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3152 emit_barrier ();
3153 return 1;
3154 }
3155 return 0;
3156 }
3157
3158 /* Emit code to alter this function's formal parms for a tail-recursive call.
3159 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3160 FORMALS is the chain of decls of formals.
3161 Return 1 if this can be done;
3162 otherwise return 0 and do not emit any code. */
3163
3164 static int
3165 tail_recursion_args (actuals, formals)
3166 tree actuals, formals;
3167 {
3168 register tree a = actuals, f = formals;
3169 register int i;
3170 register rtx *argvec;
3171
3172 /* Check that number and types of actuals are compatible
3173 with the formals. This is not always true in valid C code.
3174 Also check that no formal needs to be addressable
3175 and that all formals are scalars. */
3176
3177 /* Also count the args. */
3178
3179 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3180 {
3181 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3182 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3183 return 0;
3184 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3185 return 0;
3186 }
3187 if (a != 0 || f != 0)
3188 return 0;
3189
3190 /* Compute all the actuals. */
3191
3192 argvec = (rtx *) alloca (i * sizeof (rtx));
3193
3194 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3195 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3196
3197 /* Find which actual values refer to current values of previous formals.
3198 Copy each of them now, before any formal is changed. */
3199
3200 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3201 {
3202 int copy = 0;
3203 register int j;
3204 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3205 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3206 {
3207 copy = 1;
3208 break;
3209 }
3210 if (copy)
3211 argvec[i] = copy_to_reg (argvec[i]);
3212 }
3213
3214 /* Store the values of the actuals into the formals. */
3215
3216 for (f = formals, a = actuals, i = 0; f;
3217 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3218 {
3219 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3220 emit_move_insn (DECL_RTL (f), argvec[i]);
3221 else
3222 convert_move (DECL_RTL (f), argvec[i],
3223 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3224 }
3225
3226 free_temp_slots ();
3227 return 1;
3228 }
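/* Transform sketch (an addition, not part of the original source):
   for a self-call `return f (n - 1, a * n);' inside f (n, a), the
   code above computes both actuals, copies `a * n' to a fresh
   register because it mentions the earlier formal `n', and then
   stores the results back into `n' and `a'.  optimize_tail_recursion
   then jumps to tail_recursion_label instead of emitting a call.  */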
3229 \f
3230 /* Generate the RTL code for entering a binding contour.
3231 The variables are declared one by one, by calls to `expand_decl'.
3232
3233 FLAGS is a bitwise or of the following flags:
3234
3235 1 - Nonzero if this construct should be visible to
3236 `exit_something'.
3237
3238 2 - Nonzero if this contour does not require a
3239 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3240 language-independent code should set this flag because they
3241 will not create corresponding BLOCK nodes. (There should be
3242 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3243 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3244 when expand_end_bindings is called.
3245
3246 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3247 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3248 note. */
3249
3250 void
3251 expand_start_bindings_and_block (flags, block)
3252 int flags;
3253 tree block;
3254 {
3255 struct nesting *thisblock = ALLOC_NESTING ();
3256 rtx note;
3257 int exit_flag = ((flags & 1) != 0);
3258 int block_flag = ((flags & 2) == 0);
3259
3260 /* If a BLOCK is supplied, then the caller should be requesting a
3261 NOTE_INSN_BLOCK_BEG note. */
3262 if (!block_flag && block)
3263 abort ();
3264
3265 /* Create a note to mark the beginning of the block. */
3266 if (block_flag)
3267 {
3268 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3269 NOTE_BLOCK (note) = block;
3270 }
3271 else
3272 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3273
3274 /* Make an entry on block_stack for the block we are entering. */
3275
3276 thisblock->next = block_stack;
3277 thisblock->all = nesting_stack;
3278 thisblock->depth = ++nesting_depth;
3279 thisblock->data.block.stack_level = 0;
3280 thisblock->data.block.cleanups = 0;
3281 thisblock->data.block.n_function_calls = 0;
3282 thisblock->data.block.exception_region = 0;
3283 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3284
3285 thisblock->data.block.conditional_code = 0;
3286 thisblock->data.block.last_unconditional_cleanup = note;
3287 /* When we insert instructions after the last unconditional cleanup,
3288 we don't adjust last_insn. That means that a later add_insn will
3289 clobber the instructions we've just added. The easiest way to
3290 fix this is to just insert another instruction here, so that the
3291 instructions inserted after the last unconditional cleanup are
3292 never the last instruction. */
3293 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3294 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3295
3296 if (block_stack
3297 && !(block_stack->data.block.cleanups == NULL_TREE
3298 && block_stack->data.block.outer_cleanups == NULL_TREE))
3299 thisblock->data.block.outer_cleanups
3300 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3301 block_stack->data.block.outer_cleanups);
3302 else
3303 thisblock->data.block.outer_cleanups = 0;
3304 thisblock->data.block.label_chain = 0;
3305 thisblock->data.block.innermost_stack_block = stack_block_stack;
3306 thisblock->data.block.first_insn = note;
3307 thisblock->data.block.block_start_count = ++current_block_start_count;
3308 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3309 block_stack = thisblock;
3310 nesting_stack = thisblock;
3311
3312 /* Make a new level for allocating stack slots. */
3313 push_temp_slots ();
3314 }
3315
3316 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3317 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3318 expand_expr are made. After we end the region, we know that all
3319 space for all temporaries that were created by TARGET_EXPRs will be
3320 destroyed and their space freed for reuse. */
3321
3322 void
3323 expand_start_target_temps ()
3324 {
3325 /* This is so that even if the result is preserved, the space
3326 allocated will be freed, as we know that it is no longer in use. */
3327 push_temp_slots ();
3328
3329 /* Start a new binding layer that will keep track of all cleanup
3330 actions to be performed. */
3331 expand_start_bindings (2);
3332
3333 target_temp_slot_level = temp_slot_level;
3334 }
3335
3336 void
3337 expand_end_target_temps ()
3338 {
3339 expand_end_bindings (NULL_TREE, 0, 0);
3340
3341 /* This is so that even if the result is preserved, the space
3342 allocated will be freed, as we know that it is no longer in use. */
3343 pop_temp_slots ();
3344 }
3345
3346 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3347 in question represents the outermost pair of curly braces (i.e. the "body
3348 block") of a function or method.
3349
3350 For any BLOCK node representing a "body block" of a function or method, the
3351 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3352 represents the outermost (function) scope for the function or method (i.e.
3353 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3354 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3355
3356 int
3357 is_body_block (stmt)
3358 register tree stmt;
3359 {
3360 if (TREE_CODE (stmt) == BLOCK)
3361 {
3362 tree parent = BLOCK_SUPERCONTEXT (stmt);
3363
3364 if (parent && TREE_CODE (parent) == BLOCK)
3365 {
3366 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3367
3368 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3369 return 1;
3370 }
3371 }
3372
3373 return 0;
3374 }
3375
3376 /* Mark top block of block_stack as an implicit binding for an
3377 exception region. This is used to prevent infinite recursion when
3378 ending a binding with expand_end_bindings. It is only ever called
3379 by expand_eh_region_start, as that is the only way to create a
3380 block stack for an exception region. */
3381
3382 void
3383 mark_block_as_eh_region ()
3384 {
3385 block_stack->data.block.exception_region = 1;
3386 if (block_stack->next
3387 && block_stack->next->data.block.conditional_code)
3388 {
3389 block_stack->data.block.conditional_code
3390 = block_stack->next->data.block.conditional_code;
3391 block_stack->data.block.last_unconditional_cleanup
3392 = block_stack->next->data.block.last_unconditional_cleanup;
3393 block_stack->data.block.cleanup_ptr
3394 = block_stack->next->data.block.cleanup_ptr;
3395 }
3396 }
3397
3398 /* True if we are currently emitting insns in an area of output code
3399 that is controlled by a conditional expression. This is used by
3400 the cleanup handling code to generate conditional cleanup actions. */
3401
3402 int
3403 conditional_context ()
3404 {
3405 return block_stack && block_stack->data.block.conditional_code;
3406 }
3407
3408 /* Mark top block of block_stack as not for an implicit binding for an
3409 exception region. This is only ever done by expand_eh_region_end
3410 to let expand_end_bindings know that it is being called explicitly
3411 to end just the binding layer associated with the exception
3412 region; otherwise expand_end_bindings would try to end all
3413 implicit binding layers for exception regions, and then one
3414 normal binding layer. */
3415
3416 void
3417 mark_block_as_not_eh_region ()
3418 {
3419 block_stack->data.block.exception_region = 0;
3420 }
3421
3422 /* True if the top block of block_stack was marked as for an exception
3423 region by mark_block_as_eh_region. */
3424
3425 int
3426 is_eh_region ()
3427 {
3428 return cfun && block_stack && block_stack->data.block.exception_region;
3429 }
3430
3431 /* Emit a handler label for a nonlocal goto handler.
3432 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3433
3434 static rtx
3435 expand_nl_handler_label (slot, before_insn)
3436 rtx slot, before_insn;
3437 {
3438 rtx insns;
3439 rtx handler_label = gen_label_rtx ();
3440
3441 /* Don't let jump_optimize delete the handler. */
3442 LABEL_PRESERVE_P (handler_label) = 1;
3443
3444 start_sequence ();
3445 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3446 insns = get_insns ();
3447 end_sequence ();
3448 emit_insns_before (insns, before_insn);
3449
3450 emit_label (handler_label);
3451
3452 return handler_label;
3453 }
3454
3455 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3456 handler. */
3457 static void
3458 expand_nl_goto_receiver ()
3459 {
3460 #ifdef HAVE_nonlocal_goto
3461 if (! HAVE_nonlocal_goto)
3462 #endif
3463 /* First adjust our frame pointer to its actual value. It was
3464 previously set to the start of the virtual area corresponding to
3465 the stacked variables when we branched here and now needs to be
3466 adjusted to the actual hardware fp value.
3467
3468 Assignments to virtual registers are converted by
3469 instantiate_virtual_regs into the corresponding assignment
3470 to the underlying register (fp in this case) that makes
3471 the original assignment true.
3472 So the following insn will actually be
3473 decrementing fp by STARTING_FRAME_OFFSET. */
3474 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3475
3476 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3477 if (fixed_regs[ARG_POINTER_REGNUM])
3478 {
3479 #ifdef ELIMINABLE_REGS
3480 /* If the argument pointer can be eliminated in favor of the
3481 frame pointer, we don't need to restore it. We assume here
3482 that if such an elimination is present, it can always be used.
3483 This is the case on all known machines; if we don't make this
3484 assumption, we do unnecessary saving on many machines. */
3485 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3486 size_t i;
3487
3488 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3489 if (elim_regs[i].from == ARG_POINTER_REGNUM
3490 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3491 break;
3492
3493 if (i == ARRAY_SIZE (elim_regs))
3494 #endif
3495 {
3496 /* Now restore our arg pointer from the address at which it
3497 was saved in our stack frame.
3498 If there hasn't been space allocated for it yet, make
3499 some now. */
3500 if (arg_pointer_save_area == 0)
3501 arg_pointer_save_area
3502 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3503 emit_move_insn (virtual_incoming_args_rtx,
3504 /* We need a pseudo here, or else
3505 instantiate_virtual_regs_1 complains. */
3506 copy_to_reg (arg_pointer_save_area));
3507 }
3508 }
3509 #endif
3510
3511 #ifdef HAVE_nonlocal_goto_receiver
3512 if (HAVE_nonlocal_goto_receiver)
3513 emit_insn (gen_nonlocal_goto_receiver ());
3514 #endif
3515 }
3516
3517 /* Make handlers for nonlocal gotos taking place in the function calls in
3518 block THISBLOCK. */
3519
3520 static void
3521 expand_nl_goto_receivers (thisblock)
3522 struct nesting *thisblock;
3523 {
3524 tree link;
3525 rtx afterward = gen_label_rtx ();
3526 rtx insns, slot;
3527 rtx label_list;
3528 int any_invalid;
3529
3530 /* Record the handler address in the stack slot for that purpose,
3531 during this block, saving and restoring the outer value. */
3532 if (thisblock->next != 0)
3533 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3534 {
3535 rtx save_receiver = gen_reg_rtx (Pmode);
3536 emit_move_insn (XEXP (slot, 0), save_receiver);
3537
3538 start_sequence ();
3539 emit_move_insn (save_receiver, XEXP (slot, 0));
3540 insns = get_insns ();
3541 end_sequence ();
3542 emit_insns_before (insns, thisblock->data.block.first_insn);
3543 }
3544
3545 /* Jump around the handlers; they run only when specially invoked. */
3546 emit_jump (afterward);
3547
3548 /* Make a separate handler for each label. */
3549 link = nonlocal_labels;
3550 slot = nonlocal_goto_handler_slots;
3551 label_list = NULL_RTX;
3552 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3553 /* Skip any labels we shouldn't be able to jump to from here;
3554 we generate one special handler for all of them below, which just calls
3555 abort. */
3556 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3557 {
3558 rtx lab;
3559 lab = expand_nl_handler_label (XEXP (slot, 0),
3560 thisblock->data.block.first_insn);
3561 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3562
3563 expand_nl_goto_receiver ();
3564
3565 /* Jump to the "real" nonlocal label. */
3566 expand_goto (TREE_VALUE (link));
3567 }
3568
3569 /* A second pass over all nonlocal labels; this time we handle those
3570 we should not be able to jump to at this point. */
3571 link = nonlocal_labels;
3572 slot = nonlocal_goto_handler_slots;
3573 any_invalid = 0;
3574 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3575 if (DECL_TOO_LATE (TREE_VALUE (link)))
3576 {
3577 rtx lab;
3578 lab = expand_nl_handler_label (XEXP (slot, 0),
3579 thisblock->data.block.first_insn);
3580 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3581 any_invalid = 1;
3582 }
3583
3584 if (any_invalid)
3585 {
3586 expand_nl_goto_receiver ();
3587 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3588 VOIDmode, 0);
3589 emit_barrier ();
3590 }
3591
3592 nonlocal_goto_handler_labels = label_list;
3593 emit_label (afterward);
3594 }
3595
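/* Editor's illustration (not part of the original source): the kind
   of GNU C input that exercises these receivers.  A nested function
   jumps to a label in its containing function, so blocks of the
   containing function that make calls may need receivers for the
   nonlocal jump to land on:

       void outer (void)
       {
         __label__ done;
         void inner (void) { goto done; }
         inner ();
        done:
         ;
       }

   The call to inner () can transfer control back into `outer' at
   `done' without returning normally.  */
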
3596 /* Warn about any unused VARS (which may contain nodes other than
3597 VAR_DECLs, but such nodes are ignored). The nodes are connected
3598 via the TREE_CHAIN field. */
3599
3600 void
3601 warn_about_unused_variables (vars)
3602 tree vars;
3603 {
3604 tree decl;
3605
3606 if (warn_unused_variable)
3607 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3608 if (TREE_CODE (decl) == VAR_DECL
3609 && ! TREE_USED (decl)
3610 && ! DECL_IN_SYSTEM_HEADER (decl)
3611 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3612 warning_with_decl (decl, "unused variable `%s'");
3613 }
3614
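/* Editor's example (not part of the original source):

       void f (void) { int unused; }

   draws "unused variable `unused'" when -Wunused-variable is in
   effect, while artificial decls and decls from system headers are
   deliberately left quiet by the tests above.  */
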
3615 /* Generate RTL code to terminate a binding contour.
3616
3617 VARS is the chain of VAR_DECL nodes for the variables bound in this
3618 contour. There may actually be other nodes in this chain, but any
3619 nodes other than VAR_DECLS are ignored.
3620
3621 MARK_ENDS is nonzero if we should put a note at the beginning
3622 and end of this binding contour.
3623
3624 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3625 (That is true automatically if the contour has a saved stack level.) */
3626
3627 void
3628 expand_end_bindings (vars, mark_ends, dont_jump_in)
3629 tree vars;
3630 int mark_ends;
3631 int dont_jump_in;
3632 {
3633 register struct nesting *thisblock;
3634
3635 while (block_stack->data.block.exception_region)
3636 {
3637 /* Because we don't need or want a new temporary level and
3638 because we didn't create one in expand_eh_region_start,
3639 create a fake one now to avoid removing one in
3640 expand_end_bindings. */
3641 push_temp_slots ();
3642
3643 block_stack->data.block.exception_region = 0;
3644
3645 expand_end_bindings (NULL_TREE, 0, 0);
3646 }
3647
3648 /* Since expand_eh_region_start does an expand_start_bindings, we
3649 have to first end all the bindings that were created by
3650 expand_eh_region_start. */
3651
3652 thisblock = block_stack;
3653
3654 /* If any of the variables in this scope were not used, warn the
3655 user. */
3656 warn_about_unused_variables (vars);
3657
3658 if (thisblock->exit_label)
3659 {
3660 do_pending_stack_adjust ();
3661 emit_label (thisblock->exit_label);
3662 }
3663
3664 /* If necessary, make handlers for nonlocal gotos taking
3665 place in the function calls in this block. */
3666 if (function_call_count != thisblock->data.block.n_function_calls
3667 && nonlocal_labels
3668 /* Make handler for outermost block
3669 if there were any nonlocal gotos to this function. */
3670 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3671 /* Make handler for inner block if it has something
3672 special to do when you jump out of it. */
3673 : (thisblock->data.block.cleanups != 0
3674 || thisblock->data.block.stack_level != 0)))
3675 expand_nl_goto_receivers (thisblock);
3676
3677 /* Don't allow jumping into a block that has a stack level.
3678 Cleanups are allowed, though. */
3679 if (dont_jump_in
3680 || thisblock->data.block.stack_level != 0)
3681 {
3682 struct label_chain *chain;
3683
3684 /* Any labels in this block are no longer valid to go to.
3685 Mark them to cause an error message. */
3686 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3687 {
3688 DECL_TOO_LATE (chain->label) = 1;
3689 /* If any goto without a fixup came to this label,
3690 that must be an error, because gotos without fixups
3691 come from outside all saved stack-levels. */
3692 if (TREE_ADDRESSABLE (chain->label))
3693 error_with_decl (chain->label,
3694 "label `%s' used before containing binding contour");
3695 }
3696 }
3697
3698 /* Restore stack level in effect before the block
3699 (only if variable-size objects allocated). */
3700 /* Perform any cleanups associated with the block. */
3701
3702 if (thisblock->data.block.stack_level != 0
3703 || thisblock->data.block.cleanups != 0)
3704 {
3705 int reachable;
3706 rtx insn;
3707
3708 /* Don't let cleanups affect ({...}) constructs. */
3709 int old_expr_stmts_for_value = expr_stmts_for_value;
3710 rtx old_last_expr_value = last_expr_value;
3711 tree old_last_expr_type = last_expr_type;
3712 expr_stmts_for_value = 0;
3713
3714 /* Only clean up here if this point can actually be reached. */
3715 insn = get_last_insn ();
3716 if (GET_CODE (insn) == NOTE)
3717 insn = prev_nonnote_insn (insn);
3718 reachable = (! insn || GET_CODE (insn) != BARRIER);
3719
3720 /* Do the cleanups. */
3721 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3722 if (reachable)
3723 do_pending_stack_adjust ();
3724
3725 expr_stmts_for_value = old_expr_stmts_for_value;
3726 last_expr_value = old_last_expr_value;
3727 last_expr_type = old_last_expr_type;
3728
3729 /* Restore the stack level. */
3730
3731 if (reachable && thisblock->data.block.stack_level != 0)
3732 {
3733 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3734 thisblock->data.block.stack_level, NULL_RTX);
3735 if (nonlocal_goto_handler_slots != 0)
3736 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3737 NULL_RTX);
3738 }
3739
3740 /* Any gotos out of this block must also do these things.
3741 Also report any gotos with fixups that came to labels in this
3742 level. */
3743 fixup_gotos (thisblock,
3744 thisblock->data.block.stack_level,
3745 thisblock->data.block.cleanups,
3746 thisblock->data.block.first_insn,
3747 dont_jump_in);
3748 }
3749
3750 /* Mark the beginning and end of the scope if requested.
3751 We do this now, after running cleanups on the variables
3752 just going out of scope, so they are in scope for their cleanups. */
3753
3754 if (mark_ends)
3755 {
3756 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3757 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3758 }
3759 else
3760 /* Get rid of the beginning-mark if we don't make an end-mark. */
3761 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3762
3763 /* Restore the temporary level of TARGET_EXPRs. */
3764 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3765
3766 /* Restore block_stack level for containing block. */
3767
3768 stack_block_stack = thisblock->data.block.innermost_stack_block;
3769 POPSTACK (block_stack);
3770
3771 /* Pop the stack slot nesting and free any slots at this level. */
3772 pop_temp_slots ();
3773 }
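
#if 0
/* Editor's sketch (not part of the original source): how a front end
   typically brackets one binding contour.  The zero flag argument to
   expand_start_bindings and the use of `getdecls' to collect the
   contour's declarations are assumptions about the caller, not part
   of this file.  */
expand_start_bindings (0);
/* ... expand the declarations and statements of the compound body ... */
expand_end_bindings (getdecls (), 1, 0);
#endif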
3774 \f
3775 /* Generate code to save the stack pointer at the start of the current block
3776 and set up to restore it on exit. */
3777
3778 void
3779 save_stack_pointer ()
3780 {
3781 struct nesting *thisblock = block_stack;
3782
3783 if (thisblock->data.block.stack_level == 0)
3784 {
3785 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3786 &thisblock->data.block.stack_level,
3787 thisblock->data.block.first_insn);
3788 stack_block_stack = thisblock;
3789 }
3790 }
3791 \f
3792 /* Generate RTL for the automatic variable declaration DECL.
3793 (Other kinds of declarations are simply ignored if seen here.) */
3794
3795 void
3796 expand_decl (decl)
3797 register tree decl;
3798 {
3799 struct nesting *thisblock;
3800 tree type;
3801
3802 type = TREE_TYPE (decl);
3803
3804 /* Only automatic variables need any expansion done.
3805 Static and external variables, and external functions,
3806 will be handled by `assemble_variable' (called from finish_decl).
3807 TYPE_DECL and CONST_DECL require nothing.
3808 PARM_DECLs are handled in `assign_parms'. */
3809
3810 if (TREE_CODE (decl) != VAR_DECL)
3811 return;
3812 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3813 return;
3814
3815 thisblock = block_stack;
3816
3817 /* Create the RTL representation for the variable. */
3818
3819 if (type == error_mark_node)
3820 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3821 else if (DECL_SIZE (decl) == 0)
3822 /* Variable with incomplete type. */
3823 {
3824 if (DECL_INITIAL (decl) == 0)
3825 /* Error message was already done; now avoid a crash. */
3826 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3827 else
3828 /* An initializer is going to decide the size of this array.
3829 Until we know the size, represent its address with a reg. */
3830 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3831
3832 set_mem_attributes (DECL_RTL (decl), decl, 1);
3833 }
3834 else if (DECL_MODE (decl) != BLKmode
3835 /* If -ffloat-store, don't put explicit float vars
3836 into regs. */
3837 && !(flag_float_store
3838 && TREE_CODE (type) == REAL_TYPE)
3839 && ! TREE_THIS_VOLATILE (decl)
3840 && ! TREE_ADDRESSABLE (decl)
3841 && (DECL_REGISTER (decl) || optimize)
3842 /* if -fcheck-memory-usage, check all variables. */
3843 && ! current_function_check_memory_usage)
3844 {
3845 /* Automatic variable that can go in a register. */
3846 int unsignedp = TREE_UNSIGNED (type);
3847 enum machine_mode reg_mode
3848 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3849
3850 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3851 mark_user_reg (DECL_RTL (decl));
3852
3853 if (POINTER_TYPE_P (type))
3854 mark_reg_pointer (DECL_RTL (decl),
3855 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3856
3857 maybe_set_unchanging (DECL_RTL (decl), decl);
3858 }
3859
3860 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3861 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3862 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3863 STACK_CHECK_MAX_VAR_SIZE)))
3864 {
3865 /* Variable of fixed size that goes on the stack. */
3866 rtx oldaddr = 0;
3867 rtx addr;
3868
3869 /* If we previously made RTL for this decl, it must be an array
3870 whose size was determined by the initializer.
3871 The old address was a register; set that register now
3872 to the proper address. */
3873 if (DECL_RTL (decl) != 0)
3874 {
3875 if (GET_CODE (DECL_RTL (decl)) != MEM
3876 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3877 abort ();
3878 oldaddr = XEXP (DECL_RTL (decl), 0);
3879 }
3880
3881 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3882
3883 /* Set alignment we actually gave this decl. */
3884 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3885 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3886 DECL_USER_ALIGN (decl) = 0;
3887
3888 if (oldaddr)
3889 {
3890 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3891 if (addr != oldaddr)
3892 emit_move_insn (oldaddr, addr);
3893 }
3894 }
3895 else
3896 /* Dynamic-size object: must push space on the stack. */
3897 {
3898 rtx address, size;
3899
3900 /* Record the stack pointer on entry to block, if we have
3901 not already done so. */
3902 do_pending_stack_adjust ();
3903 save_stack_pointer ();
3904
3905 /* In function-at-a-time mode, variable_size doesn't expand this,
3906 so do it now. */
3907 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3908 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3909 const0_rtx, VOIDmode, 0);
3910
3911 /* Compute the variable's size, in bytes. */
3912 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3913 free_temp_slots ();
3914
3915 /* Allocate space on the stack for the variable. Note that
3916 DECL_ALIGN says how the variable is to be aligned and we
3917 cannot use it to conclude anything about the alignment of
3918 the size. */
3919 address = allocate_dynamic_stack_space (size, NULL_RTX,
3920 TYPE_ALIGN (TREE_TYPE (decl)));
3921
3922 /* Reference the variable indirect through that rtx. */
3923 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3924
3925 set_mem_attributes (DECL_RTL (decl), decl, 1);
3926
3927 /* Indicate the alignment we actually gave this variable. */
3928 #ifdef STACK_BOUNDARY
3929 DECL_ALIGN (decl) = STACK_BOUNDARY;
3930 #else
3931 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3932 #endif
3933 DECL_USER_ALIGN (decl) = 0;
3934 }
3935 }
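
/* Editor's illustration (not part of the original source): in

       void f (int n)
       {
         int i;         -- typically the register path above, when optimizing
         double d;      -- with -ffloat-store, kept out of registers
         char buf[n];   -- non-constant DECL_SIZE_UNIT, so the dynamic
                           path saves the stack pointer and calls
                           allocate_dynamic_stack_space
       }
*/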
3936 \f
3937 /* Emit code to perform the initialization of a declaration DECL. */
3938
3939 void
3940 expand_decl_init (decl)
3941 tree decl;
3942 {
3943 int was_used = TREE_USED (decl);
3944
3945 /* If this is a CONST_DECL, we don't have to generate any code, but
3946 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3947 to be set while in the obstack containing the constant. If we don't
3948 do this, we can lose if we have functions nested three deep and the middle
3949 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3950 the innermost function is the first to expand that STRING_CST. */
3951 if (TREE_CODE (decl) == CONST_DECL)
3952 {
3953 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3954 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3955 EXPAND_INITIALIZER);
3956 return;
3957 }
3958
3959 if (TREE_STATIC (decl))
3960 return;
3961
3962 /* Compute and store the initial value now. */
3963
3964 if (DECL_INITIAL (decl) == error_mark_node)
3965 {
3966 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3967
3968 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3969 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3970 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3971 0, 0);
3972 emit_queue ();
3973 }
3974 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3975 {
3976 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3977 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3978 emit_queue ();
3979 }
3980
3981 /* Don't let the initialization count as "using" the variable. */
3982 TREE_USED (decl) = was_used;
3983
3984 /* Free any temporaries we made while initializing the decl. */
3985 preserve_temp_slots (NULL_RTX);
3986 free_temp_slots ();
3987 }
3988
3989 /* CLEANUP is an expression to be executed at exit from this binding contour;
3990 for example, in C++, it might call the destructor for this variable.
3991
3992 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3993 CLEANUP multiple times, and have the correct semantics. This
3994 happens in exception handling, for gotos, returns, breaks that
3995 leave the current scope.
3996
3997 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3998 that is not associated with any particular variable. */
3999
4000 int
4001 expand_decl_cleanup (decl, cleanup)
4002 tree decl, cleanup;
4003 {
4004 struct nesting *thisblock;
4005
4006 /* Error if we are not in any block. */
4007 if (cfun == 0 || block_stack == 0)
4008 return 0;
4009
4010 thisblock = block_stack;
4011
4012 /* Record the cleanup if there is one. */
4013
4014 if (cleanup != 0)
4015 {
4016 tree t;
4017 rtx seq;
4018 tree *cleanups = &thisblock->data.block.cleanups;
4019 int cond_context = conditional_context ();
4020
4021 if (cond_context)
4022 {
4023 rtx flag = gen_reg_rtx (word_mode);
4024 rtx set_flag_0;
4025 tree cond;
4026
4027 start_sequence ();
4028 emit_move_insn (flag, const0_rtx);
4029 set_flag_0 = get_insns ();
4030 end_sequence ();
4031
4032 thisblock->data.block.last_unconditional_cleanup
4033 = emit_insns_after (set_flag_0,
4034 thisblock->data.block.last_unconditional_cleanup);
4035
4036 emit_move_insn (flag, const1_rtx);
4037
4038 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4039 DECL_RTL (cond) = flag;
4040
4041 /* Conditionalize the cleanup. */
4042 cleanup = build (COND_EXPR, void_type_node,
4043 truthvalue_conversion (cond),
4044 cleanup, integer_zero_node);
4045 cleanup = fold (cleanup);
4046
4047 cleanups = thisblock->data.block.cleanup_ptr;
4048 }
4049
4050 cleanup = unsave_expr (cleanup);
4051
4052 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4053
4054 if (! cond_context)
4055 /* If this block has a cleanup, it belongs in stack_block_stack. */
4056 stack_block_stack = thisblock;
4057
4058 if (cond_context)
4059 {
4060 start_sequence ();
4061 }
4062
4063 /* If this was optimized so that there is no exception region for the
4064 cleanup, then mark the TREE_LIST node, so that we can later tell
4065 if we need to call expand_eh_region_end. */
4066 if (! using_eh_for_cleanups_p
4067 || expand_eh_region_start_tree (decl, cleanup))
4068 TREE_ADDRESSABLE (t) = 1;
4069 /* If that started a new EH region, we're in a new block. */
4070 thisblock = block_stack;
4071
4072 if (cond_context)
4073 {
4074 seq = get_insns ();
4075 end_sequence ();
4076 if (seq)
4077 thisblock->data.block.last_unconditional_cleanup
4078 = emit_insns_after (seq,
4079 thisblock->data.block.last_unconditional_cleanup);
4080 }
4081 else
4082 {
4083 thisblock->data.block.last_unconditional_cleanup
4084 = get_last_insn ();
4085 /* When we insert instructions after the last unconditional cleanup,
4086 we don't adjust last_insn. That means that a later add_insn will
4087 clobber the instructions we've just added. The easiest way to
4088 fix this is to just insert another instruction here, so that the
4089 instructions inserted after the last unconditional cleanup are
4090 never the last instruction. */
4091 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4092 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4093 }
4094 }
4095 return 1;
4096 }
4097
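#if 0
/* Editor's sketch (not part of the original source): how a C++ front
   end might register a destructor call as a cleanup for a local
   variable.  `build_destructor_call' is a hypothetical front-end
   helper, not a function defined anywhere in GCC.  */
expand_decl (decl);                 /* allocate RTL for the variable */
expand_decl_init (decl);            /* emit its initialization */
expand_decl_cleanup (decl, build_destructor_call (decl));
#endif
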
4098 /* Like expand_decl_cleanup, but suppress generating an exception handler
4099 to perform the cleanup. */
4100
4101 #if 0
4102 int
4103 expand_decl_cleanup_no_eh (decl, cleanup)
4104 tree decl, cleanup;
4105 {
4106 int save_eh = using_eh_for_cleanups_p;
4107 int result;
4108
4109 using_eh_for_cleanups_p = 0;
4110 result = expand_decl_cleanup (decl, cleanup);
4111 using_eh_for_cleanups_p = save_eh;
4112
4113 return result;
4114 }
4115 #endif
4116
4117 /* Arrange for the top element of the dynamic cleanup chain to be
4118 popped if we exit the current binding contour. DECL is the
4119 associated declaration, if any, otherwise NULL_TREE. If the
4120 current contour is left via an exception, then __sjthrow will pop
4121 the top element off the dynamic cleanup chain. The code that
4122 avoids doing the action we push into the cleanup chain in the
4123 exceptional case is contained in expand_cleanups.
4124
4125 This routine is only used by expand_eh_region_start, and that is
4126 the only way in which an exception region should be started. This
4127 routine is only used when using the setjmp/longjmp codegen method
4128 for exception handling. */
4129
4130 int
4131 expand_dcc_cleanup (decl)
4132 tree decl;
4133 {
4134 struct nesting *thisblock;
4135 tree cleanup;
4136
4137 /* Error if we are not in any block. */
4138 if (cfun == 0 || block_stack == 0)
4139 return 0;
4140 thisblock = block_stack;
4141
4142 /* Record the cleanup for the dynamic handler chain. */
4143
4144 cleanup = make_node (POPDCC_EXPR);
4145
4146 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4147 thisblock->data.block.cleanups
4148 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4149
4150 /* If this block has a cleanup, it belongs in stack_block_stack. */
4151 stack_block_stack = thisblock;
4152 return 1;
4153 }
4154
4155 /* Arrange for the top element of the dynamic handler chain to be
4156 popped if we exit the current binding contour. DECL is the
4157 associated declaration, if any, otherwise NULL_TREE. If the current
4158 contour is left via an exception, then __sjthrow will pop the top
4159 element off the dynamic handler chain. The code that avoids doing
4160 the action we push into the handler chain in the exceptional case
4161 is contained in expand_cleanups.
4162
4163 This routine is only used by expand_eh_region_start, and that is
4164 the only way in which an exception region should be started. This
4165 routine is only used when using the setjmp/longjmp codegen method
4166 for exception handling. */
4167
4168 int
4169 expand_dhc_cleanup (decl)
4170 tree decl;
4171 {
4172 struct nesting *thisblock;
4173 tree cleanup;
4174
4175 /* Error if we are not in any block. */
4176 if (cfun == 0 || block_stack == 0)
4177 return 0;
4178 thisblock = block_stack;
4179
4180 /* Record the cleanup for the dynamic handler chain. */
4181
4182 cleanup = make_node (POPDHC_EXPR);
4183
4184 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4185 thisblock->data.block.cleanups
4186 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4187
4188 /* If this block has a cleanup, it belongs in stack_block_stack. */
4189 stack_block_stack = thisblock;
4190 return 1;
4191 }
4192 \f
4193 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4194 DECL_ELTS is the list of elements that belong to DECL's type.
4195 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4196
4197 void
4198 expand_anon_union_decl (decl, cleanup, decl_elts)
4199 tree decl, cleanup, decl_elts;
4200 {
4201 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4202 rtx x;
4203 tree t;
4204
4205 /* If any of the elements are addressable, so is the entire union. */
4206 for (t = decl_elts; t; t = TREE_CHAIN (t))
4207 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4208 {
4209 TREE_ADDRESSABLE (decl) = 1;
4210 break;
4211 }
4212
4213 expand_decl (decl);
4214 expand_decl_cleanup (decl, cleanup);
4215 x = DECL_RTL (decl);
4216
4217 /* Go through the elements, assigning RTL to each. */
4218 for (t = decl_elts; t; t = TREE_CHAIN (t))
4219 {
4220 tree decl_elt = TREE_VALUE (t);
4221 tree cleanup_elt = TREE_PURPOSE (t);
4222 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4223
4224 /* Propagate the union's alignment to the elements. */
4225 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4226 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4227
4228 /* If the element has BLKmode and the union doesn't, the union is
4229 aligned such that the element doesn't need to have BLKmode, so
4230 change the element's mode to the appropriate one for its size. */
4231 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4232 DECL_MODE (decl_elt) = mode
4233 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4234
4235 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4236 instead create a new MEM rtx with the proper mode. */
4237 if (GET_CODE (x) == MEM)
4238 {
4239 if (mode == GET_MODE (x))
4240 DECL_RTL (decl_elt) = x;
4241 else
4242 {
4243 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4244 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4245 }
4246 }
4247 else if (GET_CODE (x) == REG)
4248 {
4249 if (mode == GET_MODE (x))
4250 DECL_RTL (decl_elt) = x;
4251 else
4252 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4253 }
4254 else
4255 abort ();
4256
4257 /* Record the cleanup if there is one. */
4258
4259 if (cleanup != 0)
4260 thisblock->data.block.cleanups
4261 = tree_cons (decl_elt, cleanup_elt,
4262 thisblock->data.block.cleanups);
4263 }
4264 }
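
/* Editor's illustration (not part of the original source): for the
   C++ declaration

       union { int i; float f; };

   the front end creates a single anonymous VAR_DECL for the union and
   calls this function with DECL_ELTS listing `i' and `f'; each element
   then shares the union's MEM or REG, re-typed to its own mode by the
   code above.  */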
4265 \f
4266 /* Expand a list of cleanups LIST.
4267 Elements may be expressions or may be nested lists.
4268
4269 If DONT_DO is nonnull, then any list-element
4270 whose TREE_PURPOSE matches DONT_DO is omitted.
4271 This is sometimes used to avoid a cleanup associated with
4272 a value that is being returned out of the scope.
4273
4274 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4275 goto and handle protection regions specially in that case.
4276
4277 If REACHABLE, we emit code, otherwise just inform the exception handling
4278 code about this finalization. */
4279
4280 static void
4281 expand_cleanups (list, dont_do, in_fixup, reachable)
4282 tree list;
4283 tree dont_do;
4284 int in_fixup;
4285 int reachable;
4286 {
4287 tree tail;
4288 for (tail = list; tail; tail = TREE_CHAIN (tail))
4289 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4290 {
4291 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4292 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4293 else
4294 {
4295 if (! in_fixup)
4296 {
4297 tree cleanup = TREE_VALUE (tail);
4298
4299 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4300 if (TREE_CODE (cleanup) != POPDHC_EXPR
4301 && TREE_CODE (cleanup) != POPDCC_EXPR
4302 /* See expand_eh_region_start_tree for this case. */
4303 && ! TREE_ADDRESSABLE (tail))
4304 {
4305 cleanup = protect_with_terminate (cleanup);
4306 expand_eh_region_end (cleanup);
4307 }
4308 }
4309
4310 if (reachable)
4311 {
4312 /* Cleanups may be run multiple times. For example,
4313 when exiting a binding contour, we expand the
4314 cleanups associated with that contour. When a goto
4315 within that binding contour has a target outside that
4316 contour, it will expand all cleanups from its scope to
4317 the target. Though the cleanups are expanded multiple
4318 times, the control paths are non-overlapping so the
4319 cleanups will not be executed twice. */
4320
4321 /* We may need to protect fixups with rethrow regions. */
4322 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4323
4324 if (protect)
4325 expand_fixup_region_start ();
4326
4327 /* The cleanup might contain try-blocks, so we have to
4328 preserve our current queue. */
4329 push_ehqueue ();
4330 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4331 pop_ehqueue ();
4332 if (protect)
4333 expand_fixup_region_end (TREE_VALUE (tail));
4334 free_temp_slots ();
4335 }
4336 }
4337 }
4338 }
4339
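/* Editor's example (not part of the original source): in

       { T a;  if (p) goto out;  }
      out: ;

   where `a' has a registered cleanup, the cleanup is expanded once on
   the goto's fixup path and once at the closing brace; the two control
   paths never overlap, so the cleanup is never executed twice.  */
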
4340 /* Mark the context we are emitting RTL for as a conditional
4341 context, so that any cleanup actions we register with
4342 expand_decl_cleanup will be properly conditionalized when those
4343 cleanup actions are later performed. Must be called before any
4344 expression (tree) is expanded that is within a conditional context. */
4345
4346 void
4347 start_cleanup_deferral ()
4348 {
4349 /* block_stack can be NULL if we are inside the parameter list. It is
4350 OK to do nothing, because cleanups aren't possible here. */
4351 if (block_stack)
4352 ++block_stack->data.block.conditional_code;
4353 }
4354
4355 /* Mark the end of a conditional region of code. Because cleanup
4356 deferrals may be nested, we may still be in a conditional region
4357 after we end the currently deferred cleanups; only after we end all
4358 deferred cleanups are we back in unconditional code. */
4359
4360 void
4361 end_cleanup_deferral ()
4362 {
4363 /* block_stack can be NULL if we are inside the parameter list. It is
4364 OK to do nothing, because cleanups aren't possible here. */
4365 if (block_stack)
4366 --block_stack->data.block.conditional_code;
4367 }
4368
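#if 0
/* Editor's sketch (not part of the original source): bracketing the
   arm of a conditional so that cleanups registered while expanding it
   are conditionalized by the flag machinery in expand_decl_cleanup.
   `then_arm' stands for whatever tree the caller is expanding.  */
start_cleanup_deferral ();
expand_expr (then_arm, const0_rtx, VOIDmode, 0);
end_cleanup_deferral ();
#endif
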
4369 /* Move all cleanups from the current block_stack
4370 to the containing block_stack, where they are assumed to
4371 have been created. If anything can cause a temporary to
4372 be created, but not expanded for more than one level of
4373 block_stacks, then this code will have to change. */
4374
4375 void
4376 move_cleanups_up ()
4377 {
4378 struct nesting *block = block_stack;
4379 struct nesting *outer = block->next;
4380
4381 outer->data.block.cleanups
4382 = chainon (block->data.block.cleanups,
4383 outer->data.block.cleanups);
4384 block->data.block.cleanups = 0;
4385 }
4386
4387 tree
4388 last_cleanup_this_contour ()
4389 {
4390 if (block_stack == 0)
4391 return 0;
4392
4393 return block_stack->data.block.cleanups;
4394 }
4395
4396 /* Return 1 if there are any pending cleanups at this point.
4397 If THIS_CONTOUR is nonzero, check the current contour as well.
4398 Otherwise, look only at the contours that enclose this one. */
4399
4400 int
4401 any_pending_cleanups (this_contour)
4402 int this_contour;
4403 {
4404 struct nesting *block;
4405
4406 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4407 return 0;
4408
4409 if (this_contour && block_stack->data.block.cleanups != NULL)
4410 return 1;
4411 if (block_stack->data.block.cleanups == 0
4412 && block_stack->data.block.outer_cleanups == 0)
4413 return 0;
4414
4415 for (block = block_stack->next; block; block = block->next)
4416 if (block->data.block.cleanups != 0)
4417 return 1;
4418
4419 return 0;
4420 }
4421 \f
4422 /* Enter a case (Pascal) or switch (C) statement.
4423 Push a block onto case_stack and nesting_stack
4424 to accumulate the case-labels that are seen
4425 and to record the labels generated for the statement.
4426
4427 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4428 Otherwise, this construct is transparent for `exit_something'.
4429
4430 EXPR is the index-expression to be dispatched on.
4431 TYPE is its nominal type. We could simply convert EXPR to this type,
4432 but instead we take short cuts. */
4433
4434 void
4435 expand_start_case (exit_flag, expr, type, printname)
4436 int exit_flag;
4437 tree expr;
4438 tree type;
4439 const char *printname;
4440 {
4441 register struct nesting *thiscase = ALLOC_NESTING ();
4442
4443 /* Make an entry on case_stack for the case we are entering. */
4444
4445 thiscase->next = case_stack;
4446 thiscase->all = nesting_stack;
4447 thiscase->depth = ++nesting_depth;
4448 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4449 thiscase->data.case_stmt.case_list = 0;
4450 thiscase->data.case_stmt.index_expr = expr;
4451 thiscase->data.case_stmt.nominal_type = type;
4452 thiscase->data.case_stmt.default_label = 0;
4453 thiscase->data.case_stmt.printname = printname;
4454 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4455 case_stack = thiscase;
4456 nesting_stack = thiscase;
4457
4458 do_pending_stack_adjust ();
4459
4460 /* Make sure case_stmt.start points to something that won't
4461 need any transformation before expand_end_case. */
4462 if (GET_CODE (get_last_insn ()) != NOTE)
4463 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4464
4465 thiscase->data.case_stmt.start = get_last_insn ();
4466
4467 start_cleanup_deferral ();
4468 }
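
#if 0
/* Editor's sketch (not part of the original source): the call
   protocol a C front end follows for `switch (expr)'.  The label
   trees (lab0, default_lab), the `duplicate' slot, and error-code
   handling for pushcase are assumed to be supplied by the caller.  */
expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");
pushcase (build_int_2 (0, 0), convert, lab0, &duplicate);   /* case 0: */
pushcase (NULL_TREE, 0, default_lab, &duplicate);           /* default: */
/* ... expand the body of the switch ... */
expand_end_case (expr);
#endif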
4469
4470 /* Start a "dummy case statement" within which case labels are invalid
4471 and are not connected to any larger real case statement.
4472 This can be used if you don't want to let a case statement jump
4473 into the middle of certain kinds of constructs. */
4474
4475 void
4476 expand_start_case_dummy ()
4477 {
4478 register struct nesting *thiscase = ALLOC_NESTING ();
4479
4480 /* Make an entry on case_stack for the dummy. */
4481
4482 thiscase->next = case_stack;
4483 thiscase->all = nesting_stack;
4484 thiscase->depth = ++nesting_depth;
4485 thiscase->exit_label = 0;
4486 thiscase->data.case_stmt.case_list = 0;
4487 thiscase->data.case_stmt.start = 0;
4488 thiscase->data.case_stmt.nominal_type = 0;
4489 thiscase->data.case_stmt.default_label = 0;
4490 case_stack = thiscase;
4491 nesting_stack = thiscase;
4492 start_cleanup_deferral ();
4493 }
4494
4495 /* End a dummy case statement. */
4496
4497 void
4498 expand_end_case_dummy ()
4499 {
4500 end_cleanup_deferral ();
4501 POPSTACK (case_stack);
4502 }
4503
4504 /* Return the data type of the index-expression
4505 of the innermost case statement, or null if none. */
4506
4507 tree
4508 case_index_expr_type ()
4509 {
4510 if (case_stack)
4511 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4512 return 0;
4513 }
4514 \f
4515 static void
4516 check_seenlabel ()
4517 {
4518 /* If this is the first label, warn if any insns have been emitted. */
4519 if (case_stack->data.case_stmt.line_number_status >= 0)
4520 {
4521 rtx insn;
4522
4523 restore_line_number_status
4524 (case_stack->data.case_stmt.line_number_status);
4525 case_stack->data.case_stmt.line_number_status = -1;
4526
4527 for (insn = case_stack->data.case_stmt.start;
4528 insn;
4529 insn = NEXT_INSN (insn))
4530 {
4531 if (GET_CODE (insn) == CODE_LABEL)
4532 break;
4533 if (GET_CODE (insn) != NOTE
4534 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4535 {
4536 do
4537 insn = PREV_INSN (insn);
4538 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4539
4540 /* If insn is zero, then there must have been a syntax error. */
4541 if (insn)
4542 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4543 NOTE_LINE_NUMBER (insn),
4544 "unreachable code at beginning of %s",
4545 case_stack->data.case_stmt.printname);
4546 break;
4547 }
4548 }
4549 }
4550 }
4551
4552 /* Accumulate one case or default label inside a case or switch statement.
4553 VALUE is the value of the case (a null pointer, for a default label).
4554 The function CONVERTER, when applied to arguments T and V,
4555 converts the value V to the type T.
4556
4557 If not currently inside a case or switch statement, return 1 and do
4558 nothing. The caller will print a language-specific error message.
4559 If VALUE is a duplicate or overlaps, return 2 and do nothing
4560 except store the (first) duplicate node in *DUPLICATE.
4561 If VALUE is out of range, return 3 and do nothing.
4562 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4563 Return 0 on success.
4564
4565 Extended to handle range statements. */
4566
4567 int
4568 pushcase (value, converter, label, duplicate)
4569 register tree value;
4570 tree (*converter) PARAMS ((tree, tree));
4571 register tree label;
4572 tree *duplicate;
4573 {
4574 tree index_type;
4575 tree nominal_type;
4576
4577 /* Fail if not inside a real case statement. */
4578 if (! (case_stack && case_stack->data.case_stmt.start))
4579 return 1;
4580
4581 if (stack_block_stack
4582 && stack_block_stack->depth > case_stack->depth)
4583 return 5;
4584
4585 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4586 nominal_type = case_stack->data.case_stmt.nominal_type;
4587
4588 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4589 if (index_type == error_mark_node)
4590 return 0;
4591
4592 /* Convert VALUE to the type in which the comparisons are nominally done. */
4593 if (value != 0)
4594 value = (*converter) (nominal_type, value);
4595
4596 check_seenlabel ();
4597
4598 /* Fail if this value is out of range for the actual type of the index
4599 (which may be narrower than NOMINAL_TYPE). */
4600 if (value != 0
4601 && (TREE_CONSTANT_OVERFLOW (value)
4602 || ! int_fits_type_p (value, index_type)))
4603 return 3;
4604
4605 return add_case_node (value, value, label, duplicate);
4606 }
4607
4608 /* Like pushcase but this case applies to all values between VALUE1 and
4609 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4610 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4611 starts at VALUE1 and ends at the highest value of the index type.
4612 If both are NULL, this case applies to all values.
4613
4614 The return value is the same as that of pushcase but there is one
4615 additional error code: 4 means the specified range was empty. */
4616
4617 int
4618 pushcase_range (value1, value2, converter, label, duplicate)
4619 register tree value1, value2;
4620 tree (*converter) PARAMS ((tree, tree));
4621 register tree label;
4622 tree *duplicate;
4623 {
4624 tree index_type;
4625 tree nominal_type;
4626
4627 /* Fail if not inside a real case statement. */
4628 if (! (case_stack && case_stack->data.case_stmt.start))
4629 return 1;
4630
4631 if (stack_block_stack
4632 && stack_block_stack->depth > case_stack->depth)
4633 return 5;
4634
4635 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4636 nominal_type = case_stack->data.case_stmt.nominal_type;
4637
4638 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4639 if (index_type == error_mark_node)
4640 return 0;
4641
4642 check_seenlabel ();
4643
4644 /* Convert VALUEs to type in which the comparisons are nominally done
4645 and replace any unspecified value with the corresponding bound. */
4646 if (value1 == 0)
4647 value1 = TYPE_MIN_VALUE (index_type);
4648 if (value2 == 0)
4649 value2 = TYPE_MAX_VALUE (index_type);
4650
4651 /* Fail if the range is empty. Do this before any conversion since
4652 we want to allow out-of-range empty ranges. */
4653 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4654 return 4;
4655
4656 /* If the max was unbounded, use the max of the nominal_type we are
4657 converting to. Do this after the < check above to suppress false
4658 positives. */
4659 if (value2 == 0)
4660 value2 = TYPE_MAX_VALUE (nominal_type);
4661
4662 value1 = (*converter) (nominal_type, value1);
4663 value2 = (*converter) (nominal_type, value2);
4664
4665 /* Fail if these values are out of range. */
4666 if (TREE_CONSTANT_OVERFLOW (value1)
4667 || ! int_fits_type_p (value1, index_type))
4668 return 3;
4669
4670 if (TREE_CONSTANT_OVERFLOW (value2)
4671 || ! int_fits_type_p (value2, index_type))
4672 return 3;
4673
4674 return add_case_node (value1, value2, label, duplicate);
4675 }
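
/* Editor's note (not part of the original source): the GNU C case
   range extension

       case 1 ... 5:

   reaches this function with VALUE1 == 1 and VALUE2 == 5, while an
   ordinary `case 7:' goes through pushcase with a single value.  */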
4676
4677 /* Do the actual insertion of a case label for pushcase and pushcase_range
4678 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4679 slowdown for large switch statements. */
4680
4681 int
4682 add_case_node (low, high, label, duplicate)
4683 tree low, high;
4684 tree label;
4685 tree *duplicate;
4686 {
4687 struct case_node *p, **q, *r;
4688
4689 /* If there's no HIGH value, then this is not a case range; it's
4690 just a simple case label. But that's just a degenerate case
4691 range. */
4692 if (!high)
4693 high = low;
4694
4695 /* Handle default labels specially. */
4696 if (!high && !low)
4697 {
4698 if (case_stack->data.case_stmt.default_label != 0)
4699 {
4700 *duplicate = case_stack->data.case_stmt.default_label;
4701 return 2;
4702 }
4703 case_stack->data.case_stmt.default_label = label;
4704 expand_label (label);
4705 return 0;
4706 }
4707
4708 q = &case_stack->data.case_stmt.case_list;
4709 p = *q;
4710
4711 while ((r = *q))
4712 {
4713 p = r;
4714
4715 /* Keep going past elements distinctly greater than HIGH. */
4716 if (tree_int_cst_lt (high, p->low))
4717 q = &p->left;
4718
4719 /* or distinctly less than LOW. */
4720 else if (tree_int_cst_lt (p->high, low))
4721 q = &p->right;
4722
4723 else
4724 {
4725 /* We have an overlap; this is an error. */
4726 *duplicate = p->code_label;
4727 return 2;
4728 }
4729 }
4730
4731 /* Add this label to the chain, and succeed.
4732 Copy LOW, HIGH so they are on temporary rather than momentary
4733 obstack and will thus survive till the end of the case statement. */
4734
4735 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4736 r->low = copy_node (low);
4737
4738 /* If the bounds are equal, turn this into the one-value case. */
4739 if (tree_int_cst_equal (low, high))
4740 r->high = r->low;
4741 else
4742 r->high = copy_node (high);
4743
4744 r->code_label = label;
4745 expand_label (label);
4746
4747 *q = r;
4748 r->parent = p;
4749 r->left = 0;
4750 r->right = 0;
4751 r->balance = 0;
4752
4753 while (p)
4754 {
4755 struct case_node *s;
4756
4757 if (r == p->left)
4758 {
4759 int b;
4760
4761 if (! (b = p->balance))
4762 /* Growth propagation from left side. */
4763 p->balance = -1;
4764 else if (b < 0)
4765 {
4766 if (r->balance < 0)
4767 {
4768 /* R-Rotation */
4769 if ((p->left = s = r->right))
4770 s->parent = p;
4771
4772 r->right = p;
4773 p->balance = 0;
4774 r->balance = 0;
4775 s = p->parent;
4776 p->parent = r;
4777
4778 if ((r->parent = s))
4779 {
4780 if (s->left == p)
4781 s->left = r;
4782 else
4783 s->right = r;
4784 }
4785 else
4786 case_stack->data.case_stmt.case_list = r;
4787 }
4788 else
4789 /* r->balance == +1 */
4790 {
4791 /* LR-Rotation */
4792
4793 int b2;
4794 struct case_node *t = r->right;
4795
4796 if ((p->left = s = t->right))
4797 s->parent = p;
4798
4799 t->right = p;
4800 if ((r->right = s = t->left))
4801 s->parent = r;
4802
4803 t->left = r;
4804 b = t->balance;
4805 b2 = b < 0;
4806 p->balance = b2;
4807 b2 = -b2 - b;
4808 r->balance = b2;
4809 t->balance = 0;
4810 s = p->parent;
4811 p->parent = t;
4812 r->parent = t;
4813
4814 if ((t->parent = s))
4815 {
4816 if (s->left == p)
4817 s->left = t;
4818 else
4819 s->right = t;
4820 }
4821 else
4822 case_stack->data.case_stmt.case_list = t;
4823 }
4824 break;
4825 }
4826
4827 else
4828 {
4829 /* p->balance == +1; growth of left side balances the node. */
4830 p->balance = 0;
4831 break;
4832 }
4833 }
4834 else
4835 /* r == p->right */
4836 {
4837 int b;
4838
4839 if (! (b = p->balance))
4840 /* Growth propagation from right side. */
4841 p->balance++;
4842 else if (b > 0)
4843 {
4844 if (r->balance > 0)
4845 {
4846 /* L-Rotation */
4847
4848 if ((p->right = s = r->left))
4849 s->parent = p;
4850
4851 r->left = p;
4852 p->balance = 0;
4853 r->balance = 0;
4854 s = p->parent;
4855 p->parent = r;
4856 if ((r->parent = s))
4857 {
4858 if (s->left == p)
4859 s->left = r;
4860 else
4861 s->right = r;
4862 }
4863
4864 else
4865 case_stack->data.case_stmt.case_list = r;
4866 }
4867
4868 else
4869 /* r->balance == -1 */
4870 {
4871 /* RL-Rotation */
4872 int b2;
4873 struct case_node *t = r->left;
4874
4875 if ((p->right = s = t->left))
4876 s->parent = p;
4877
4878 t->left = p;
4879
4880 if ((r->left = s = t->right))
4881 s->parent = r;
4882
4883 t->right = r;
4884 b = t->balance;
4885 b2 = b < 0;
4886 r->balance = b2;
4887 b2 = -b2 - b;
4888 p->balance = b2;
4889 t->balance = 0;
4890 s = p->parent;
4891 p->parent = t;
4892 r->parent = t;
4893
4894 if ((t->parent = s))
4895 {
4896 if (s->left == p)
4897 s->left = t;
4898 else
4899 s->right = t;
4900 }
4901
4902 else
4903 case_stack->data.case_stmt.case_list = t;
4904 }
4905 break;
4906 }
4907 else
4908 {
4909 /* p->balance == -1; growth of right side balances the node. */
4910 p->balance = 0;
4911 break;
4912 }
4913 }
4914
4915 r = p;
4916 p = p->parent;
4917 }
4918
4919 return 0;
4920 }
4921 \f
4922 /* Returns the number of possible values of TYPE.
4923 Returns -1 if the number is unknown, variable, or if the number does not
4924 fit in a HOST_WIDE_INT.
4925 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4926 do not increase monotonically (there may be duplicates);
4927 to 1 if the values increase monotonically, but not always by 1;
4928 otherwise sets it to 0. */
4929
4930 HOST_WIDE_INT
4931 all_cases_count (type, sparseness)
4932 tree type;
4933 int *sparseness;
4934 {
4935 tree t;
4936 HOST_WIDE_INT count, minval, lastval;
4937
4938 *sparseness = 0;
4939
4940 switch (TREE_CODE (type))
4941 {
4942 case BOOLEAN_TYPE:
4943 count = 2;
4944 break;
4945
4946 case CHAR_TYPE:
4947 count = 1 << BITS_PER_UNIT;
4948 break;
4949
4950 default:
4951 case INTEGER_TYPE:
4952 if (TYPE_MAX_VALUE (type) != 0
4953 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4954 TYPE_MIN_VALUE (type))))
4955 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4956 convert (type, integer_one_node))))
4957 && host_integerp (t, 1))
4958 count = tree_low_cst (t, 1);
4959 else
4960 return -1;
4961 break;
4962
4963 case ENUMERAL_TYPE:
4964 /* Don't waste time with enumeral types with huge values. */
4965 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4966 || TYPE_MAX_VALUE (type) == 0
4967 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4968 return -1;
4969
4970 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4971 count = 0;
4972
4973 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4974 {
4975 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4976
4977 if (*sparseness == 2 || thisval < lastval)
4978 *sparseness = 2;
4979 else if (thisval != minval + count)
4980 *sparseness = 1;
4981
4982 count++; lastval = thisval;
4983 }
4984 }
4985
4986 return count;
4987 }
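
/* Editor's examples (not part of the original source):

       enum a { P, Q, R };        -- count 3, *sparseness 0
       enum b { P, Q = 4 };       -- count 2, *sparseness 1
       enum c { P = 1, Q = 0 };   -- count 2, *sparseness 2
*/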
4988
4989 #define BITARRAY_TEST(ARRAY, INDEX) \
4990 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4991 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4992 #define BITARRAY_SET(ARRAY, INDEX) \
4993 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4994 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4995
4996 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4997 with the case values we have seen, assuming the case expression
4998 has the given TYPE.
4999 SPARSENESS is as determined by all_cases_count.
5000
5001 The time needed is proportional to COUNT, unless
5002 SPARSENESS is 2, in which case quadratic time is needed. */
5003
5004 void
5005 mark_seen_cases (type, cases_seen, count, sparseness)
5006 tree type;
5007 unsigned char *cases_seen;
5008 HOST_WIDE_INT count;
5009 int sparseness;
5010 {
5011 tree next_node_to_try = NULL_TREE;
5012 HOST_WIDE_INT next_node_offset = 0;
5013
5014 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5015 tree val = make_node (INTEGER_CST);
5016
5017 TREE_TYPE (val) = type;
5018 if (! root)
5019 /* Do nothing. */
5020 ;
5021 else if (sparseness == 2)
5022 {
5023 tree t;
5024 unsigned HOST_WIDE_INT xlo;
5025
5026 /* This less efficient loop is only needed to handle
5027 duplicate case values (multiple enum constants
5028 with the same value). */
5029 TREE_TYPE (val) = TREE_TYPE (root->low);
5030 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5031 t = TREE_CHAIN (t), xlo++)
5032 {
5033 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5034 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5035 n = root;
5036 do
5037 {
5038 /* Keep going past elements distinctly greater than VAL. */
5039 if (tree_int_cst_lt (val, n->low))
5040 n = n->left;
5041
5042 /* or distinctly less than VAL. */
5043 else if (tree_int_cst_lt (n->high, val))
5044 n = n->right;
5045
5046 else
5047 {
5048 /* We have found a matching range. */
5049 BITARRAY_SET (cases_seen, xlo);
5050 break;
5051 }
5052 }
5053 while (n);
5054 }
5055 }
5056 else
5057 {
5058 if (root->left)
5059 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5060
5061 for (n = root; n; n = n->right)
5062 {
5063 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5064 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5065 while (! tree_int_cst_lt (n->high, val))
5066 {
5067 /* Calculate (into xlo) the "offset" of the integer (val).
5068 The element with lowest value has offset 0, the next smallest
5069 element has offset 1, etc. */
5070
5071 unsigned HOST_WIDE_INT xlo;
5072 HOST_WIDE_INT xhi;
5073 tree t;
5074
5075 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5076 {
5077 /* The TYPE_VALUES will be in increasing order, so
5078 start searching where we last ended. */
5079 t = next_node_to_try;
5080 xlo = next_node_offset;
5081 xhi = 0;
5082 for (;;)
5083 {
5084 if (t == NULL_TREE)
5085 {
5086 t = TYPE_VALUES (type);
5087 xlo = 0;
5088 }
5089 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5090 {
5091 next_node_to_try = TREE_CHAIN (t);
5092 next_node_offset = xlo + 1;
5093 break;
5094 }
5095 xlo++;
5096 t = TREE_CHAIN (t);
5097 if (t == next_node_to_try)
5098 {
5099 xlo = -1;
5100 break;
5101 }
5102 }
5103 }
5104 else
5105 {
5106 t = TYPE_MIN_VALUE (type);
5107 if (t)
5108 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5109 &xlo, &xhi);
5110 else
5111 xlo = xhi = 0;
5112 add_double (xlo, xhi,
5113 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5114 &xlo, &xhi);
5115 }
5116
5117 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5118 BITARRAY_SET (cases_seen, xlo);
5119
5120 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5121 1, 0,
5122 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5123 }
5124 }
5125 }
5126 }
5127
5128 /* Called when the index of a switch statement is an enumerated type
5129 and there is no default label.
5130
5131 Checks that all enumeration literals are covered by the case
5132 expressions of a switch. Also, warn if there are any extra
5133 switch cases that are *not* elements of the enumerated type.
5134
5135 If all enumeration literals were covered by the case expressions,
5136 turn one of the expressions into the default expression since it should
5137 not be possible to fall through such a switch. */
5138
5139 void
5140 check_for_full_enumeration_handling (type)
5141 tree type;
5142 {
5143 register struct case_node *n;
5144 register tree chain;
5145 #if 0 /* variable used by 'if 0'ed code below. */
5146 register struct case_node **l;
5147 int all_values = 1;
5148 #endif
5149
5150 /* True iff the selector type is a numbered set mode. */
5151 int sparseness = 0;
5152
5153 /* The number of possible selector values. */
5154 HOST_WIDE_INT size;
5155
5156 /* For each possible selector value, a one iff it has been matched
5157 by a case value alternative. */
5158 unsigned char *cases_seen;
5159
5160 /* The allocated size of cases_seen, in chars. */
5161 HOST_WIDE_INT bytes_needed;
5162
5163 if (! warn_switch)
5164 return;
5165
5166 size = all_cases_count (type, &sparseness);
5167 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5168
5169 if (size > 0 && size < 600000
5170 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5171 this optimization if we don't have enough memory rather than
5172 aborting, as xmalloc would do. */
5173 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5174 {
5175 HOST_WIDE_INT i;
5176 tree v = TYPE_VALUES (type);
5177
5178 /* The time complexity of this code is normally O(N), where
5179 N is the number of members in the enumerated type.
5180 However, if type is an ENUMERAL_TYPE whose values do not
5181 increase monotonically, O(N*log(N)) time may be needed. */
5182
5183 mark_seen_cases (type, cases_seen, size, sparseness);
5184
5185 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5186 if (BITARRAY_TEST (cases_seen, i) == 0)
5187 warning ("enumeration value `%s' not handled in switch",
5188 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5189
5190 free (cases_seen);
5191 }
5192
5193 /* Now we go the other way around; we warn if there are case
5194 expressions that don't correspond to enumerators. This can
5195 occur since C and C++ don't enforce type-checking of
5196 assignments to enumeration variables. */
5197
5198 if (case_stack->data.case_stmt.case_list
5199 && case_stack->data.case_stmt.case_list->left)
5200 case_stack->data.case_stmt.case_list
5201 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5202 if (warn_switch)
5203 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5204 {
5205 for (chain = TYPE_VALUES (type);
5206 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5207 chain = TREE_CHAIN (chain))
5208 ;
5209
5210 if (!chain)
5211 {
5212 if (TYPE_NAME (type) == 0)
5213 warning ("case value `%ld' not in enumerated type",
5214 (long) TREE_INT_CST_LOW (n->low));
5215 else
5216 warning ("case value `%ld' not in enumerated type `%s'",
5217 (long) TREE_INT_CST_LOW (n->low),
5218 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5219 == IDENTIFIER_NODE)
5220 ? TYPE_NAME (type)
5221 : DECL_NAME (TYPE_NAME (type))));
5222 }
5223 if (!tree_int_cst_equal (n->low, n->high))
5224 {
5225 for (chain = TYPE_VALUES (type);
5226 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5227 chain = TREE_CHAIN (chain))
5228 ;
5229
5230 if (!chain)
5231 {
5232 if (TYPE_NAME (type) == 0)
5233 warning ("case value `%ld' not in enumerated type",
5234 (long) TREE_INT_CST_LOW (n->high));
5235 else
5236 warning ("case value `%ld' not in enumerated type `%s'",
5237 (long) TREE_INT_CST_LOW (n->high),
5238 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5239 == IDENTIFIER_NODE)
5240 ? TYPE_NAME (type)
5241 : DECL_NAME (TYPE_NAME (type))));
5242 }
5243 }
5244 }
5245
5246 #if 0
5247 /* ??? This optimization is disabled because it causes valid programs to
5248 fail. ANSI C does not guarantee that an expression with enum type
5249 will have a value that is the same as one of the enumeration literals. */
5250
5251 /* If all values were found as case labels, make one of them the default
5252 label. Thus, this switch will never fall through. We arbitrarily pick
5253 the last one to make the default since this is likely the most
5254 efficient choice. */
5255
5256 if (all_values)
5257 {
5258 for (l = &case_stack->data.case_stmt.case_list;
5259 (*l)->right != 0;
5260 l = &(*l)->right)
5261 ;
5262
5263 case_stack->data.case_stmt.default_label = (*l)->code_label;
5264 *l = 0;
5265 }
5266 #endif /* 0 */
5267 }
5268
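/* Editor's example (not part of the original source): given

       enum color { RED, GREEN, BLUE } c;
       switch (c) { case RED: break; case 42: break; }

   with no `default' label, -Wswitch warns that `GREEN' and `BLUE' are
   not handled and that `42' is not in `enum color'.  */
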
5269 /* Free CN, and its children. */
5270
5271 static void
5272 free_case_nodes (cn)
5273 case_node_ptr cn;
5274 {
5275 if (cn)
5276 {
5277 free_case_nodes (cn->left);
5278 free_case_nodes (cn->right);
5279 free (cn);
5280 }
5281 }
5282
5283 \f
5284 /* Terminate a case (Pascal) or switch (C) statement
5285 in which ORIG_INDEX is the expression to be tested.
5286 Generate the code to test it and jump to the right place. */
5287
5288 void
5289 expand_end_case (orig_index)
5290 tree orig_index;
5291 {
5292 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5293 rtx default_label = 0;
5294 register struct case_node *n;
5295 unsigned int count;
5296 rtx index;
5297 rtx table_label;
5298 int ncases;
5299 rtx *labelvec;
5300 register int i;
5301 rtx before_case;
5302 register struct nesting *thiscase = case_stack;
5303 tree index_expr, index_type;
5304 int unsignedp;
5305
5306 /* Don't crash due to previous errors. */
5307 if (thiscase == NULL)
5308 return;
5309
5310 table_label = gen_label_rtx ();
5311 index_expr = thiscase->data.case_stmt.index_expr;
5312 index_type = TREE_TYPE (index_expr);
5313 unsignedp = TREE_UNSIGNED (index_type);
5314
5315 do_pending_stack_adjust ();
5316
5317 /* This might get a spurious warning in the presence of a syntax error;
5318 it could be fixed by moving the call to check_seenlabel after the
5319 check for error_mark_node, and copying the code of check_seenlabel that
5320 deals with case_stack->data.case_stmt.line_number_status /
5321 restore_line_number_status in front of the call to end_cleanup_deferral.
5322 However, this might miss some useful warnings in the presence of
5323 non-syntax errors. */
5324 check_seenlabel ();
5325
5326 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5327 if (index_type != error_mark_node)
5328 {
5329 /* If the switch expression was an enumerated type, check that all
5330 enumeration literals are covered by the cases.
5331 No sense trying this if there's a default case, however. */
5332
5333 if (!thiscase->data.case_stmt.default_label
5334 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5335 && TREE_CODE (index_expr) != INTEGER_CST)
5336 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5337
5338 /* If we don't have a default-label, create one here,
5339 after the body of the switch. */
5340 if (thiscase->data.case_stmt.default_label == 0)
5341 {
5342 thiscase->data.case_stmt.default_label
5343 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5344 expand_label (thiscase->data.case_stmt.default_label);
5345 }
5346 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5347
5348 before_case = get_last_insn ();
5349
5350 if (thiscase->data.case_stmt.case_list
5351 && thiscase->data.case_stmt.case_list->left)
5352 thiscase->data.case_stmt.case_list
5353 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5354
5355 /* Simplify the case-list before we count it. */
5356 group_case_nodes (thiscase->data.case_stmt.case_list);
5357
5358 /* Get upper and lower bounds of case values.
5359 Also convert all the case values to the index expr's data type. */
5360
5361 count = 0;
5362 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5363 {
5364 /* Check that the low and high label values are integers. */
5365 if (TREE_CODE (n->low) != INTEGER_CST)
5366 abort ();
5367 if (TREE_CODE (n->high) != INTEGER_CST)
5368 abort ();
5369
5370 n->low = convert (index_type, n->low);
5371 n->high = convert (index_type, n->high);
5372
5373 /* Count the elements and track the largest and smallest
5374 of them (treating them as signed even if they are not). */
5375 if (count++ == 0)
5376 {
5377 minval = n->low;
5378 maxval = n->high;
5379 }
5380 else
5381 {
5382 if (INT_CST_LT (n->low, minval))
5383 minval = n->low;
5384 if (INT_CST_LT (maxval, n->high))
5385 maxval = n->high;
5386 }
5387 /* A range counts double, since it requires two compares. */
5388 if (! tree_int_cst_equal (n->low, n->high))
5389 count++;
5390 }
5391
5392 orig_minval = minval;
5393
5394 /* Compute span of values. */
5395 if (count != 0)
5396 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5397
5398 end_cleanup_deferral ();
5399
5400 if (count == 0)
5401 {
5402 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5403 emit_queue ();
5404 emit_jump (default_label);
5405 }
5406
5407 /* If the range of values is much bigger than the number of values,
5408 make a sequence of conditional branches instead of a dispatch.
5409 If the switch-index is a constant, do it this way
5410 because we can optimize it. */
5411
5412 #ifndef CASE_VALUES_THRESHOLD
5413 #ifdef HAVE_casesi
5414 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5415 #else
5416 /* If the machine does not have a case insn that compares the
5417 bounds, this means extra overhead for dispatch tables,
5418 which raises the threshold for using them. */
5419 #define CASE_VALUES_THRESHOLD 5
5420 #endif /* HAVE_casesi */
5421 #endif /* CASE_VALUES_THRESHOLD */
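/* Illustration (a sketch added here, not from the original source):
   a dense switch such as

       switch (c) { case 1: ... case 2: ... case 3: ... case 4: ... case 5: ... }

   fails the test below (count is at least the threshold and the
   range is small), so it reaches the dispatch-table code further
   down, whereas a switch whose range exceeds ten times its case
   count is expanded by the following `else if' arm as a sequence of
   compare-and-jump insns.  */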
5422
5423 else if (count < CASE_VALUES_THRESHOLD
5424 || compare_tree_int (range, 10 * count) > 0
5425 /* RANGE may be signed, and really large ranges will show up
5426 as negative numbers. */
5427 || compare_tree_int (range, 0) < 0
5428 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5429 || flag_pic
5430 #endif
5431 || TREE_CODE (index_expr) == INTEGER_CST
5432 /* These will reduce to a constant. */
5433 || (TREE_CODE (index_expr) == CALL_EXPR
5434 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5435 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5436 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5437 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5438 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5439 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5440 {
5441 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5442
5443 /* If the index is a short or char for which we do not have
5444 an insn to handle comparisons directly, convert it to
5445 a full integer now, rather than letting each comparison
5446 generate the conversion. */
5447
5448 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5449 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5450 == CODE_FOR_nothing))
5451 {
5452 enum machine_mode wider_mode;
5453 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5454 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5455 if (cmp_optab->handlers[(int) wider_mode].insn_code
5456 != CODE_FOR_nothing)
5457 {
5458 index = convert_to_mode (wider_mode, index, unsignedp);
5459 break;
5460 }
5461 }
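/* For example (illustrative): on a machine with no HImode compare
   insn, a `short' index arrives here in HImode and is widened once
   to SImode, so the comparisons emitted below need not each redo
   the extension.  */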
5462
5463 emit_queue ();
5464 do_pending_stack_adjust ();
5465
5466 index = protect_from_queue (index, 0);
5467 if (GET_CODE (index) == MEM)
5468 index = copy_to_reg (index);
5469 if (GET_CODE (index) == CONST_INT
5470 || TREE_CODE (index_expr) == INTEGER_CST)
5471 {
5472 /* Make a tree node with the proper constant value
5473 if we don't already have one. */
5474 if (TREE_CODE (index_expr) != INTEGER_CST)
5475 {
5476 index_expr
5477 = build_int_2 (INTVAL (index),
5478 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5479 index_expr = convert (index_type, index_expr);
5480 }
5481
5482 /* For constant index expressions we need only
5483 issue an unconditional branch to the appropriate
5484 target code. The job of removing any unreachable
5485 code is left to the optimization phase if the
5486 "-O" option is specified. */
5487 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5488 if (! tree_int_cst_lt (index_expr, n->low)
5489 && ! tree_int_cst_lt (n->high, index_expr))
5490 break;
5491
5492 if (n)
5493 emit_jump (label_rtx (n->code_label));
5494 else
5495 emit_jump (default_label);
5496 }
5497 else
5498 {
5499 /* If the index expression is not constant we generate
5500 a binary decision tree to select the appropriate
5501 target code. This is done as follows:
5502
5503 The list of cases is rearranged into a binary tree,
5504 nearly optimal assuming equal probability for each case.
5505
5506 The tree is transformed into RTL, eliminating
5507 redundant test conditions at the same time.
5508
5509 If program flow could reach the end of the
5510 decision tree an unconditional jump to the
5511 default code is emitted. */
5512
5513 use_cost_table
5514 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5515 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5516 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5517 NULL_PTR);
5518 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5519 default_label, index_type);
5520 emit_jump_if_reachable (default_label);
5521 }
5522 }
5523 else
5524 {
5525 int win = 0;
5526 #ifdef HAVE_casesi
5527 if (HAVE_casesi)
5528 {
5529 enum machine_mode index_mode = SImode;
5530 int index_bits = GET_MODE_BITSIZE (index_mode);
5531 rtx op1, op2;
5532 enum machine_mode op_mode;
5533
5534 /* Convert the index to SImode. */
5535 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5536 > GET_MODE_BITSIZE (index_mode))
5537 {
5538 enum machine_mode omode = TYPE_MODE (index_type);
5539 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5540
5541 /* We must handle the endpoints in the original mode. */
5542 index_expr = build (MINUS_EXPR, index_type,
5543 index_expr, minval);
5544 minval = integer_zero_node;
5545 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5546 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5547 omode, 1, 0, default_label);
5548 /* Now we can safely truncate. */
5549 index = convert_to_mode (index_mode, index, 0);
5550 }
5551 else
5552 {
5553 if (TYPE_MODE (index_type) != index_mode)
5554 {
5555 index_expr = convert (type_for_size (index_bits, 0),
5556 index_expr);
5557 index_type = TREE_TYPE (index_expr);
5558 }
5559
5560 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5561 }
5562 emit_queue ();
5563 index = protect_from_queue (index, 0);
5564 do_pending_stack_adjust ();
5565
5566 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5567 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5568 (index, op_mode))
5569 index = copy_to_mode_reg (op_mode, index);
5570
5571 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5572
5573 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5574 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5575 (op1, op_mode))
5576 op1 = copy_to_mode_reg (op_mode, op1);
5577
5578 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5579
5580 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5581 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5582 (op2, op_mode))
5583 op2 = copy_to_mode_reg (op_mode, op2);
5584
5585 emit_jump_insn (gen_casesi (index, op1, op2,
5586 table_label, default_label));
5587 win = 1;
5588 }
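/* Background note (describing the standard casesi operand layout,
   an assumption not spelled out in this file): operand 0 is the
   index, operand 1 the lower bound and operand 2 the range (upper
   bound minus lower bound), which is why op1 and op2 above are
   built from minval and range.  */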
5589 #endif
5590 #ifdef HAVE_tablejump
5591 if (! win && HAVE_tablejump)
5592 {
5593 index_type = thiscase->data.case_stmt.nominal_type;
5594 index_expr = fold (build (MINUS_EXPR, index_type,
5595 convert (index_type, index_expr),
5596 convert (index_type, minval)));
5597 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5598 emit_queue ();
5599 index = protect_from_queue (index, 0);
5600 do_pending_stack_adjust ();
5601
5602 do_tablejump (index, TYPE_MODE (index_type),
5603 expand_expr (range, NULL_RTX, VOIDmode, 0),
5604 table_label, default_label);
5605 win = 1;
5606 }
5607 #endif
5608 if (! win)
5609 abort ();
5610
5611 /* Get table of labels to jump to, in order of case index. */
5612
5613 ncases = TREE_INT_CST_LOW (range) + 1;
5614 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5615 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5616
5617 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5618 {
5619 register HOST_WIDE_INT i
5620 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5621
5622 while (1)
5623 {
5624 labelvec[i]
5625 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5626 if (i + TREE_INT_CST_LOW (orig_minval)
5627 == TREE_INT_CST_LOW (n->high))
5628 break;
5629 i++;
5630 }
5631 }
5632
5633 /* Fill in the gaps with the default. */
5634 for (i = 0; i < ncases; i++)
5635 if (labelvec[i] == 0)
5636 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5637
5638 /* Output the table. */
5639 emit_label (table_label);
5640
5641 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5642 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5643 gen_rtx_LABEL_REF (Pmode, table_label),
5644 gen_rtvec_v (ncases, labelvec),
5645 const0_rtx, const0_rtx));
5646 else
5647 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5648 gen_rtvec_v (ncases, labelvec)));
5649
5650 /* If the case insn drops through the table,
5651 after the table we must jump to the default-label.
5652 Otherwise record no drop-through after the table. */
5653 #ifdef CASE_DROPS_THROUGH
5654 emit_jump (default_label);
5655 #else
5656 emit_barrier ();
5657 #endif
5658 }
5659
5660 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5661 reorder_insns (before_case, get_last_insn (),
5662 thiscase->data.case_stmt.start);
5663 }
5664 else
5665 end_cleanup_deferral ();
5666
5667 if (thiscase->exit_label)
5668 emit_label (thiscase->exit_label);
5669
5670 free_case_nodes (case_stack->data.case_stmt.case_list);
5671 POPSTACK (case_stack);
5672
5673 free_temp_slots ();
5674 }
5675
5676 /* Convert the tree NODE into a list linked by the right field, with the left
5677 field zeroed. RIGHT is used for recursion; it is a list to be placed
5678 rightmost in the resulting list. */
5679
5680 static struct case_node *
5681 case_tree2list (node, right)
5682 struct case_node *node, *right;
5683 {
5684 struct case_node *left;
5685
5686 if (node->right)
5687 right = case_tree2list (node->right, right);
5688
5689 node->right = right;
5690 if ((left = node->left))
5691 {
5692 node->left = 0;
5693 return case_tree2list (left, node);
5694 }
5695
5696 return node;
5697 }
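/* Illustrative example (not part of the original source): for the tree

       2
      / \
     1   3

   case_tree2list (node_2, 0) returns the chain 1 -> 2 -> 3 linked
   through the right fields, with every left field zeroed.  */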
5698
5699 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5700
5701 static void
5702 do_jump_if_equal (op1, op2, label, unsignedp)
5703 rtx op1, op2, label;
5704 int unsignedp;
5705 {
5706 if (GET_CODE (op1) == CONST_INT
5707 && GET_CODE (op2) == CONST_INT)
5708 {
5709 if (INTVAL (op1) == INTVAL (op2))
5710 emit_jump (label);
5711 }
5712 else
5713 {
5714 enum machine_mode mode = GET_MODE (op1);
5715 if (mode == VOIDmode)
5716 mode = GET_MODE (op2);
5717 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5718 0, label);
5719 }
5720 }
5721 \f
5722 /* Not all case values are encountered equally. This function
5723 uses a heuristic to weight case labels, in cases where that
5724 looks like a reasonable thing to do.
5725
5726 Right now, all we try to guess is text, and we establish the
5727 following weights:
5728
5729 chars above space: 16
5730 digits: 16
5731 default: 12
5732 space, punct: 8
5733 tab: 4
5734 newline: 2
5735 other "\" chars: 1
5736 remaining chars: 0
5737
5738 If we find any cases in the switch that are not either -1 or in the range
5739 of valid ASCII characters, or are control characters other than those
5740 commonly used with "\", don't treat this switch as scanning text.
5741
5742 Return 1 if these nodes are suitable for cost estimation, otherwise
5743 return 0. */
5744
5745 static int
5746 estimate_case_costs (node)
5747 case_node_ptr node;
5748 {
5749 tree min_ascii = build_int_2 (-1, -1);
5750 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5751 case_node_ptr n;
5752 int i;
5753
5754 /* If we haven't already made the cost table, make it now. Note that the
5755 lower bound of the table is -1, not zero. */
5756
5757 if (cost_table == NULL)
5758 {
5759 cost_table = cost_table_ + 1;
5760
5761 for (i = 0; i < 128; i++)
5762 {
5763 if (ISALNUM (i))
5764 cost_table[i] = 16;
5765 else if (ISPUNCT (i))
5766 cost_table[i] = 8;
5767 else if (ISCNTRL (i))
5768 cost_table[i] = -1;
5769 }
5770
5771 cost_table[' '] = 8;
5772 cost_table['\t'] = 4;
5773 cost_table['\0'] = 4;
5774 cost_table['\n'] = 2;
5775 cost_table['\f'] = 1;
5776 cost_table['\v'] = 1;
5777 cost_table['\b'] = 1;
5778 }
5779
5780 /* See if all the case expressions look like text. It is text if the
5781 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5782 as signed arithmetic since we don't want to ever access cost_table with a
5783 value less than -1. Also check that none of the constants in a range
5784 are strange control characters. */
5785
5786 for (n = node; n; n = n->right)
5787 {
5788 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5789 return 0;
5790
5791 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5792 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5793 if (cost_table[i] < 0)
5794 return 0;
5795 }
5796
5797 /* All interesting values are within the range of interesting
5798 ASCII characters. */
5799 return 1;
5800 }
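/* Worked example (illustrative): for the case labels 'a', '0' and
   '\n' the table above yields weights 16, 16 and 2, so this function
   returns 1 and those weights later guide where balance_case_nodes
   splits the list.  A case value above 127, or a control character
   such as '\001', makes it return 0 and disables the cost table.  */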
5801
5802 /* Scan an ordered list of case nodes
5803 combining those with consecutive values or ranges.
5804
5805 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5806
5807 static void
5808 group_case_nodes (head)
5809 case_node_ptr head;
5810 {
5811 case_node_ptr node = head;
5812
5813 while (node)
5814 {
5815 rtx lb = next_real_insn (label_rtx (node->code_label));
5816 rtx lb2;
5817 case_node_ptr np = node;
5818
5819 /* Try to group the successors of NODE with NODE. */
5820 while (((np = np->right) != 0)
5821 /* Do they jump to the same place? */
5822 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5823 || (lb != 0 && lb2 != 0
5824 && simplejump_p (lb)
5825 && simplejump_p (lb2)
5826 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5827 SET_SRC (PATTERN (lb2)))))
5828 /* Are their ranges consecutive? */
5829 && tree_int_cst_equal (np->low,
5830 fold (build (PLUS_EXPR,
5831 TREE_TYPE (node->high),
5832 node->high,
5833 integer_one_node)))
5834 /* An overflow is not consecutive. */
5835 && tree_int_cst_lt (node->high,
5836 fold (build (PLUS_EXPR,
5837 TREE_TYPE (node->high),
5838 node->high,
5839 integer_one_node))))
5840 {
5841 node->high = np->high;
5842 }
5843 /* NP is the first node after NODE which can't be grouped with it.
5844 Delete the nodes in between, and move on to that node. */
5845 node->right = np;
5846 node = np;
5847 }
5848 }
5849
5850 /* Take an ordered list of case nodes
5851 and transform them into a near optimal binary tree,
5852 on the assumption that any target code selection value is as
5853 likely as any other.
5854
5855 The transformation is performed by splitting the ordered
5856 list into two equal sections plus a pivot. The parts are
5857 then attached to the pivot as left and right branches. Each
5858 branch is then transformed recursively. */
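/* Illustrative example (not from the original source): the ordered
   list 1 -> 2 -> 3 -> 4 -> 5 splits at the pivot 3, giving

       3
      / \
     1   4
      \   \
       2   5

   after the two halves are themselves balanced recursively.  */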
5859
5860 static void
5861 balance_case_nodes (head, parent)
5862 case_node_ptr *head;
5863 case_node_ptr parent;
5864 {
5865 register case_node_ptr np;
5866
5867 np = *head;
5868 if (np)
5869 {
5870 int cost = 0;
5871 int i = 0;
5872 int ranges = 0;
5873 register case_node_ptr *npp;
5874 case_node_ptr left;
5875
5876 /* Count the number of entries on this branch. Also count the ranges. */
5877
5878 while (np)
5879 {
5880 if (!tree_int_cst_equal (np->low, np->high))
5881 {
5882 ranges++;
5883 if (use_cost_table)
5884 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5885 }
5886
5887 if (use_cost_table)
5888 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5889
5890 i++;
5891 np = np->right;
5892 }
5893
5894 if (i > 2)
5895 {
5896 /* Split this list if it is long enough for that to help. */
5897 npp = head;
5898 left = *npp;
5899 if (use_cost_table)
5900 {
5901 /* Find the place in the list that bisects the list's total cost.
5902 Here I gets half the total cost. */
5903 int n_moved = 0;
5904 i = (cost + 1) / 2;
5905 while (1)
5906 {
5907 /* Skip nodes while their cost does not reach that amount. */
5908 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5909 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5910 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5911 if (i <= 0)
5912 break;
5913 npp = &(*npp)->right;
5914 n_moved += 1;
5915 }
5916 if (n_moved == 0)
5917 {
5918 /* Leave this branch lopsided, but optimize left-hand
5919 side and fill in `parent' fields for right-hand side. */
5920 np = *head;
5921 np->parent = parent;
5922 balance_case_nodes (&np->left, np);
5923 for (; np->right; np = np->right)
5924 np->right->parent = np;
5925 return;
5926 }
5927 }
5928 /* If there are just three nodes, split at the middle one. */
5929 else if (i == 3)
5930 npp = &(*npp)->right;
5931 else
5932 {
5933 /* Find the place in the list that bisects the list's total cost,
5934 where ranges count as 2.
5935 Here I gets half the total cost. */
5936 i = (i + ranges + 1) / 2;
5937 while (1)
5938 {
5939 /* Skip nodes while their cost does not reach that amount. */
5940 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5941 i--;
5942 i--;
5943 if (i <= 0)
5944 break;
5945 npp = &(*npp)->right;
5946 }
5947 }
5948 *head = np = *npp;
5949 *npp = 0;
5950 np->parent = parent;
5951 np->left = left;
5952
5953 /* Optimize each of the two split parts. */
5954 balance_case_nodes (&np->left, np);
5955 balance_case_nodes (&np->right, np);
5956 }
5957 else
5958 {
5959 /* Else leave this branch as one level,
5960 but fill in `parent' fields. */
5961 np = *head;
5962 np->parent = parent;
5963 for (; np->right; np = np->right)
5964 np->right->parent = np;
5965 }
5966 }
5967 }
5968 \f
5969 /* Search the parent sections of the case node tree
5970 to see if a test for the lower bound of NODE would be redundant.
5971 INDEX_TYPE is the type of the index expression.
5972
5973 The instructions to generate the case decision tree are
5974 output in the same order as nodes are processed, so it is
5975 known that if a parent node checks a range ending at the
5976 current node's low value minus one, the current node is
5977 bounded at its lower span. Thus the test would be redundant. */
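/* For instance (an illustration added here): if some parent tested a
   range ending at 4, then a node with low == 5 needs no lower-bound
   test, since control could only have reached it with index >= 5.
   node_has_high_bound below is the mirror image for the upper
   bound.  */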
5978
5979 static int
5980 node_has_low_bound (node, index_type)
5981 case_node_ptr node;
5982 tree index_type;
5983 {
5984 tree low_minus_one;
5985 case_node_ptr pnode;
5986
5987 /* If the lower bound of this node is the lowest value in the index type,
5988 we need not test it. */
5989
5990 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5991 return 1;
5992
5993 /* If this node has a left branch, the value at the left must be less
5994 than that at this node, so it cannot be bounded at the bottom and
5995 we need not bother testing any further. */
5996
5997 if (node->left)
5998 return 0;
5999
6000 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6001 node->low, integer_one_node));
6002
6003 /* If the subtraction above overflowed, we can't verify anything.
6004 Otherwise, look for a parent that tests our value - 1. */
6005
6006 if (! tree_int_cst_lt (low_minus_one, node->low))
6007 return 0;
6008
6009 for (pnode = node->parent; pnode; pnode = pnode->parent)
6010 if (tree_int_cst_equal (low_minus_one, pnode->high))
6011 return 1;
6012
6013 return 0;
6014 }
6015
6016 /* Search the parent sections of the case node tree
6017 to see if a test for the upper bound of NODE would be redundant.
6018 INDEX_TYPE is the type of the index expression.
6019
6020 The instructions to generate the case decision tree are
6021 output in the same order as nodes are processed, so it is
6022 known that if a parent node checks a range starting at the
6023 current node's high value plus one, the current node is
6024 bounded at its upper span. Thus the test would be redundant. */
6025
6026 static int
6027 node_has_high_bound (node, index_type)
6028 case_node_ptr node;
6029 tree index_type;
6030 {
6031 tree high_plus_one;
6032 case_node_ptr pnode;
6033
6034 /* If there is no upper bound, obviously no test is needed. */
6035
6036 if (TYPE_MAX_VALUE (index_type) == NULL)
6037 return 1;
6038
6039 /* If the upper bound of this node is the highest value in the type
6040 of the index expression, we need not test against it. */
6041
6042 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6043 return 1;
6044
6045 /* If this node has a right branch, the value at the right must be greater
6046 than that at this node, so it cannot be bounded at the top and
6047 we need not bother testing any further. */
6048
6049 if (node->right)
6050 return 0;
6051
6052 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6053 node->high, integer_one_node));
6054
6055 /* If the addition above overflowed, we can't verify anything.
6056 Otherwise, look for a parent that tests our value + 1. */
6057
6058 if (! tree_int_cst_lt (node->high, high_plus_one))
6059 return 0;
6060
6061 for (pnode = node->parent; pnode; pnode = pnode->parent)
6062 if (tree_int_cst_equal (high_plus_one, pnode->low))
6063 return 1;
6064
6065 return 0;
6066 }
6067
6068 /* Search the parent sections of the
6069 case node tree to see if both tests for the upper and lower
6070 bounds of NODE would be redundant. */
6071
6072 static int
6073 node_is_bounded (node, index_type)
6074 case_node_ptr node;
6075 tree index_type;
6076 {
6077 return (node_has_low_bound (node, index_type)
6078 && node_has_high_bound (node, index_type));
6079 }
6080
6081 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6082
6083 static void
6084 emit_jump_if_reachable (label)
6085 rtx label;
6086 {
6087 if (GET_CODE (get_last_insn ()) != BARRIER)
6088 emit_jump (label);
6089 }
6090 \f
6091 /* Emit step-by-step code to select a case for the value of INDEX.
6092 The decision tree generated here follows the form of the
6093 case-node binary tree NODE, whose nodes represent test conditions.
6094 INDEX_TYPE is the type of the index of the switch.
6095
6096 Care is taken to prune redundant tests from the decision tree
6097 by detecting any boundary conditions already checked by
6098 emitted rtx. (See node_has_high_bound, node_has_low_bound
6099 and node_is_bounded, above.)
6100
6101 Where the test conditions can be shown to be redundant we emit
6102 an unconditional jump to the target code. As a further
6103 optimization, the subordinates of a tree node are examined to
6104 check for bounded nodes. In this case conditional and/or
6105 unconditional jumps resulting from the boundary check for the
6106 current node are arranged to target the subordinate's associated
6107 code for out-of-bound conditions on the current node.
6108
6109 We can assume that when control reaches the code generated here,
6110 the index value has already been compared with the parents
6111 of this node, and determined to be on the same side of each parent
6112 as this node is. Thus, if this node tests for the value 51,
6113 and a parent tested for 52, we don't need to consider
6114 the possibility of a value greater than 51. If another parent
6115 tests for the value 50, then this node need not test anything. */
6116
6117 static void
6118 emit_case_nodes (index, node, default_label, index_type)
6119 rtx index;
6120 case_node_ptr node;
6121 rtx default_label;
6122 tree index_type;
6123 {
6124 /* If INDEX has an unsigned type, we must make unsigned branches. */
6125 int unsignedp = TREE_UNSIGNED (index_type);
6126 enum machine_mode mode = GET_MODE (index);
6127
6128 /* See if our parents have already tested everything for us.
6129 If they have, emit an unconditional jump for this node. */
6130 if (node_is_bounded (node, index_type))
6131 emit_jump (label_rtx (node->code_label));
6132
6133 else if (tree_int_cst_equal (node->low, node->high))
6134 {
6135 /* Node is single valued. First see if the index expression matches
6136 this node and then check our children, if any. */
6137
6138 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6139 label_rtx (node->code_label), unsignedp);
6140
6141 if (node->right != 0 && node->left != 0)
6142 {
6143 /* This node has children on both sides.
6144 Dispatch to one side or the other
6145 by comparing the index value with this node's value.
6146 If one subtree is bounded, check that one first,
6147 so we can avoid real branches in the tree. */
6148
6149 if (node_is_bounded (node->right, index_type))
6150 {
6151 emit_cmp_and_jump_insns (index,
6152 expand_expr (node->high, NULL_RTX,
6153 VOIDmode, 0),
6154 GT, NULL_RTX, mode, unsignedp, 0,
6155 label_rtx (node->right->code_label));
6156 emit_case_nodes (index, node->left, default_label, index_type);
6157 }
6158
6159 else if (node_is_bounded (node->left, index_type))
6160 {
6161 emit_cmp_and_jump_insns (index,
6162 expand_expr (node->high, NULL_RTX,
6163 VOIDmode, 0),
6164 LT, NULL_RTX, mode, unsignedp, 0,
6165 label_rtx (node->left->code_label));
6166 emit_case_nodes (index, node->right, default_label, index_type);
6167 }
6168
6169 else
6170 {
6171 /* Neither node is bounded. First distinguish the two sides;
6172 then emit the code for one side at a time. */
6173
6174 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6175
6176 /* See if the value is on the right. */
6177 emit_cmp_and_jump_insns (index,
6178 expand_expr (node->high, NULL_RTX,
6179 VOIDmode, 0),
6180 GT, NULL_RTX, mode, unsignedp, 0,
6181 label_rtx (test_label));
6182
6183 /* Value must be on the left.
6184 Handle the left-hand subtree. */
6185 emit_case_nodes (index, node->left, default_label, index_type);
6186 /* If left-hand subtree does nothing,
6187 go to default. */
6188 emit_jump_if_reachable (default_label);
6189
6190 /* Code branches here for the right-hand subtree. */
6191 expand_label (test_label);
6192 emit_case_nodes (index, node->right, default_label, index_type);
6193 }
6194 }
6195
6196 else if (node->right != 0 && node->left == 0)
6197 {
6198 /* Here we have a right child but no left child, so we issue a
6199 conditional branch to default and process the right child.
6200
6201 Omit the conditional branch to default if it would avoid only one
6202 right child; it costs too much space to save so little time. */
6203
6204 if (node->right->right || node->right->left
6205 || !tree_int_cst_equal (node->right->low, node->right->high))
6206 {
6207 if (!node_has_low_bound (node, index_type))
6208 {
6209 emit_cmp_and_jump_insns (index,
6210 expand_expr (node->high, NULL_RTX,
6211 VOIDmode, 0),
6212 LT, NULL_RTX, mode, unsignedp, 0,
6213 default_label);
6214 }
6215
6216 emit_case_nodes (index, node->right, default_label, index_type);
6217 }
6218 else
6219 /* We cannot process node->right normally
6220 since we haven't ruled out the numbers less than
6221 this node's value. So handle node->right explicitly. */
6222 do_jump_if_equal (index,
6223 expand_expr (node->right->low, NULL_RTX,
6224 VOIDmode, 0),
6225 label_rtx (node->right->code_label), unsignedp);
6226 }
6227
6228 else if (node->right == 0 && node->left != 0)
6229 {
6230 /* Just one subtree, on the left. */
6231
6232 #if 0 /* The following code and comment were formerly part
6233 of the condition here, but they didn't work
6234 and I don't understand what the idea was. -- rms. */
6235 /* If our "most probable entry" is less probable
6236 than the default label, emit a jump to
6237 the default label using condition codes
6238 already lying around. With no right branch,
6239 a branch-greater-than will get us to the default
6240 label correctly. */
6241 if (use_cost_table
6242 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6243 ;
6244 #endif /* 0 */
6245 if (node->left->left || node->left->right
6246 || !tree_int_cst_equal (node->left->low, node->left->high))
6247 {
6248 if (!node_has_high_bound (node, index_type))
6249 {
6250 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6251 NULL_RTX,
6252 VOIDmode, 0),
6253 GT, NULL_RTX, mode, unsignedp, 0,
6254 default_label);
6255 }
6256
6257 emit_case_nodes (index, node->left, default_label, index_type);
6258 }
6259 else
6260 /* We cannot process node->left normally
6261 since we haven't ruled out the numbers greater than
6262 this node's value. So handle node->left explicitly. */
6263 do_jump_if_equal (index,
6264 expand_expr (node->left->low, NULL_RTX,
6265 VOIDmode, 0),
6266 label_rtx (node->left->code_label), unsignedp);
6267 }
6268 }
6269 else
6270 {
6271 /* Node is a range. These cases are very similar to those for a single
6272 value, except that we do not start by testing whether this node
6273 is the one to branch to. */
6274
6275 if (node->right != 0 && node->left != 0)
6276 {
6277 /* Node has subtrees on both sides.
6278 If the right-hand subtree is bounded,
6279 test for it first, since we can go straight there.
6280 Otherwise, we need to make a branch in the control structure,
6281 then handle the two subtrees. */
6282 tree test_label = 0;
6283
6284 if (node_is_bounded (node->right, index_type))
6285 /* Right hand node is fully bounded so we can eliminate any
6286 testing and branch directly to the target code. */
6287 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6288 VOIDmode, 0),
6289 GT, NULL_RTX, mode, unsignedp, 0,
6290 label_rtx (node->right->code_label));
6291 else
6292 {
6293 /* Right hand node requires testing.
6294 Branch to a label where we will handle it later. */
6295
6296 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6297 emit_cmp_and_jump_insns (index,
6298 expand_expr (node->high, NULL_RTX,
6299 VOIDmode, 0),
6300 GT, NULL_RTX, mode, unsignedp, 0,
6301 label_rtx (test_label));
6302 }
6303
6304 /* Value belongs to this node or to the left-hand subtree. */
6305
6306 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6307 VOIDmode, 0),
6308 GE, NULL_RTX, mode, unsignedp, 0,
6309 label_rtx (node->code_label));
6310
6311 /* Handle the left-hand subtree. */
6312 emit_case_nodes (index, node->left, default_label, index_type);
6313
6314 /* If right node had to be handled later, do that now. */
6315
6316 if (test_label)
6317 {
6318 /* If the left-hand subtree fell through,
6319 don't let it fall into the right-hand subtree. */
6320 emit_jump_if_reachable (default_label);
6321
6322 expand_label (test_label);
6323 emit_case_nodes (index, node->right, default_label, index_type);
6324 }
6325 }
6326
6327 else if (node->right != 0 && node->left == 0)
6328 {
6329 /* Deal with values to the left of this node,
6330 if they are possible. */
6331 if (!node_has_low_bound (node, index_type))
6332 {
6333 emit_cmp_and_jump_insns (index,
6334 expand_expr (node->low, NULL_RTX,
6335 VOIDmode, 0),
6336 LT, NULL_RTX, mode, unsignedp, 0,
6337 default_label);
6338 }
6339
6340 /* Value belongs to this node or to the right-hand subtree. */
6341
6342 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6343 VOIDmode, 0),
6344 LE, NULL_RTX, mode, unsignedp, 0,
6345 label_rtx (node->code_label));
6346
6347 emit_case_nodes (index, node->right, default_label, index_type);
6348 }
6349
6350 else if (node->right == 0 && node->left != 0)
6351 {
6352 /* Deal with values to the right of this node,
6353 if they are possible. */
6354 if (!node_has_high_bound (node, index_type))
6355 {
6356 emit_cmp_and_jump_insns (index,
6357 expand_expr (node->high, NULL_RTX,
6358 VOIDmode, 0),
6359 GT, NULL_RTX, mode, unsignedp, 0,
6360 default_label);
6361 }
6362
6363 /* Value belongs to this node or to the left-hand subtree. */
6364
6365 emit_cmp_and_jump_insns (index,
6366 expand_expr (node->low, NULL_RTX,
6367 VOIDmode, 0),
6368 GE, NULL_RTX, mode, unsignedp, 0,
6369 label_rtx (node->code_label));
6370
6371 emit_case_nodes (index, node->left, default_label, index_type);
6372 }
6373
6374 else
6375 {
6376 /* Node has no children so we check low and high bounds to remove
6377 redundant tests. At most one of the two bounds can already be established,
6378 since otherwise this node is bounded--a case tested already. */
6379
6380 if (!node_has_high_bound (node, index_type))
6381 {
6382 emit_cmp_and_jump_insns (index,
6383 expand_expr (node->high, NULL_RTX,
6384 VOIDmode, 0),
6385 GT, NULL_RTX, mode, unsignedp, 0,
6386 default_label);
6387 }
6388
6389 if (!node_has_low_bound (node, index_type))
6390 {
6391 emit_cmp_and_jump_insns (index,
6392 expand_expr (node->low, NULL_RTX,
6393 VOIDmode, 0),
6394 LT, NULL_RTX, mode, unsignedp, 0,
6395 default_label);
6396 }
6397
6398 emit_jump (label_rtx (node->code_label));
6399 }
6400 }
6401 }