/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

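/* For illustration, a front end handling the C statement

       if (c)
         stmt;

   might make this sequence of calls (a sketch of the protocol,
   not the code of any particular parser):

       expand_start_cond (c, 0);
       ...expand code for STMT...
       expand_end_cond ();

   Similar start/end pairs exist below for loops, case statements,
   bindings and exception contours.  */
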
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function,
   when not optimizing.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;       /* Left son in binary tree */
  struct case_node *right;      /* Right son in binary tree; also node chain */
  struct case_node *parent;     /* Parent of node in binary tree */
  tree low;                     /* Lowest index value for this label */
  tree high;                    /* Highest index value for this label */
  tree code_label;              /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

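/* For example (an illustrative sketch), the C statement

       switch (i) { case 1: ... case 3: ... case 5: ... }

   first yields the chain 1 -> 3 -> 5 through the RIGHT fields, each
   node having LOW == HIGH.  If compare/jump insns are chosen,
   balance_case_nodes could make 3 the root with 1 as its LEFT child
   and 5 as its RIGHT child; the exact shape depends on the balancing
   heuristics and the estimated costs.  */
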
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};

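/* As an illustration (not code from any front end): while expanding

       while (a)
         if (b)
           { int v[n]; ... }

   there is one entry on loop_stack, one on cond_stack and one on
   block_stack, and all three are chained on nesting_stack with the
   binding contour innermost.  */
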
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)                                 \
do { int initial_depth = nesting_stack->depth;          \
     do { struct nesting *this = STACK;                 \
          STACK = this->next;                           \
          nesting_stack = this->all;                    \
          nesting_depth = this->depth;                  \
          obstack_free (&stmt_obstack, this); }         \
     while (nesting_depth > initial_depth); } while (0)
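
/* For example, `POPSTACK (loop_stack);' frees the innermost loop entry
   together with any entries begun inside it, since everything above the
   loop's depth on nesting_stack must belong to constructs nested within
   the loop.  */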
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

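/* For instance, in GNU C:

       goto lab;
       {
         int v[n];       ... a variable-sized array, so this block
                             must reset the stack pointer on exit ...
       lab: ;
       }

   At the time the `goto' is expanded, the stack level at LAB is not
   yet known, so expand_goto queues a fixup here; it is patched when
   the binding contour containing LAB is exited.  (A sketch of the
   situation, not literal source.)  */
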
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
  emit_barrier ();
}
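
/* E.g. for the GNU C extension

       void *p = &&lab;
       ...
       goto *p;

   the front end passes the pointer expression `p' to this function.
   (An illustrative sketch; the label-value syntax belongs to the C
   front end, not to this file.)  */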
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      nonlocal_goto_stack_level
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_insn_before (gen_move_insn (nonlocal_goto_stack_level,
                                       stack_pointer_rtx),
                        tail_recursion_reentry);
    }
}
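
/* For example, with GNU nested functions:

       void f ()
       {
         __label__ lab;
         void g () { goto lab; }
         ...
       lab: ;
       }

   the front end calls declare_nonlocal_label for LAB, and the
   `goto lab' inside G is handled by the nonlocal case in expand_goto
   below.  (A sketch of the intended use, not verbatim front-end
   code.)  */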

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      p->nonlocal_goto_handler_slot,
                                      p->nonlocal_goto_stack_level,
                                      gen_rtx (LABEL_REF, Pmode,
                                               label_rtx (label))));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));
          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          temp = copy_to_reg (p->nonlocal_goto_handler_slot);
          /* Restore the stack pointer.  Note this uses fp just restored.  */
          emit_move_insn (stack_pointer_rtx, p->nonlocal_goto_stack_level);
          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx,
                          gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
          /* USE of frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   BODY is the LABEL_DECL tree node (or 0); LABEL is its CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, 0);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this would clobber
             the stack pointer.  This one should be deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_move_insn (stack_pointer_rtx, stack_level);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
#if 0
             && block->data.block.outer_cleanups != empty_cleanup_list
#endif
             )
            || block->data.block.cleanups)
           ? tree_cons (0, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! TREE_REGDECL (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              TREE_REGDECL (f->target) = 1;
            }

          /* Execute cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level),
                             f->before_jump);
          f->before_jump = 0;
        }
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.
           Also mark the cleanup_list_list element for F
           that corresponds to this block, so that ultimately
           this block's cleanups will be executed by the code above.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
           it means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            TREE_ADDRESSABLE (lists) = 1;

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
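
/* For example, the statement `asm ("nop");' arrives here as a
   STRING_CST and is emitted as a single ASM_INPUT rtx containing the
   string "nop"; asm statements with operands go through
   expand_asm_operands below instead.  */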

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

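/* As an illustration, for the GNU C extended asm

       asm volatile ("addl %2,%0" : "=r" (sum) : "0" (a), "r" (b));

   OUTPUTS would hold one pair (value SUM, constraint "=r"), INPUTS
   two pairs, and VOL would be nonzero because of `volatile'.
   (A sketch using AT&T-style syntax; the template and constraint
   strings are whatever the user wrote.)  */
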
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        {
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            {
              error ("output operand constraint contains `+'");
              return;
            }
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
            found_equal = 1;
        }
      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a variable or indirect ref,
         or a part of one,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
             || TREE_CODE (val) == ARRAY_REF)
        val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
          && TREE_CODE (val) != PARM_DECL
          && TREE_CODE (val) != INDIRECT_REF)
        TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)));
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
          {
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;
          }

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              error ("unknown register name `%s' in `asm'", regname);
              return;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        /* This case needs to be written.  */
        abort ();
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
        return 1;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
        return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
        return 0;
      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
        return 0;
      /* Sometimes it results in a cast of a cast of a modify.
         Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
           || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
        return 0;

    default:
      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
      return 1;
    }
}
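
/* For example, under -Wunused the statement `*p++;' draws the
   "value computed is not used" warning: the increment is a side
   effect, but the value of the INDIRECT_REF is discarded.  Casting
   to void, as in `(void) *p++;', suppresses the warning by the
   CONVERT_EXPR rule above.  */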

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
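
/* A front end expanding the GNU C statement expression

       ({ int y = foo (); y + 1; })

   calls expand_start_stmt_expr, expands each statement inside (so
   that `y + 1' ends up as last_expr_value), and then hands the
   RTL_EXPR returned above to expand_end_stmt_expr below.  (A sketch
   of the calling protocol, not front-end source.)  */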

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

                <-- Level N-1
   {            <-- exception handler block
                <-- Level N
                <-- in an exception handler
        {       <-- try block
        :       <-- in a TRY block
        :       <-- in an exception handler
        :
        }

        {       <-- except block
        :       <-- in an except block
        :       <-- in an exception handler
        :
        }

   }  */

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Create the after-label and jump to it,
   so that falling out of the try block skips the exception handlers.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
         if there is one.  Otherwise, it just goes to the end
         of the construct.  */
      for (n = except_stack->next; n; n = n->next)
        if (n->exit_label != 0)
          {
            except_stack->exit_label = n->exit_label;
            break;
          }
      if (n == 0)
        except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
         if there is one.  Otherwise, it just goes to the end
         of the construct.  */
      for (n = except_stack->next; n; n = n->next)
        if (n->data.except_stmt.escape_label != 0)
          {
            except_stack->data.except_stmt.escape_label
              = n->data.except_stmt.escape_label;
            break;
          }
      if (n == 0)
        except_stack->data.except_stmt.escape_label
          = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
        expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
        return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
         highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
        {
          tree prev = NULL_TREE;
          raised = except_stack->data.except_stmt.raised;
          while (handled)
            {
              tree this_raise;
              for (this_raise = raised, prev = 0; this_raise;
                   this_raise = TREE_CHAIN (this_raise))
                {
                  if (value_member (TREE_VALUE (this_raise), handled))
                    {
                      if (prev)
                        TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
                      else
                        {
                          raised = TREE_CHAIN (raised);
                          if (raised == NULL_TREE)
                            goto nada;
                        }
                    }
                  else
                    prev = this_raise;
                }
              handled = TREE_CHAIN (handled);
            }
          if (prev == NULL_TREE)
            prev = raised;
          if (prev)
            TREE_CHAIN (prev) = n->data.except_stmt.raised;
        nada:
          n->data.except_stmt.raised = raised;
        }
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */
int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}
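
/* Taken together, a front end with a try/except construct might make
   this sequence of calls:

       expand_start_try (cond, 0, 1);
       ...body of the try block...
       expand_end_try ();
       expand_start_except (0, 1);
       expand_catch (ex);  ...handler...  expand_end_catch ();
       expand_end_except ();

   (A sketch of the expected protocol, inferred from the functions
   above; the argument values are illustrative.)  */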
1667 \f
1668 /* Generate RTL for the start of an if-then. COND is the expression
1669 whose truth should be tested.
1670
1671 If EXITFLAG is nonzero, this conditional is visible to
1672 `exit_something'. */
1673
1674 void
1675 expand_start_cond (cond, exitflag)
1676 tree cond;
1677 int exitflag;
1678 {
1679 struct nesting *thiscond = ALLOC_NESTING ();
1680
1681 /* Make an entry on cond_stack for the cond we are entering. */
1682
1683 thiscond->next = cond_stack;
1684 thiscond->all = nesting_stack;
1685 thiscond->depth = ++nesting_depth;
1686 thiscond->data.cond.next_label = gen_label_rtx ();
1687 /* Before we encounter an `else', we don't need a separate exit label
1688 unless there are supposed to be exit statements
1689 to exit this conditional. */
1690 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1691 thiscond->data.cond.endif_label = thiscond->exit_label;
1692 cond_stack = thiscond;
1693 nesting_stack = thiscond;
1694
1695 do_jump (cond, thiscond->data.cond.next_label, NULL);
1696 }
1697
1698 /* Generate RTL between then-clause and the elseif-clause
1699 of an if-then-elseif-.... */
1700
1701 void
1702 expand_start_elseif (cond)
1703 tree cond;
1704 {
1705 if (cond_stack->data.cond.endif_label == 0)
1706 cond_stack->data.cond.endif_label = gen_label_rtx ();
1707 emit_jump (cond_stack->data.cond.endif_label);
1708 emit_label (cond_stack->data.cond.next_label);
1709 cond_stack->data.cond.next_label = gen_label_rtx ();
1710 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1711 }
1712
1713 /* Generate RTL between the then-clause and the else-clause
1714 of an if-then-else. */
1715
1716 void
1717 expand_start_else ()
1718 {
1719 if (cond_stack->data.cond.endif_label == 0)
1720 cond_stack->data.cond.endif_label = gen_label_rtx ();
1721 emit_jump (cond_stack->data.cond.endif_label);
1722 emit_label (cond_stack->data.cond.next_label);
1723 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1724 }
1725
1726 /* Generate RTL for the end of an if-then.
1727 Pop the record for it off of cond_stack. */
1728
1729 void
1730 expand_end_cond ()
1731 {
1732 struct nesting *thiscond = cond_stack;
1733
1734 do_pending_stack_adjust ();
1735 if (thiscond->data.cond.next_label)
1736 emit_label (thiscond->data.cond.next_label);
1737 if (thiscond->data.cond.endif_label)
1738 emit_label (thiscond->data.cond.endif_label);
1739
1740 POPSTACK (cond_stack);
1741 last_expr_type = 0;
1742 }
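
/* For example, a front end might expand

       if (a) s1; else if (b) s2; else s3;

   with the call sequence below (a sketch; exact usage varies
   by language front end):

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();  */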
1743 \f
1744 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1745 loop should be exited by `exit_something'. This is a loop for which
1746 `expand_continue' will jump to the top of the loop.
1747
1748 Make an entry on loop_stack to record the labels associated with
1749 this loop. */
1750
1751 struct nesting *
1752 expand_start_loop (exit_flag)
1753 int exit_flag;
1754 {
1755 register struct nesting *thisloop = ALLOC_NESTING ();
1756
1757 /* Make an entry on loop_stack for the loop we are entering. */
1758
1759 thisloop->next = loop_stack;
1760 thisloop->all = nesting_stack;
1761 thisloop->depth = ++nesting_depth;
1762 thisloop->data.loop.start_label = gen_label_rtx ();
1763 thisloop->data.loop.end_label = gen_label_rtx ();
1764 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1765 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1766 loop_stack = thisloop;
1767 nesting_stack = thisloop;
1768
1769 do_pending_stack_adjust ();
1770 emit_queue ();
1771 emit_note (0, NOTE_INSN_LOOP_BEG);
1772 emit_label (thisloop->data.loop.start_label);
1773
1774 return thisloop;
1775 }
1776
1777 /* Like expand_start_loop but for a loop where the continuation point
1778 (for expand_continue_loop) will be specified explicitly. */
1779
1780 struct nesting *
1781 expand_start_loop_continue_elsewhere (exit_flag)
1782 int exit_flag;
1783 {
1784 struct nesting *thisloop = expand_start_loop (exit_flag);
1785 loop_stack->data.loop.continue_label = gen_label_rtx ();
1786 return thisloop;
1787 }
1788
1789 /* Specify the continuation point for a loop started with
1790 expand_start_loop_continue_elsewhere.
1791 Use this at the point in the code to which a continue statement
1792 should jump. */
1793
1794 void
1795 expand_loop_continue_here ()
1796 {
1797 do_pending_stack_adjust ();
1798 emit_note (0, NOTE_INSN_LOOP_CONT);
1799 emit_label (loop_stack->data.loop.continue_label);
1800 }
1801
1802 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1803 Pop the block off of loop_stack. */
1804
1805 void
1806 expand_end_loop ()
1807 {
1808 register rtx insn = get_last_insn ();
1809 register rtx start_label = loop_stack->data.loop.start_label;
1810 rtx last_test_insn = 0;
1811 int num_insns = 0;
1812
1813 /* Mark the continue-point at the top of the loop if none elsewhere. */
1814 if (start_label == loop_stack->data.loop.continue_label)
1815 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1816
1817 do_pending_stack_adjust ();
1818
1819 /* If optimizing, perhaps reorder the loop. If the loop
1820 starts with a conditional exit, roll that to the end
1821 where it will optimize together with the jump back.
1822
1823 We look for the last conditional branch to the exit that we encounter
1824 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1825 branch to the exit first, use it.
1826
1827 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1828 because moving them is not valid. */
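
/* Schematically (a sketch, ignoring notes), a loop emitted as

       start: if (!cond) goto end;  body;  goto start;  end:

   is reordered into

       goto start;  newstart: body;
       start: if (!cond) goto end;  goto newstart;  end:

   so that later jump optimization can combine the conditional exit
   and the jump back into a single conditional jump to NEWSTART. */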
1829
1830 if (optimize
1831 &&
1832 ! (GET_CODE (insn) == JUMP_INSN
1833 && GET_CODE (PATTERN (insn)) == SET
1834 && SET_DEST (PATTERN (insn)) == pc_rtx
1835 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1836 {
1837 /* Scan insns from the top of the loop looking for a qualified
1838 conditional exit. */
1839 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1840 insn = NEXT_INSN (insn))
1841 {
1842 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1843 break;
1844
1845 if (GET_CODE (insn) == NOTE
1846 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1847 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1848 break;
1849
1850 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1851 num_insns++;
1852
1853 if (last_test_insn && num_insns > 30)
1854 break;
1855
1856 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1857 && SET_DEST (PATTERN (insn)) == pc_rtx
1858 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1859 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1860 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1861 == loop_stack->data.loop.end_label))
1862 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1863 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1864 == loop_stack->data.loop.end_label))))
1865 last_test_insn = insn;
1866
1867 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1868 && GET_CODE (PATTERN (insn)) == SET
1869 && SET_DEST (PATTERN (insn)) == pc_rtx
1870 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1871 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1872 == loop_stack->data.loop.end_label))
1873 /* Include the BARRIER that follows the jump. */
1874 last_test_insn = NEXT_INSN (insn);
1875 }
1876
1877 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1878 {
1879 /* We found one. Move everything from there up
1880 to the end of the loop, and add a jump into the loop
1881 to jump to there. */
1882 register rtx newstart_label = gen_label_rtx ();
1883 register rtx start_move = start_label;
1884
1885 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1886 then we want to move this note also. */
1887 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1888 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1889 == NOTE_INSN_LOOP_CONT))
1890 start_move = PREV_INSN (start_move);
1891
1892 emit_label_after (newstart_label, PREV_INSN (start_move));
1893 reorder_insns (start_move, last_test_insn, get_last_insn ());
1894 emit_jump_insn_after (gen_jump (start_label),
1895 PREV_INSN (newstart_label));
1896 emit_barrier_after (PREV_INSN (newstart_label));
1897 start_label = newstart_label;
1898 }
1899 }
1900
1901 emit_jump (start_label);
1902 emit_note (0, NOTE_INSN_LOOP_END);
1903 emit_label (loop_stack->data.loop.end_label);
1904
1905 POPSTACK (loop_stack);
1906
1907 last_expr_type = 0;
1908 }
1909
1910 /* Generate a jump to the current loop's continue-point.
1911 This is usually the top of the loop, but may be specified
1912 explicitly elsewhere. If not currently inside a loop,
1913 return 0 and do nothing; caller will print an error message. */
1914
1915 int
1916 expand_continue_loop (whichloop)
1917 struct nesting *whichloop;
1918 {
1919 last_expr_type = 0;
1920 if (whichloop == 0)
1921 whichloop = loop_stack;
1922 if (whichloop == 0)
1923 return 0;
1924 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1925 return 1;
1926 }
1927
1928 /* Generate a jump to exit the current loop. If not currently inside a loop,
1929 return 0 and do nothing; caller will print an error message. */
1930
1931 int
1932 expand_exit_loop (whichloop)
1933 struct nesting *whichloop;
1934 {
1935 last_expr_type = 0;
1936 if (whichloop == 0)
1937 whichloop = loop_stack;
1938 if (whichloop == 0)
1939 return 0;
1940 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1941 return 1;
1942 }
1943
1944 /* Generate a conditional jump to exit the current loop if COND
1945 evaluates to zero. If not currently inside a loop,
1946 return 0 and do nothing; caller will print an error message. */
1947
1948 int
1949 expand_exit_loop_if_false (whichloop, cond)
1950 struct nesting *whichloop;
1951 tree cond;
1952 {
1953 last_expr_type = 0;
1954 if (whichloop == 0)
1955 whichloop = loop_stack;
1956 if (whichloop == 0)
1957 return 0;
1958 do_jump (cond, whichloop->data.loop.end_label, NULL);
1959 return 1;
1960 }
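
/* For example, a front end might expand

       for (init; cond; inc) body;

   with the call sequence below (a sketch; a `continue' statement
   in the body would call expand_continue_loop):

       ... expand init ...
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, cond);
       ... expand body ...
       expand_loop_continue_here ();
       ... expand inc ...
       expand_end_loop ();  */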
1961
1962 /* Return nonzero if we should preserve sub-expressions as separate
1963 pseudos. We never do so if we aren't optimizing. We always do so
1964 if -fexpensive-optimizations is in effect.
1965
1966 Otherwise, we only do so if we are in the "early" part of a loop, i.e.,
1967 while the loop may still turn out to be a small one. */
1968
1969 int
1970 preserve_subexpressions_p ()
1971 {
1972 rtx insn;
1973
1974 if (flag_expensive_optimizations)
1975 return 1;
1976
1977 if (optimize == 0 || loop_stack == 0)
1978 return 0;
1979
1980 insn = get_last_insn_anywhere ();
1981
1982 return (insn
1983 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
1984 < n_non_fixed_regs * 3));
1985
1986 }
1987
1988 /* Generate a jump to exit the current loop, conditional, binding contour
1989 or case statement. Not all such constructs are visible to this function,
1990 only those started with EXIT_FLAG nonzero. Individual languages use
1991 the EXIT_FLAG parameter to control which kinds of constructs you can
1992 exit this way.
1993
1994 If not currently inside anything that can be exited,
1995 return 0 and do nothing; caller will print an error message. */
1996
1997 int
1998 expand_exit_something ()
1999 {
2000 struct nesting *n;
2001 last_expr_type = 0;
2002 for (n = nesting_stack; n; n = n->all)
2003 if (n->exit_label != 0)
2004 {
2005 expand_goto_internal (0, n->exit_label, 0);
2006 return 1;
2007 }
2008
2009 return 0;
2010 }
2011 \f
2012 /* Generate RTL to return from the current function, with no value.
2013 (That is, we do not do anything about returning any value.) */
2014
2015 void
2016 expand_null_return ()
2017 {
2018 struct nesting *block = block_stack;
2019 rtx last_insn = 0;
2020
2021 /* Does any pending block have cleanups? */
2022
2023 while (block && block->data.block.cleanups == 0)
2024 block = block->next;
2025
2026 /* If yes, use a goto to return, since that runs cleanups. */
2027
2028 expand_null_return_1 (last_insn, block != 0);
2029 }
2030
2031 /* Generate RTL to return from the current function, with value VAL. */
2032
2033 void
2034 expand_value_return (val)
2035 rtx val;
2036 {
2037 struct nesting *block = block_stack;
2038 rtx last_insn = get_last_insn ();
2039 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2040
2041 /* Copy the value to the return location
2042 unless it's already there. */
2043
2044 if (return_reg != val)
2045 emit_move_insn (return_reg, val);
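/* For a hard register, emit a USE so that flow analysis
   sees the register as live until the actual return. */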
2046 if (GET_CODE (return_reg) == REG
2047 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2048 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2049
2050 /* Does any pending block have cleanups? */
2051
2052 while (block && block->data.block.cleanups == 0)
2053 block = block->next;
2054
2055 /* If yes, use a goto to return, since that runs cleanups.
2056 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2057
2058 expand_null_return_1 (last_insn, block != 0);
2059 }
2060
2061 /* Output a return with no value. If LAST_INSN is nonzero,
2062 pretend that the return takes place after LAST_INSN.
2063 If USE_GOTO is nonzero then don't use a return instruction;
2064 go to the return label instead. This causes any cleanups
2065 of pending blocks to be executed normally. */
2066
2067 static void
2068 expand_null_return_1 (last_insn, use_goto)
2069 rtx last_insn;
2070 int use_goto;
2071 {
2072 rtx end_label = cleanup_label ? cleanup_label : return_label;
2073
2074 clear_pending_stack_adjust ();
2075 do_pending_stack_adjust ();
2076 last_expr_type = 0;
2077
2078 /* PCC-struct return always uses an epilogue. */
2079 if (current_function_returns_pcc_struct || use_goto)
2080 {
2081 if (end_label == 0)
2082 end_label = return_label = gen_label_rtx ();
2083 expand_goto_internal (0, end_label, last_insn);
2084 return;
2085 }
2086
2087 /* Otherwise output a simple return-insn if one is available,
2088 unless it won't do the job. */
2089 #ifdef HAVE_return
2090 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2091 {
2092 emit_jump_insn (gen_return ());
2093 emit_barrier ();
2094 return;
2095 }
2096 #endif
2097
2098 /* Otherwise jump to the epilogue. */
2099 expand_goto_internal (0, end_label, last_insn);
2100 }
2101 \f
2102 /* Generate RTL to evaluate the expression RETVAL and return it
2103 from the current function. */
2104
2105 void
2106 expand_return (retval)
2107 tree retval;
2108 {
2109 /* If there are any cleanups to be performed, then they will
2110 be inserted following LAST_INSN. It is desirable
2111 that the last_insn, for such purposes, should be the
2112 last insn before computing the return value. Otherwise, cleanups
2113 which call functions can clobber the return value. */
2114 /* ??? rms: I think that is erroneous, because in C++ it would
2115 run destructors on variables that might be used in the subsequent
2116 computation of the return value. */
2117 rtx last_insn = 0;
2118 register rtx val = 0;
2119 register rtx op0;
2120 tree retval_rhs;
2121 int cleanups;
2122 struct nesting *block;
2123
2124 /* If function wants no value, give it none. */
2125 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2126 {
2127 expand_expr (retval, 0, VOIDmode, 0);
2128 expand_null_return ();
2129 return;
2130 }
2131
2132 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2133 cleanups = any_pending_cleanups (1);
2134
2135 if (TREE_CODE (retval) == RESULT_DECL)
2136 retval_rhs = retval;
2137 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2138 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2139 retval_rhs = TREE_OPERAND (retval, 1);
2140 else if (TREE_TYPE (retval) == void_type_node)
2141 /* Recognize tail-recursive call to void function. */
2142 retval_rhs = retval;
2143 else
2144 retval_rhs = NULL_TREE;
2145
2146 /* Only use `last_insn' if there are cleanups which must be run. */
2147 if (cleanups || cleanup_label != 0)
2148 last_insn = get_last_insn ();
2149
2150 /* Distribute return down conditional expr if either of the sides
2151 may involve tail recursion (see test below). This enhances the number
2152 of tail recursions we see. Don't do this always, since it can produce
2153 sub-optimal code in some cases, and we already distribute assignments
2154 into conditional expressions when that would help. */
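
/* For example, `return p ? f (x) : g (x);' is rewritten here as two
   returns, `return f (x);' and `return g (x);', so that each arm can
   be recognized as a tail-recursive call when f or g is the current
   function. */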
2155
2156 if (optimize && retval_rhs != 0
2157 && frame_offset == 0
2158 && TREE_CODE (retval_rhs) == COND_EXPR
2159 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2160 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2161 {
2162 rtx label = gen_label_rtx ();
2163 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2164 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2165 DECL_RESULT (current_function_decl),
2166 TREE_OPERAND (retval_rhs, 1)));
2167 emit_label (label);
2168 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2169 DECL_RESULT (current_function_decl),
2170 TREE_OPERAND (retval_rhs, 2)));
2171 return;
2172 }
2173
2174 /* For tail-recursive call to current function,
2175 just jump back to the beginning.
2176 It's unsafe if any auto variable in this function
2177 has its address taken; for simplicity,
2178 require stack frame to be empty. */
2179 if (optimize && retval_rhs != 0
2180 && frame_offset == 0
2181 && TREE_CODE (retval_rhs) == CALL_EXPR
2182 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2183 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2184 /* Finish checking validity, and if valid emit code
2185 to set the argument variables for the new call. */
2186 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2187 DECL_ARGUMENTS (current_function_decl)))
2188 {
2189 if (tail_recursion_label == 0)
2190 {
2191 tail_recursion_label = gen_label_rtx ();
2192 emit_label_after (tail_recursion_label,
2193 tail_recursion_reentry);
2194 }
2195 expand_goto_internal (0, tail_recursion_label, last_insn);
2196 emit_barrier ();
2197 return;
2198 }
2199 #ifdef HAVE_return
2200 /* This optimization is safe if there are local cleanups
2201 because expand_null_return takes care of them.
2202 ??? I think it should also be safe when there is a cleanup label,
2203 because expand_null_return takes care of them, too.
2204 Any reason why not? */
2205 if (HAVE_return && cleanup_label == 0
2206 && ! current_function_returns_pcc_struct)
2207 {
2208 /* If this is return x == y; then generate
2209 if (x == y) return 1; else return 0;
2210 if we can do it with explicit return insns. */
2211 if (retval_rhs)
2212 switch (TREE_CODE (retval_rhs))
2213 {
2214 case EQ_EXPR:
2215 case NE_EXPR:
2216 case GT_EXPR:
2217 case GE_EXPR:
2218 case LT_EXPR:
2219 case LE_EXPR:
2220 case TRUTH_ANDIF_EXPR:
2221 case TRUTH_ORIF_EXPR:
2222 case TRUTH_AND_EXPR:
2223 case TRUTH_OR_EXPR:
2224 case TRUTH_NOT_EXPR:
2225 op0 = gen_label_rtx ();
2226 jumpifnot (retval_rhs, op0);
2227 expand_value_return (const1_rtx);
2228 emit_label (op0);
2229 expand_value_return (const0_rtx);
2230 return;
2231 }
2232 }
2233 #endif /* HAVE_return */
2234
2235 if (cleanups
2236 && retval_rhs != 0
2237 && TREE_TYPE (retval_rhs) != void_type_node
2238 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2239 {
2240 /* Calculate the return value into a pseudo reg. */
2241 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2242 emit_queue ();
2243 /* All temporaries have now been used. */
2244 free_temp_slots ();
2245 /* Return the calculated value, doing cleanups first. */
2246 expand_value_return (val);
2247 }
2248 else
2249 {
2250 /* No cleanups or no hard reg used;
2251 calculate value into hard return reg. */
2252 expand_expr (retval, 0, VOIDmode, 0);
2253 emit_queue ();
2254 free_temp_slots ();
2255 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2256 }
2257 }
2258
2259 /* Return 1 if the end of the generated RTL is not a barrier.
2260 This means code already compiled can drop through. */
2261
2262 int
2263 drop_through_at_end_p ()
2264 {
2265 rtx insn = get_last_insn ();
2266 while (insn && GET_CODE (insn) == NOTE)
2267 insn = PREV_INSN (insn);
2268 return insn && GET_CODE (insn) != BARRIER;
2269 }
2270 \f
2271 /* Emit code to alter this function's formal parms for a tail-recursive call.
2272 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2273 FORMALS is the chain of decls of formals.
2274 Return 1 if this can be done;
2275 otherwise return 0 and do not emit any code. */
2276
2277 static int
2278 tail_recursion_args (actuals, formals)
2279 tree actuals, formals;
2280 {
2281 register tree a = actuals, f = formals;
2282 register int i;
2283 register rtx *argvec;
2284
2285 /* Check that number and types of actuals are compatible
2286 with the formals. This is not always true in valid C code.
2287 Also check that no formal needs to be addressable
2288 and that all formals are scalars. */
2289
2290 /* Also count the args. */
2291
2292 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2293 {
2294 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2295 return 0;
2296 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2297 return 0;
2298 }
2299 if (a != 0 || f != 0)
2300 return 0;
2301
2302 /* Compute all the actuals. */
2303
2304 argvec = (rtx *) alloca (i * sizeof (rtx));
2305
2306 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2307 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2308
2309 /* Find which actual values refer to current values of previous formals.
2310 Copy each of them now, before any formal is changed. */
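
/* For example, when expanding the tail call f (y, x) within f (x, y),
   the actual for the second formal refers to the old value of x, which
   the first store would already have clobbered; copying that actual
   to a fresh pseudo first keeps the swap correct. */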
2311
2312 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2313 {
2314 int copy = 0;
2315 register int j;
2316 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2317 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2318 { copy = 1; break; }
2319 if (copy)
2320 argvec[i] = copy_to_reg (argvec[i]);
2321 }
2322
2323 /* Store the values of the actuals into the formals. */
2324
2325 for (f = formals, a = actuals, i = 0; f;
2326 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2327 {
2328 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2329 emit_move_insn (DECL_RTL (f), argvec[i]);
2330 else
2331 convert_move (DECL_RTL (f), argvec[i],
2332 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2333 }
2334
2335 free_temp_slots ();
2336 return 1;
2337 }
2338 \f
2339 /* Generate the RTL code for entering a binding contour.
2340 The variables are declared one by one, by calls to `expand_decl'.
2341
2342 EXIT_FLAG is nonzero if this construct should be visible to
2343 `exit_something'. */
2344
2345 void
2346 expand_start_bindings (exit_flag)
2347 int exit_flag;
2348 {
2349 struct nesting *thisblock = ALLOC_NESTING ();
2350
2351 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2352
2353 /* Make an entry on block_stack for the block we are entering. */
2354
2355 thisblock->next = block_stack;
2356 thisblock->all = nesting_stack;
2357 thisblock->depth = ++nesting_depth;
2358 thisblock->data.block.stack_level = 0;
2359 thisblock->data.block.cleanups = 0;
2360 thisblock->data.block.function_call_count = 0;
2361 #if 0
2362 if (block_stack)
2363 {
2364 if (block_stack->data.block.cleanups == NULL_TREE
2365 && (block_stack->data.block.outer_cleanups == NULL_TREE
2366 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2367 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2368 else
2369 thisblock->data.block.outer_cleanups
2370 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2371 block_stack->data.block.outer_cleanups);
2372 }
2373 else
2374 thisblock->data.block.outer_cleanups = 0;
2375 #endif
2376 #if 1
2377 if (block_stack
2378 && !(block_stack->data.block.cleanups == NULL_TREE
2379 && block_stack->data.block.outer_cleanups == NULL_TREE))
2380 thisblock->data.block.outer_cleanups
2381 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2382 block_stack->data.block.outer_cleanups);
2383 else
2384 thisblock->data.block.outer_cleanups = 0;
2385 #endif
2386 thisblock->data.block.label_chain = 0;
2387 thisblock->data.block.innermost_stack_block = stack_block_stack;
2388 thisblock->data.block.first_insn = note;
2389 thisblock->data.block.block_start_count = ++block_start_count;
2390 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2391 block_stack = thisblock;
2392 nesting_stack = thisblock;
2393
2394 /* Make a new level for allocating stack slots. */
2395 push_temp_slots ();
2396 }
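
/* For example, a front end might expand the compound statement

       { int x = e; body }

   with the call sequence below (a sketch):

       expand_start_bindings (0);
       expand_decl (x_decl);
       expand_decl_init (x_decl);
       ... expand body ...
       expand_end_bindings (x_decl, 1, 0);  */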
2397
2398 /* Generate RTL code to terminate a binding contour.
2399 VARS is the chain of VAR_DECL nodes
2400 for the variables bound in this contour.
2401 MARK_ENDS is nonzero if we should put a note at the beginning
2402 and end of this binding contour.
2403
2404 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2405 (That is true automatically if the contour has a saved stack level.) */
2406
2407 void
2408 expand_end_bindings (vars, mark_ends, dont_jump_in)
2409 tree vars;
2410 int mark_ends;
2411 int dont_jump_in;
2412 {
2413 register struct nesting *thisblock = block_stack;
2414 register tree decl;
2415
2416 if (warn_unused)
2417 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2418 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2419 warning_with_decl (decl, "unused variable `%s'");
2420
2421 /* Mark the beginning and end of the scope if requested. */
2422
2423 if (mark_ends)
2424 emit_note (0, NOTE_INSN_BLOCK_END);
2425 else
2426 /* Get rid of the beginning-mark if we don't make an end-mark. */
2427 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2428
2429 if (thisblock->exit_label)
2430 {
2431 do_pending_stack_adjust ();
2432 emit_label (thisblock->exit_label);
2433 }
2434
2435 /* If necessary, make a handler for nonlocal gotos taking
2436 place in the function calls in this block. */
2437 if (function_call_count != thisblock->data.block.function_call_count
2438 && nonlocal_labels
2439 /* Make handler for outermost block
2440 if there were any nonlocal gotos to this function. */
2441 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2442 /* Make handler for inner block if it has something
2443 special to do when you jump out of it. */
2444 : (thisblock->data.block.cleanups != 0
2445 || thisblock->data.block.stack_level != 0)))
2446 {
2447 tree link;
2448 rtx afterward = gen_label_rtx ();
2449 rtx handler_label = gen_label_rtx ();
2450 rtx save_receiver = gen_reg_rtx (Pmode);
2451
2452 /* Don't let jump_optimize delete the handler. */
2453 LABEL_PRESERVE_P (handler_label) = 1;
2454
2455 /* Record the handler address in the stack slot for that purpose,
2456 during this block, saving and restoring the outer value. */
2457 if (thisblock->next != 0)
2458 {
2459 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2460 emit_insn_before (gen_move_insn (save_receiver,
2461 nonlocal_goto_handler_slot),
2462 thisblock->data.block.first_insn);
2463 }
2464 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2465 gen_rtx (LABEL_REF, Pmode,
2466 handler_label)),
2467 thisblock->data.block.first_insn);
2468
2469 /* Jump around the handler; it runs only when specially invoked. */
2470 emit_jump (afterward);
2471 emit_label (handler_label);
2472
2473 #ifdef HAVE_nonlocal_goto
2474 if (! HAVE_nonlocal_goto)
2475 #endif
2476 /* First adjust our frame pointer to its actual value. It was
2477 previously set to the start of the virtual area corresponding to
2478 the stacked variables when we branched here and now needs to be
2479 adjusted to the actual hardware fp value.
2480
2481 Assignments to virtual registers are converted by
2482 instantiate_virtual_regs into the corresponding assignment
2483 to the underlying register (fp in this case) that makes
2484 the original assignment true.
2485 So the following insn will actually be
2486 decrementing fp by STARTING_FRAME_OFFSET. */
2487 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2488
2489 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2490 if (fixed_regs[ARG_POINTER_REGNUM])
2491 {
2492 /* Now restore our arg pointer from the address at which it was saved
2493 in our stack frame.
2494 If space has not been allocated for it yet, make some now. */
2495 if (arg_pointer_save_area == 0)
2496 arg_pointer_save_area
2497 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2498 emit_move_insn (virtual_incoming_args_rtx,
2499 /* We need a pseudo here,
2500 or else instantiate_virtual_regs_1 complains. */
2501 copy_to_reg (arg_pointer_save_area));
2502 }
2503 #endif
2504
2505 /* The handler expects the desired label address in the static chain
2506 register. It tests the address and does an appropriate jump
2507 to whatever label is desired. */
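/* Schematically, the dispatch emitted below is

       handler: if (static_chain == address of LAB1) goto LAB1;
                if (static_chain == address of LAB2) goto LAB2;
                abort ();

   with one comparison for each nonlocal label still reachable. */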
2508 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2509 /* Skip any labels we shouldn't be able to jump to from here. */
2510 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2511 {
2512 rtx not_this = gen_label_rtx ();
2513 rtx this = gen_label_rtx ();
2514 do_jump_if_equal (static_chain_rtx,
2515 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2516 this, 0);
2517 emit_jump (not_this);
2518 emit_label (this);
2519 expand_goto (TREE_VALUE (link));
2520 emit_label (not_this);
2521 }
2522 /* If label is not recognized, abort. */
2523 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2524 VOIDmode, 0);
2525 emit_label (afterward);
2526 }
2527
2528 /* Don't allow jumping into a block that has cleanups or a stack level. */
2529 if (dont_jump_in
2530 || thisblock->data.block.stack_level != 0
2531 || thisblock->data.block.cleanups != 0)
2532 {
2533 struct label_chain *chain;
2534
2535 /* Any labels in this block are no longer valid to go to.
2536 Mark them to cause an error message. */
2537 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2538 {
2539 DECL_TOO_LATE (chain->label) = 1;
2540 /* If any goto without a fixup came to this label,
2541 that must be an error, because gotos without fixups
2542 come from outside all saved stack-levels and all cleanups. */
2543 if (TREE_ADDRESSABLE (chain->label))
2544 error_with_decl (chain->label,
2545 "label `%s' used before containing binding contour");
2546 }
2547 }
2548
2549 /* Restore stack level in effect before the block
2550 (only if variable-size objects allocated). */
2551 /* Perform any cleanups associated with the block. */
2552
2553 if (thisblock->data.block.stack_level != 0
2554 || thisblock->data.block.cleanups != 0)
2555 {
2556 /* Don't let cleanups affect ({...}) constructs. */
2557 int old_expr_stmts_for_value = expr_stmts_for_value;
2558 rtx old_last_expr_value = last_expr_value;
2559 tree old_last_expr_type = last_expr_type;
2560 expr_stmts_for_value = 0;
2561
2562 /* Do the cleanups. */
2563 expand_cleanups (thisblock->data.block.cleanups, 0);
2564 do_pending_stack_adjust ();
2565
2566 expr_stmts_for_value = old_expr_stmts_for_value;
2567 last_expr_value = old_last_expr_value;
2568 last_expr_type = old_last_expr_type;
2569
2570 /* Restore the stack level. */
2571
2572 if (thisblock->data.block.stack_level != 0)
2573 {
2574 emit_move_insn (stack_pointer_rtx,
2575 thisblock->data.block.stack_level);
2576 if (nonlocal_goto_stack_level != 0)
2577 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2578 }
2579
2580 /* Any gotos out of this block must also do these things.
2581 Also report any gotos with fixups that came to labels in this level. */
2582 fixup_gotos (thisblock,
2583 thisblock->data.block.stack_level,
2584 thisblock->data.block.cleanups,
2585 thisblock->data.block.first_insn,
2586 dont_jump_in);
2587 }
2588
2589 /* If doing stupid register allocation, make sure lives of all
2590 register variables declared here extend thru end of scope. */
2591
2592 if (obey_regdecls)
2593 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2594 {
2595 rtx rtl = DECL_RTL (decl);
2596 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2597 use_variable (rtl);
2598 }
2599
2600 /* Restore block_stack level for containing block. */
2601
2602 stack_block_stack = thisblock->data.block.innermost_stack_block;
2603 POPSTACK (block_stack);
2604
2605 /* Pop the stack slot nesting and free any slots at this level. */
2606 pop_temp_slots ();
2607 }
2608 \f
2609 /* Generate RTL for the automatic variable declaration DECL.
2610 (Other kinds of declarations are simply ignored if seen here.)
2611 CLEANUP is an expression to be executed at exit from this binding contour;
2612 for example, in C++, it might call the destructor for this variable.
2613
2614 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2615 either before or after calling `expand_decl' but before compiling
2616 any subsequent expressions. This is because CLEANUP may be expanded
2617 more than once, on different branches of execution.
2618 For the same reason, CLEANUP may not contain a CALL_EXPR
2619 except as its topmost node--else `preexpand_calls' would get confused.
2620
2621 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2622 that is not associated with any particular variable.
2623
2624 There is no special support here for C++ constructors.
2625 They should be handled by the proper code in DECL_INITIAL. */
2626
2627 void
2628 expand_decl (decl)
2629 register tree decl;
2630 {
2631 struct nesting *thisblock = block_stack;
2632 tree type = TREE_TYPE (decl);
2633
2634 /* Only automatic variables need any expansion done.
2635 Static and external variables, and external functions,
2636 will be handled by `assemble_variable' (called from finish_decl).
2637 TYPE_DECL and CONST_DECL require nothing.
2638 PARM_DECLs are handled in `assign_parms'. */
2639
2640 if (TREE_CODE (decl) != VAR_DECL)
2641 return;
2642 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2643 return;
2644
2645 /* Create the RTL representation for the variable. */
2646
2647 if (type == error_mark_node)
2648 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2649 else if (DECL_SIZE (decl) == 0)
2650 /* Variable with incomplete type. */
2651 {
2652 if (DECL_INITIAL (decl) == 0)
2653 /* Error message was already done; now avoid a crash. */
2654 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2655 else
2656 /* An initializer is going to decide the size of this array.
2657 Until we know the size, represent its address with a reg. */
2658 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2659 }
2660 else if (DECL_MODE (decl) != BLKmode
2661 /* If -ffloat-store, don't put explicit float vars
2662 into regs. */
2663 && !(flag_float_store
2664 && TREE_CODE (type) == REAL_TYPE)
2665 && ! TREE_THIS_VOLATILE (decl)
2666 && ! TREE_ADDRESSABLE (decl)
2667 && (TREE_REGDECL (decl) || ! obey_regdecls))
2668 {
2669 /* Automatic variable that can go in a register. */
2670 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2671 if (TREE_CODE (type) == POINTER_TYPE)
2672 mark_reg_pointer (DECL_RTL (decl));
2673 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2674 }
2675 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2676 {
2677 /* Variable of fixed size that goes on the stack. */
2678 rtx oldaddr = 0;
2679 rtx addr;
2680
2681 /* If we previously made RTL for this decl, it must be an array
2682 whose size was determined by the initializer.
2683 The old address was a register; set that register now
2684 to the proper address. */
2685 if (DECL_RTL (decl) != 0)
2686 {
2687 if (GET_CODE (DECL_RTL (decl)) != MEM
2688 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2689 abort ();
2690 oldaddr = XEXP (DECL_RTL (decl), 0);
2691 }
2692
2693 DECL_RTL (decl)
2694 = assign_stack_temp (DECL_MODE (decl),
2695 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2696 + BITS_PER_UNIT - 1)
2697 / BITS_PER_UNIT),
2698 1);
2699
2700 /* Set alignment we actually gave this decl. */
2701 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2702 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2703
2704 if (oldaddr)
2705 {
2706 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2707 if (addr != oldaddr)
2708 emit_move_insn (oldaddr, addr);
2709 }
2710
2711 /* If this is a memory ref that contains aggregate components,
2712 mark it as such for cse and loop optimize. */
2713 MEM_IN_STRUCT_P (DECL_RTL (decl))
2714 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2715 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2716 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2717 #if 0
2718 /* If this is in memory because of -ffloat-store,
2719 set the volatile bit, to prevent optimizations from
2720 undoing the effects. */
2721 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2722 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2723 #endif
2724 }
2725 else
2726 /* Dynamic-size object: must push space on the stack. */
2727 {
2728 rtx address, size;
2729
2730 /* Record the stack pointer on entry to the block, if we have
2731 not already done so. */
2732 if (thisblock->data.block.stack_level == 0)
2733 {
2734 do_pending_stack_adjust ();
2735 thisblock->data.block.stack_level
2736 = copy_to_reg (stack_pointer_rtx);
2737 stack_block_stack = thisblock;
2738 }
2739
2740 /* Compute the variable's size, in bytes. */
2741 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2742 DECL_SIZE (decl),
2743 size_int (BITS_PER_UNIT)),
2744 0, VOIDmode, 0);
2745 free_temp_slots ();
2746
2747 /* Allocate space on the stack for the variable. */
2748 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2749
2750 if (nonlocal_goto_stack_level != 0)
2751 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2752
2753 /* Reference the variable indirect through that rtx. */
2754 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2755
2756 /* Indicate the alignment we actually gave this variable. */
2757 #ifdef STACK_BOUNDARY
2758 DECL_ALIGN (decl) = STACK_BOUNDARY;
2759 #else
2760 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2761 #endif
2762 }
2763
2764 if (TREE_THIS_VOLATILE (decl))
2765 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2766 if (TREE_READONLY (decl))
2767 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2768
2769 /* If doing stupid register allocation, make sure life of any
2770 register variable starts here, at the start of its scope. */
2771
2772 if (obey_regdecls)
2773 use_variable (DECL_RTL (decl));
2774 }
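
/* To summarize the cases above (an informal sketch):

       int i;      pseudo register, unless the variable is volatile,
                   addressable, or (for floats) -ffloat-store is on;
       int a[10];  fixed-size stack slot from assign_stack_temp;
       int a[n];   dynamically allocated stack space from
                   allocate_dynamic_stack_space, with the stack level
                   on entry to the block saved for restoration on exit. */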
2775 \f
2776 /* Emit code to perform the initialization of a declaration DECL. */
2777
2778 void
2779 expand_decl_init (decl)
2780 tree decl;
2781 {
2782 int was_used = TREE_USED (decl);
2783
2784 if (TREE_STATIC (decl))
2785 return;
2786
2787 /* Compute and store the initial value now. */
2788
2789 if (DECL_INITIAL (decl) == error_mark_node)
2790 {
2791 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2792 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2793 || code == POINTER_TYPE)
2794 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2795 0, 0);
2796 emit_queue ();
2797 }
2798 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2799 {
2800 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2801 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2802 emit_queue ();
2803 }
2804
2805 /* Don't let the initialization count as "using" the variable. */
2806 TREE_USED (decl) = was_used;
2807
2808 /* Free any temporaries we made while initializing the decl. */
2809 free_temp_slots ();
2810 }
2811
2812 /* CLEANUP is an expression to be executed at exit from this binding contour;
2813 for example, in C++, it might call the destructor for this variable.
2814
2815 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2816 either before or after calling `expand_decl' but before compiling
2817 any subsequent expressions. This is because CLEANUP may be expanded
2818 more than once, on different branches of execution.
2819 For the same reason, CLEANUP may not contain a CALL_EXPR
2820 except as its topmost node--else `preexpand_calls' would get confused.
2821
2822 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2823 that is not associated with any particular variable. */
2824
2825 int
2826 expand_decl_cleanup (decl, cleanup)
2827 tree decl, cleanup;
2828 {
2829 struct nesting *thisblock = block_stack;
2830
2831 /* Error if we are not in any block. */
2832 if (thisblock == 0)
2833 return 0;
2834
2835 /* Record the cleanup if there is one. */
2836
2837 if (cleanup != 0)
2838 {
2839 thisblock->data.block.cleanups
2840 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2841 /* If this block has a cleanup, it belongs in stack_block_stack. */
2842 stack_block_stack = thisblock;
2843 }
2844 return 1;
2845 }
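
/* For example, a C++ front end expanding `X x;' in a block can call
   expand_decl for x, expand_decl_init for its initialization, and then
   expand_decl_cleanup with a call to X's destructor as CLEANUP, so the
   destructor runs whenever the binding contour is exited
   (a sketch; actual C++ usage differs in detail). */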
2846 \f
2847 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2848 DECL_ELTS is the list of elements that belong to DECL's type.
2849 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2850
2851 void
2852 expand_anon_union_decl (decl, cleanup, decl_elts)
2853 tree decl, cleanup, decl_elts;
2854 {
2855 struct nesting *thisblock = block_stack;
2856 rtx x;
2857
2858 expand_decl (decl);
expand_decl_cleanup (decl, cleanup);
2859 x = DECL_RTL (decl);
2860
2861 while (decl_elts)
2862 {
2863 tree decl_elt = TREE_VALUE (decl_elts);
2864 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2865 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2866
2867 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2868 instead create a new MEM rtx with the proper mode. */
2869 if (GET_CODE (x) == MEM)
2870 {
2871 if (mode == GET_MODE (x))
2872 DECL_RTL (decl_elt) = x;
2873 else
2874 {
2875 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2876 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2877 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2878 }
2879 }
2880 else if (GET_CODE (x) == REG)
2881 {
2882 if (mode == GET_MODE (x))
2883 DECL_RTL (decl_elt) = x;
2884 else
2885 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2886 }
2887 else
2888 abort ();
2889
2890 /* Record the cleanup if there is one. */
2891
2892 if (cleanup != 0)
2893 thisblock->data.block.cleanups
2894 = temp_tree_cons (decl_elt, cleanup_elt,
2895 thisblock->data.block.cleanups);
2896
2897 decl_elts = TREE_CHAIN (decl_elts);
2898 }
2899 }
2900 \f
2901 /* Expand a list of cleanups LIST.
2902 Elements may be expressions or may be nested lists.
2903
2904 If DONT_DO is nonnull, then any list-element
2905 whose TREE_PURPOSE matches DONT_DO is omitted.
2906 This is sometimes used to avoid a cleanup associated with
2907 a value that is being returned out of the scope. */
2908
2909 static void
2910 expand_cleanups (list, dont_do)
2911 tree list;
2912 tree dont_do;
2913 {
2914 tree tail;
2915 for (tail = list; tail; tail = TREE_CHAIN (tail))
2916 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2917 {
2918 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2919 expand_cleanups (TREE_VALUE (tail), dont_do);
2920 else
2921 {
2922 /* Cleanups may be run multiple times. For example,
2923 when exiting a binding contour, we expand the
2924 cleanups associated with that contour. When a goto
2925 within that binding contour has a target outside that
2926 contour, it will expand all cleanups from its scope to
2927 the target. Though the cleanups are expanded multiple
2928 times, the control paths are non-overlapping so the
2929 cleanups will not be executed twice. */
2930 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2931 free_temp_slots ();
2932 }
2933 }
2934 }
2935
2936 /* Expand a list of cleanups for a goto fixup.
2937 The expansion is put into the insn chain after the insn *BEFORE_JUMP
2938 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
2939
2940 static void
2941 fixup_cleanups (list, before_jump)
2942 tree list;
2943 rtx *before_jump;
2944 {
2945 rtx beyond_jump = get_last_insn ();
2946 rtx new_before_jump;
2947
2948 expand_cleanups (list, 0);
2949 /* Pop any pushes done in the cleanups,
2950 in case function is about to return. */
2951 do_pending_stack_adjust ();
2952
2953 new_before_jump = get_last_insn ();
2954
2955 if (beyond_jump != new_before_jump)
2956 {
2957 /* If cleanups expand to nothing, don't reorder. */
2958 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
2959 *before_jump = new_before_jump;
2960 }
2961 }
2962
2963 /* Move all cleanups from the current block_stack
2964 to the containing block_stack, where they are assumed to
2965 have been created. If anything can cause a temporary to
2966 be created, but not expanded for more than one level of
2967 block_stacks, then this code will have to change. */
2968
2969 void
2970 move_cleanups_up ()
2971 {
2972 struct nesting *block = block_stack;
2973 struct nesting *outer = block->next;
2974
2975 outer->data.block.cleanups
2976 = chainon (block->data.block.cleanups,
2977 outer->data.block.cleanups);
2978 block->data.block.cleanups = 0;
2979 }
2980
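/* Return the list of cleanups of the innermost binding contour,
   or 0 if not inside any binding contour. */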
2981 tree
2982 last_cleanup_this_contour ()
2983 {
2984 if (block_stack == 0)
2985 return 0;
2986
2987 return block_stack->data.block.cleanups;
2988 }
2989
2990 /* Return 1 if there are any pending cleanups at this point.
2991 If THIS_CONTOUR is nonzero, check the current contour as well.
2992 Otherwise, look only at the contours that enclose this one. */
2993
2994 int
2995 any_pending_cleanups (this_contour)
2996 int this_contour;
2997 {
2998 struct nesting *block;
2999
3000 if (block_stack == 0)
3001 return 0;
3002
3003 if (this_contour && block_stack->data.block.cleanups != NULL)
3004 return 1;
3005 if (block_stack->data.block.cleanups == 0
3006 && (block_stack->data.block.outer_cleanups == 0
3007 #if 0
3008 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3009 #endif
3010 ))
3011 return 0;
3012
3013 for (block = block_stack->next; block; block = block->next)
3014 if (block->data.block.cleanups != 0)
3015 return 1;
3016
3017 return 0;
3018 }
3019 \f
3020 /* Enter a case (Pascal) or switch (C) statement.
3021 Push a block onto case_stack and nesting_stack
3022 to accumulate the case-labels that are seen
3023 and to record the labels generated for the statement.
3024
3025 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3026 Otherwise, this construct is transparent for `exit_something'.
3027
3028 EXPR is the index-expression to be dispatched on.
3029 TYPE is its nominal type. We could simply convert EXPR to this type,
3030 but instead we take short cuts. */
3031
3032 void
3033 expand_start_case (exit_flag, expr, type, printname)
3034 int exit_flag;
3035 tree expr;
3036 tree type;
3037 char *printname;
3038 {
3039 register struct nesting *thiscase = ALLOC_NESTING ();
3040
3041 /* Make an entry on case_stack for the case we are entering. */
3042
3043 thiscase->next = case_stack;
3044 thiscase->all = nesting_stack;
3045 thiscase->depth = ++nesting_depth;
3046 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3047 thiscase->data.case_stmt.case_list = 0;
3048 thiscase->data.case_stmt.index_expr = expr;
3049 thiscase->data.case_stmt.nominal_type = type;
3050 thiscase->data.case_stmt.default_label = 0;
3051 thiscase->data.case_stmt.num_ranges = 0;
3052 thiscase->data.case_stmt.printname = printname;
3053 thiscase->data.case_stmt.seenlabel = 0;
3054 case_stack = thiscase;
3055 nesting_stack = thiscase;
3056
3057 do_pending_stack_adjust ();
3058
3059 /* Make sure case_stmt.start points to something that won't
3060 need any transformation before expand_end_case. */
3061 if (GET_CODE (get_last_insn ()) != NOTE)
3062 emit_note (0, NOTE_INSN_DELETED);
3063
3064 thiscase->data.case_stmt.start = get_last_insn ();
3065 }
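
/* For example, a C front end might expand

       switch (e) { case 1: s1; break; default: s2; }

   with the call sequence below (a sketch; LAB1 and DEFLAB are
   LABEL_DECL nodes made by the front end):

       expand_start_case (1, e, type, "switch statement");
       pushcase (build_int_2 (1, 0), LAB1, &duplicate);
       ... expand s1 ...
       expand_exit_something ();
       pushcase (NULL_TREE, DEFLAB, &duplicate);
       ... expand s2 ...
       expand_end_case (e);  */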
3066
3067 /* Start a "dummy case statement" within which case labels are invalid
3068 and are not connected to any larger real case statement.
3069 This can be used if you don't want to let a case statement jump
3070 into the middle of certain kinds of constructs. */
3071
3072 void
3073 expand_start_case_dummy ()
3074 {
3075 register struct nesting *thiscase = ALLOC_NESTING ();
3076
3077 /* Make an entry on case_stack for the dummy. */
3078
3079 thiscase->next = case_stack;
3080 thiscase->all = nesting_stack;
3081 thiscase->depth = ++nesting_depth;
3082 thiscase->exit_label = 0;
3083 thiscase->data.case_stmt.case_list = 0;
3084 thiscase->data.case_stmt.start = 0;
3085 thiscase->data.case_stmt.nominal_type = 0;
3086 thiscase->data.case_stmt.default_label = 0;
3087 thiscase->data.case_stmt.num_ranges = 0;
3088 case_stack = thiscase;
3089 nesting_stack = thiscase;
3090 }
3091
3092 /* End a dummy case statement. */
3093
3094 void
3095 expand_end_case_dummy ()
3096 {
3097 POPSTACK (case_stack);
3098 }
3099
3100 /* Return the data type of the index-expression
3101 of the innermost case statement, or null if none. */
3102
3103 tree
3104 case_index_expr_type ()
3105 {
3106 if (case_stack)
3107 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3108 return 0;
3109 }
3110 \f
3111 /* Accumulate one case or default label inside a case or switch statement.
3112 VALUE is the value of the case (a null pointer, for a default label).
3113
3114 If not currently inside a case or switch statement, return 1 and do
3115 nothing. The caller will print a language-specific error message.
3116 If VALUE is a duplicate or overlaps, return 2 and do nothing
3117 except store the (first) duplicate node in *DUPLICATE.
3118 If VALUE is out of range, return 3 and do nothing.
3119 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3120 Return 0 on success.
3121
3122 Extended to handle range statements. */
3123
3124 int
3125 pushcase (value, label, duplicate)
3126 register tree value;
3127 register tree label;
3128 tree *duplicate;
3129 {
3130 register struct case_node **l;
3131 register struct case_node *n;
3132 tree index_type;
3133 tree nominal_type;
3134
3135 /* Fail if not inside a real case statement. */
3136 if (! (case_stack && case_stack->data.case_stmt.start))
3137 return 1;
3138
3139 if (stack_block_stack
3140 && stack_block_stack->depth > case_stack->depth)
3141 return 5;
3142
3143 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3144 nominal_type = case_stack->data.case_stmt.nominal_type;
3145
3146 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3147 if (index_type == error_mark_node)
3148 return 0;
3149
3150 /* Convert VALUE to the type in which the comparisons are nominally done. */
3151 if (value != 0)
3152 value = convert (nominal_type, value);
3153
3154 /* If this is the first label, warn if any insns have been emitted. */
3155 if (case_stack->data.case_stmt.seenlabel == 0)
3156 {
3157 rtx insn;
3158 for (insn = case_stack->data.case_stmt.start;
3159 insn;
3160 insn = NEXT_INSN (insn))
3161 {
3162 if (GET_CODE (insn) == CODE_LABEL)
3163 break;
3164 if (GET_CODE (insn) != NOTE
3165 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3166 {
3167 warning ("unreachable code at beginning of %s",
3168 case_stack->data.case_stmt.printname);
3169 break;
3170 }
3171 }
3172 }
3173 case_stack->data.case_stmt.seenlabel = 1;
3174
3175 /* Fail if this value is out of range for the actual type of the index
3176 (which may be narrower than NOMINAL_TYPE). */
3177 if (value != 0 && ! int_fits_type_p (value, index_type))
3178 return 3;
3179
3180 /* Fail if this is a duplicate or overlaps another entry. */
3181 if (value == 0)
3182 {
3183 if (case_stack->data.case_stmt.default_label != 0)
3184 {
3185 *duplicate = case_stack->data.case_stmt.default_label;
3186 return 2;
3187 }
3188 case_stack->data.case_stmt.default_label = label;
3189 }
3190 else
3191 {
3192 /* Find the elt in the chain before which to insert the new value,
3193 to keep the chain sorted in increasing order.
3194 But report an error if this element is a duplicate. */
3195 for (l = &case_stack->data.case_stmt.case_list;
3196 /* Keep going past elements distinctly less than VALUE. */
3197 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3198 l = &(*l)->right)
3199 ;
3200 if (*l)
3201 {
3202 /* Element we will insert before must be distinctly greater;
3203 overlap means error. */
3204 if (! tree_int_cst_lt (value, (*l)->low))
3205 {
3206 *duplicate = (*l)->code_label;
3207 return 2;
3208 }
3209 }
3210
3211 /* Add this label to the chain, and succeed.
3212 Copy VALUE so it is on temporary rather than momentary
3213 obstack and will thus survive till the end of the case statement. */
3214 n = (struct case_node *) oballoc (sizeof (struct case_node));
3215 n->left = 0;
3216 n->right = *l;
3217 n->high = n->low = copy_node (value);
3218 n->code_label = label;
3219 *l = n;
3220 }
3221
3222 expand_label (label);
3223 return 0;
3224 }
3225
3226 /* Like pushcase but this case applies to all values
3227 between VALUE1 and VALUE2 (inclusive).
3228 The return value is the same as that of pushcase
3229 but there is one additional error code:
3230 4 means the specified range was empty. */
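/* For example, the GNU C extension `case 1 ... 3:' could be expanded
   by calling pushcase_range (one, three, LABEL, &duplicate), where
   ONE and THREE are INTEGER_CST nodes (a sketch). */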
3231
3232 int
3233 pushcase_range (value1, value2, label, duplicate)
3234 register tree value1, value2;
3235 register tree label;
3236 tree *duplicate;
3237 {
3238 register struct case_node **l;
3239 register struct case_node *n;
3240 tree index_type;
3241 tree nominal_type;
3242
3243 /* Fail if not inside a real case statement. */
3244 if (! (case_stack && case_stack->data.case_stmt.start))
3245 return 1;
3246
3247 if (stack_block_stack
3248 && stack_block_stack->depth > case_stack->depth)
3249 return 5;
3250
3251 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3252 nominal_type = case_stack->data.case_stmt.nominal_type;
3253
3254 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3255 if (index_type == error_mark_node)
3256 return 0;
3257
3258 /* If this is the first label, warn if any insns have been emitted. */
3259 if (case_stack->data.case_stmt.seenlabel == 0)
3260 {
3261 rtx insn;
3262 for (insn = case_stack->data.case_stmt.start;
3263 insn;
3264 insn = NEXT_INSN (insn))
3265 {
3266 if (GET_CODE (insn) == CODE_LABEL)
3267 break;
3268 if (GET_CODE (insn) != NOTE
3269 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3270 {
3271 warning ("unreachable code at beginning of %s",
3272 case_stack->data.case_stmt.printname);
3273 break;
3274 }
3275 }
3276 }
3277 case_stack->data.case_stmt.seenlabel = 1;
3278
3279 /* Convert VALUEs to type in which the comparisons are nominally done. */
3280 if (value1 == 0) /* Negative infinity. */
3281 value1 = TYPE_MIN_VALUE (index_type);
3282 value1 = convert (nominal_type, value1);
3283
3284 if (value2 == 0) /* Positive infinity. */
3285 value2 = TYPE_MAX_VALUE (index_type);
3286 value2 = convert (nominal_type, value2);
3287
3288 /* Fail if these values are out of range. */
3289 if (! int_fits_type_p (value1, index_type))
3290 return 3;
3291
3292 if (! int_fits_type_p (value2, index_type))
3293 return 3;
3294
3295 /* Fail if the range is empty. */
3296 if (tree_int_cst_lt (value2, value1))
3297 return 4;
3298
3299 /* If the bounds are equal, turn this into the one-value case. */
3300 if (tree_int_cst_equal (value1, value2))
3301 return pushcase (value1, label, duplicate);
3302
3303 /* Find the elt in the chain before which to insert the new value,
3304 to keep the chain sorted in increasing order.
3305 But report an error if this element is a duplicate. */
3306 for (l = &case_stack->data.case_stmt.case_list;
3307 /* Keep going past elements distinctly less than this range. */
3308 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3309 l = &(*l)->right)
3310 ;
3311 if (*l)
3312 {
3313 /* Element we will insert before must be distinctly greater;
3314 overlap means error. */
3315 if (! tree_int_cst_lt (value2, (*l)->low))
3316 {
3317 *duplicate = (*l)->code_label;
3318 return 2;
3319 }
3320 }
3321
3322 /* Add this label to the chain, and succeed.
3323 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3324 obstack and will thus survive till the end of the case statement. */
3325
3326 n = (struct case_node *) oballoc (sizeof (struct case_node));
3327 n->left = 0;
3328 n->right = *l;
3329 n->low = copy_node (value1);
3330 n->high = copy_node (value2);
3331 n->code_label = label;
3332 *l = n;
3333
3334 expand_label (label);
3335
3336 case_stack->data.case_stmt.num_ranges++;
3337
3338 return 0;
3339 }
3340 \f
3341 /* Called when the index of a switch statement is an enumerated type
3342 and there is no default label.
3343
3344 Checks that all enumeration literals are covered by the case
3345 expressions of a switch. Also, warn if there are any extra
3346 switch cases that are *not* elements of the enumerated type.
3347
3348 If all enumeration literals were covered by the case expressions,
3349 turn one of the expressions into the default expression since it should
3350 not be possible to fall through such a switch. */
3351
3352 void
3353 check_for_full_enumeration_handling (type)
3354 tree type;
3355 {
3356 register struct case_node *n;
3357 register struct case_node **l;
3358 register tree chain;
3359 int all_values = 1;
3360
3361 /* The time complexity of this loop is currently O(N * M), with
3362 N being the number of enumerals in the enumerated type, and
3363 M being the number of case expressions in the switch. */
3364
3365 for (chain = TYPE_VALUES (type);
3366 chain;
3367 chain = TREE_CHAIN (chain))
3368 {
3369 /* Find a match between enumeral and case expression, if possible.
3370 Quit looking when we've gone too far (since case expressions
3371 are kept sorted in ascending order). Warn about enumerals not
3372 handled in the switch statement case expression list. */
3373
3374 for (n = case_stack->data.case_stmt.case_list;
3375 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3376 n = n->right)
3377 ;
3378
3379 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3380 {
3381 if (warn_switch)
3382 warning ("enumerated value `%s' not handled in switch",
3383 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3384 all_values = 0;
3385 }
3386 }
3387
3388 /* Now we go the other way around; we warn if there are case
3389 expressions that don't correspond to enumerals. This can
3390 occur since C and C++ don't enforce type-checking of
3391 assignments to enumeration variables. */
3392
3393 if (warn_switch)
3394 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3395 {
3396 for (chain = TYPE_VALUES (type);
3397 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3398 chain = TREE_CHAIN (chain))
3399 ;
3400
3401 if (!chain)
3402 warning ("case value `%d' not in enumerated type `%s'",
3403 TREE_INT_CST_LOW (n->low),
3404 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3405 == IDENTIFIER_NODE)
3406 ? TYPE_NAME (type)
3407 : DECL_NAME (TYPE_NAME (type))));
3408 }
3409
3410 /* If all values were found as case labels, make one of them the default
3411 label. Thus, this switch will never fall through. We arbitrarily pick
3412 the last one to make the default since this is likely the most
3413 efficient choice. */
3414
3415 if (all_values)
3416 {
3417 for (l = &case_stack->data.case_stmt.case_list;
3418 (*l)->right != 0;
3419 l = &(*l)->right)
3420 ;
3421
3422 case_stack->data.case_stmt.default_label = (*l)->code_label;
3423 *l = 0;
3424 }
3425 }
3426 \f
3427 /* Terminate a case (Pascal) or switch (C) statement
3428 in which ORIG_INDEX is the expression to be tested.
3429 Generate the code to test it and jump to the right place. */
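
/* A sketch of the possible outputs (hypothetical source; the choice is
   target-dependent and made by the heuristics below):

     switch (i) { case 1: case 2: case 3: case 4: ... }

   is dense enough to be emitted as a dispatch table (casesi or
   tablejump), whereas

     switch (i) { case 1: case 1000: ... }

   is sparse and is emitted as compares and conditional branches by
   the decision-tree code further down.  */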
3430
3431 void
3432 expand_end_case (orig_index)
3433 tree orig_index;
3434 {
3435 tree minval, maxval, range;
3436 rtx default_label = 0;
3437 register struct case_node *n;
3438 int count;
3439 rtx index;
3440 rtx table_label = gen_label_rtx ();
3441 int ncases;
3442 rtx *labelvec;
3443 register int i;
3444 rtx before_case;
3445 register struct nesting *thiscase = case_stack;
3446 tree index_expr = thiscase->data.case_stmt.index_expr;
3447 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3448
3449 do_pending_stack_adjust ();
3450
3451 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3452 if (TREE_TYPE (index_expr) != error_mark_node)
3453 {
3454 /* If switch expression was an enumerated type, check that all
3455 enumeration literals are covered by the cases.
3456 No sense trying this if there's a default case, however. */
3457
3458 if (!thiscase->data.case_stmt.default_label
3459 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3460 && TREE_CODE (index_expr) != INTEGER_CST)
3461 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3462
3463 /* If no case labels were seen, any insns emitted in the body are unreachable; warn about them. */
3464 if (thiscase->data.case_stmt.seenlabel == 0)
3465 {
3466 rtx insn;
3467 for (insn = get_last_insn ();
3468 insn != case_stack->data.case_stmt.start;
3469 insn = PREV_INSN (insn))
3470 if (GET_CODE (insn) != NOTE
3471 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3472 {
3473 warning ("unreachable code at beginning of %s",
3474 case_stack->data.case_stmt.printname);
3475 break;
3476 }
3477 }
3478
3479 /* If we don't have a default-label, create one here,
3480 after the body of the switch. */
3481 if (thiscase->data.case_stmt.default_label == 0)
3482 {
3483 thiscase->data.case_stmt.default_label
3484 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3485 expand_label (thiscase->data.case_stmt.default_label);
3486 }
3487 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3488
3489 before_case = get_last_insn ();
3490
3491 /* Simplify the case-list before we count it. */
3492 group_case_nodes (thiscase->data.case_stmt.case_list);
3493
3494 /* Get upper and lower bounds of case values.
3495 Also convert all the case values to the index expr's data type. */
3496
3497 count = 0;
3498 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3499 {
3500 /* Check low and high label values are integers. */
3501 if (TREE_CODE (n->low) != INTEGER_CST)
3502 abort ();
3503 if (TREE_CODE (n->high) != INTEGER_CST)
3504 abort ();
3505
3506 n->low = convert (TREE_TYPE (index_expr), n->low);
3507 n->high = convert (TREE_TYPE (index_expr), n->high);
3508
3509 /* Count the elements and track the largest and smallest
3510 of them (treating them as signed even if they are not). */
3511 if (count++ == 0)
3512 {
3513 minval = n->low;
3514 maxval = n->high;
3515 }
3516 else
3517 {
3518 if (INT_CST_LT (n->low, minval))
3519 minval = n->low;
3520 if (INT_CST_LT (maxval, n->high))
3521 maxval = n->high;
3522 }
3523 /* A range counts double, since it requires two compares. */
3524 if (! tree_int_cst_equal (n->low, n->high))
3525 count++;
3526 }
3527
3528 /* Compute span of values. */
3529 if (count != 0)
3530 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3531 maxval, minval));
3532
3533 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3534 {
3535 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3536 emit_queue ();
3537 emit_jump (default_label);
3538 }
3539 /* If the range of values is much bigger than the number of values,
3540 make a sequence of conditional branches instead of a dispatch.
3541 If the switch-index is a constant, do it this way
3542 because we can optimize it. */
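/* For example (hypothetical numbers): with case values 1, 2, 3 and
   1000, COUNT is 4 and RANGE is 999; since 999 > 10 * 4, we emit
   conditional branches rather than a table of 1000 entries.  */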
3543 else if (TREE_INT_CST_HIGH (range) != 0
3544 #ifdef HAVE_casesi
3545 || (HAVE_casesi ? count < 4 : count < 5)
3546 #else
3547 /* If the machine does not have a case insn that compares the
3548 bounds, dispatch tables carry extra overhead, which raises
3549 the threshold for using them. */
3550 || count < 5
3551 #endif
3552 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3553 || TREE_CODE (index_expr) == INTEGER_CST
3554 /* These will reduce to a constant. */
3555 || (TREE_CODE (index_expr) == CALL_EXPR
3556 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3557 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3558 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3559 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3560 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3561 {
3562 index = expand_expr (index_expr, 0, VOIDmode, 0);
3563
3564 /* If the index is a short or char for which we do not have
3565 an insn to handle comparisons directly, convert it to
3566 a full integer now, rather than letting each comparison
3567 generate the conversion. */
3568
3569 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3570 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3571 == CODE_FOR_nothing))
3572 {
3573 enum machine_mode wider_mode;
3574 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3575 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3576 if (cmp_optab->handlers[(int) wider_mode].insn_code
3577 != CODE_FOR_nothing)
3578 {
3579 index = convert_to_mode (wider_mode, index, unsignedp);
3580 break;
3581 }
3582 }
3583
3584 emit_queue ();
3585 do_pending_stack_adjust ();
3586
3587 index = protect_from_queue (index, 0);
3588 if (GET_CODE (index) == MEM)
3589 index = copy_to_reg (index);
3590 if (GET_CODE (index) == CONST_INT
3591 || TREE_CODE (index_expr) == INTEGER_CST)
3592 {
3593 /* Make a tree node with the proper constant value
3594 if we don't already have one. */
3595 if (TREE_CODE (index_expr) != INTEGER_CST)
3596 {
3597 index_expr
3598 = build_int_2 (INTVAL (index),
3599 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3600 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3601 }
3602
3603 /* For constant index expressions we need only
3604 issue an unconditional branch to the appropriate
3605 target code. The job of removing any unreachable
3606 code is left to the optimization phase if the
3607 "-O" option is specified. */
3608 for (n = thiscase->data.case_stmt.case_list;
3609 n;
3610 n = n->right)
3611 {
3612 if (! tree_int_cst_lt (index_expr, n->low)
3613 && ! tree_int_cst_lt (n->high, index_expr))
3614 break;
3615 }
3616 if (n)
3617 emit_jump (label_rtx (n->code_label));
3618 else
3619 emit_jump (default_label);
3620 }
3621 else
3622 {
3623 /* If the index expression is not constant we generate
3624 a binary decision tree to select the appropriate
3625 target code. This is done as follows:
3626
3627 The list of cases is rearranged into a binary tree,
3628 nearly optimal assuming equal probability for each case.
3629
3630 The tree is transformed into RTL, eliminating
3631 redundant test conditions at the same time.
3632
3633 If program flow could reach the end of the
3634 decision tree, an unconditional jump to the
3635 default code is emitted. */
3636
3637 use_cost_table
3638 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3639 && default_label != 0
3640 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3641 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3642 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3643 default_label, TREE_TYPE (index_expr));
3644 emit_jump_if_reachable (default_label);
3645 }
3646 }
3647 else
3648 {
3649 int win = 0;
3650 #ifdef HAVE_casesi
3651 if (HAVE_casesi)
3652 {
3653 enum machine_mode index_mode = SImode;
3654 int index_bits = GET_MODE_BITSIZE (index_mode);
3655
3656 /* Convert the index to SImode, first subtracting MINVAL if the index is wider than SImode so that the difference fits. */
3657 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3658 > GET_MODE_BITSIZE (index_mode))
3659 {
3660 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3661 index_expr, minval);
3662 minval = integer_zero_node;
3663 }
3664 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3665 index_expr = convert (type_for_size (index_bits, 0),
3666 index_expr);
3667 index = expand_expr (index_expr, 0, VOIDmode, 0);
3668 emit_queue ();
3669 index = protect_from_queue (index, 0);
3670 do_pending_stack_adjust ();
3671
3672 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3673 expand_expr (range, 0, VOIDmode, 0),
3674 table_label, default_label));
3675 win = 1;
3676 }
3677 #endif
3678 #ifdef HAVE_tablejump
3679 if (! win && HAVE_tablejump)
3680 {
3681 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3682 fold (build (MINUS_EXPR,
3683 TREE_TYPE (index_expr),
3684 index_expr, minval)));
3685 index = expand_expr (index_expr, 0, VOIDmode, 0);
3686 emit_queue ();
3687 /* convert_to_mode calls protect_from_queue. */
3688 index = convert_to_mode (Pmode, index, 1);
3689 do_pending_stack_adjust ();
3690
3691 do_tablejump (index, Pmode,
3692 gen_rtx (CONST_INT, VOIDmode,
3693 TREE_INT_CST_LOW (range)),
3694 table_label, default_label);
3695 win = 1;
3696 }
3697 #endif
3698 if (! win)
3699 abort ();
3700
3701 /* Get table of labels to jump to, in order of case index. */
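/* E.g. (hypothetical): with cases 2, 3 and 5, MINVAL is 2 and NCASES
   is 4; LABELVEC gets the labels for 2 and 3, a gap that is filled
   with the default label below, and then the label for 5.  */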
3702
3703 ncases = TREE_INT_CST_LOW (range) + 1;
3704 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3705 bzero (labelvec, ncases * sizeof (rtx));
3706
3707 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3708 {
3709 register int i
3710 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3711
3712 while (1)
3713 {
3714 labelvec[i]
3715 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3716 if (i + TREE_INT_CST_LOW (minval)
3717 == TREE_INT_CST_LOW (n->high))
3718 break;
3719 i++;
3720 }
3721 }
3722
3723 /* Fill in the gaps with the default. */
3724 for (i = 0; i < ncases; i++)
3725 if (labelvec[i] == 0)
3726 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3727
3728 /* Output the table. */
3729 emit_label (table_label);
3730
3731 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3732 were an expression, instead of an #ifdef/#ifndef.
3733 if (
3734 #ifdef CASE_VECTOR_PC_RELATIVE
3735 1 ||
3736 #endif
3737 flag_pic)
3738 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3739 gen_rtx (LABEL_REF, Pmode, table_label),
3740 gen_rtvec_v (ncases, labelvec)));
3741 else
3742 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3743 gen_rtvec_v (ncases, labelvec)));
3744
3745 /* If the case insn drops through the table,
3746 after the table we must jump to the default-label.
3747 Otherwise record no drop-through after the table. */
3748 #ifdef CASE_DROPS_THROUGH
3749 emit_jump (default_label);
3750 #else
3751 emit_barrier ();
3752 #endif
3753 }
3754
3755 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3756 reorder_insns (before_case, get_last_insn (),
3757 thiscase->data.case_stmt.start);
3758 }
3759 if (thiscase->exit_label)
3760 emit_label (thiscase->exit_label);
3761
3762 POPSTACK (case_stack);
3763
3764 free_temp_slots ();
3765 }
3766
3767 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3768
3769 static void
3770 do_jump_if_equal (op1, op2, label, unsignedp)
3771 rtx op1, op2, label;
3772 int unsignedp;
3773 {
3774 if (GET_CODE (op1) == CONST_INT
3775 && GET_CODE (op2) == CONST_INT)
3776 {
3777 if (INTVAL (op1) == INTVAL (op2))
3778 emit_jump (label);
3779 }
3780 else
3781 {
3782 enum machine_mode mode = GET_MODE (op1);
3783 if (mode == VOIDmode)
3784 mode = GET_MODE (op2);
3785 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3786 emit_jump_insn (gen_beq (label));
3787 }
3788 }
3789 \f
3790 /* Not all case values are encountered equally. This function
3791 uses a heuristic to weight case labels, in cases where that
3792 looks like a reasonable thing to do.
3793
3794 Right now, all we try to guess is text, and we establish the
3795 following weights:
3796
3797 chars above space: 16
3798 digits: 16
3799 default: 12
3800 space, punct: 8
3801 tab: 4
3802 newline: 2
3803 other "\" chars: 1
3804 remaining chars: 0
3805
3806 If we find any cases in the switch that are not either -1 or in the range
3807 of valid ASCII characters, or are control characters other than those
3808 commonly used with "\", don't treat this switch as scanning text.
3809
3810 Return 1 if these nodes are suitable for cost estimation, otherwise
3811 return 0. */
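
/* E.g. (hypothetical): a tokenizer's switch over 'a', '0', ' ' and
   '\n' looks like text, so the cost table is used when balancing;
   a case of 200 or of '\001' makes this function return 0.  */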
3812
3813 static int
3814 estimate_case_costs (node)
3815 case_node_ptr node;
3816 {
3817 tree min_ascii = build_int_2 (-1, -1);
3818 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3819 case_node_ptr n;
3820 int i;
3821
3822 /* If we haven't already made the cost table, make it now. Note that the
3823 lower bound of the table is -1, not zero. */
3824
3825 if (cost_table == NULL)
3826 {
3827 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3828 bzero (cost_table - 1, 129 * sizeof (short));
3829
3830 for (i = 0; i < 128; i++)
3831 {
3832 if (isalnum (i))
3833 cost_table[i] = 16;
3834 else if (ispunct (i))
3835 cost_table[i] = 8;
3836 else if (iscntrl (i))
3837 cost_table[i] = -1;
3838 }
3839
3840 cost_table[' '] = 8;
3841 cost_table['\t'] = 4;
3842 cost_table['\0'] = 4;
3843 cost_table['\n'] = 2;
3844 cost_table['\f'] = 1;
3845 cost_table['\v'] = 1;
3846 cost_table['\b'] = 1;
3847 }
3848
3849 /* See if all the case expressions look like text. It is text if the
3850 lowest constant is >= -1 and the highest constant is <= 127. Do all
3851 comparisons as signed arithmetic, since we must never access cost_table
3852 with a value less than -1. Also check that none of the constants in a
3853 range are strange control characters. */
3854
3855 for (n = node; n; n = n->right)
3856 {
3857 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3858 return 0;
3859
3860 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3861 if (cost_table[i] < 0)
3862 return 0;
3863 }
3864
3865 /* All case values lie within the range of interesting
3866 ASCII characters. */
3867 return 1;
3868 }
3869
3870 /* Scan an ordered list of case nodes
3871 combining those with consecutive values or ranges.
3872
3873 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3874
3875 static void
3876 group_case_nodes (head)
3877 case_node_ptr head;
3878 {
3879 case_node_ptr node = head;
3880
3881 while (node)
3882 {
3883 rtx lb = next_real_insn (label_rtx (node->code_label));
3884 case_node_ptr np = node;
3885
3886 /* Try to group the successors of NODE with NODE. */
3887 while (((np = np->right) != 0)
3888 /* Do they jump to the same place? */
3889 && next_real_insn (label_rtx (np->code_label)) == lb
3890 /* Are their ranges consecutive? */
3891 && tree_int_cst_equal (np->low,
3892 fold (build (PLUS_EXPR,
3893 TREE_TYPE (node->high),
3894 node->high,
3895 integer_one_node)))
3896 /* An overflow is not consecutive. */
3897 && tree_int_cst_lt (node->high,
3898 fold (build (PLUS_EXPR,
3899 TREE_TYPE (node->high),
3900 node->high,
3901 integer_one_node))))
3902 {
3903 node->high = np->high;
3904 }
3905 /* NP is the first node after NODE which can't be grouped with it.
3906 Delete the nodes in between, and move on to that node. */
3907 node->right = np;
3908 node = np;
3909 }
3910 }
3911
3912 /* Take an ordered list of case nodes
3913 and transform them into a near-optimal binary tree,
3914 on the assumption that any target code selection value is as
3915 likely as any other.
3916
3917 The transformation is performed by splitting the ordered
3918 list into two equal sections plus a pivot. The parts are
3919 then attached to the pivot as left and right branches. Each
3920 branch is then transformed recursively. */
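
/* For example (hypothetical): seven single-valued entries
   1: 2: 3: 4: 5: 6: 7: become a tree rooted at 4, with 1..3 as its
   left branch and 5..7 as its right, each balanced in turn.  */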
3921
3922 static void
3923 balance_case_nodes (head, parent)
3924 case_node_ptr *head;
3925 case_node_ptr parent;
3926 {
3927 register case_node_ptr np;
3928
3929 np = *head;
3930 if (np)
3931 {
3932 int cost = 0;
3933 int i = 0;
3934 int ranges = 0;
3935 register case_node_ptr *npp;
3936 case_node_ptr left;
3937
3938 /* Count the number of entries on this branch. Also count the ranges. */
3939
3940 while (np)
3941 {
3942 if (!tree_int_cst_equal (np->low, np->high))
3943 {
3944 ranges++;
3945 if (use_cost_table)
3946 cost += cost_table[TREE_INT_CST_LOW (np->high)];
3947 }
3948
3949 if (use_cost_table)
3950 cost += cost_table[TREE_INT_CST_LOW (np->low)];
3951
3952 i++;
3953 np = np->right;
3954 }
3955
3956 if (i > 2)
3957 {
3958 /* Split this list if it is long enough for that to help. */
3959 npp = head;
3960 left = *npp;
3961 if (use_cost_table)
3962 {
3963 /* Find the place in the list that bisects the list's total cost;
3964 here I gets half the total cost. */
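/* E.g. (hypothetical costs): with per-label costs 16, 16, 8 and 2,
   COST is 42 and I starts at 21; the first label leaves I at 5 and
   the second drives it below zero, so the split lands there.  */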
3965 int n_moved = 0;
3966 i = (cost + 1) / 2;
3967 while (1)
3968 {
3969 /* Skip nodes while their cost does not reach that amount. */
3970 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3971 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
3972 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
3973 if (i <= 0)
3974 break;
3975 npp = &(*npp)->right;
3976 n_moved += 1;
3977 }
3978 if (n_moved == 0)
3979 {
3980 /* Leave this branch lopsided, but optimize left-hand
3981 side and fill in `parent' fields for right-hand side. */
3982 np = *head;
3983 np->parent = parent;
3984 balance_case_nodes (&np->left, np);
3985 for (; np->right; np = np->right)
3986 np->right->parent = np;
3987 return;
3988 }
3989 }
3990 /* If there are just three nodes, split at the middle one. */
3991 else if (i == 3)
3992 npp = &(*npp)->right;
3993 else
3994 {
3995 /* Find the place in the list that bisects the list's total cost,
3996 where ranges count as 2.
3997 Here I gets half the total cost. */
3998 i = (i + ranges + 1) / 2;
3999 while (1)
4000 {
4001 /* Skip nodes while their cost does not reach that amount. */
4002 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4003 i--;
4004 i--;
4005 if (i <= 0)
4006 break;
4007 npp = &(*npp)->right;
4008 }
4009 }
4010 *head = np = *npp;
4011 *npp = 0;
4012 np->parent = parent;
4013 np->left = left;
4014
4015 /* Optimize each of the two split parts. */
4016 balance_case_nodes (&np->left, np);
4017 balance_case_nodes (&np->right, np);
4018 }
4019 else
4020 {
4021 /* Else leave this branch as one level,
4022 but fill in `parent' fields. */
4023 np = *head;
4024 np->parent = parent;
4025 for (; np->right; np = np->right)
4026 np->right->parent = np;
4027 }
4028 }
4029 }
4030 \f
4031 /* Search the parent sections of the case node tree
4032 to see if a test for the lower bound of NODE would be redundant.
4033 INDEX_TYPE is the type of the index expression.
4034
4035 The instructions to generate the case decision tree are
4036 output in the same order as nodes are processed, so it is
4037 known that if a parent node has already checked against the
4038 current node's low bound minus one, the current node is bounded
4039 at its lower span and the test here would be redundant. */
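
/* E.g. (hypothetical): if a parent's range ends at 4 and control
   reached this node on the greater-than side, a node whose low bound
   is 5 need not test `index >= 5' again.  */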
4040
4041 static int
4042 node_has_low_bound (node, index_type)
4043 case_node_ptr node;
4044 tree index_type;
4045 {
4046 tree low_minus_one;
4047 case_node_ptr pnode;
4048
4049 /* If the lower bound of this node is the lowest value in the index type,
4050 we need not test it. */
4051
4052 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4053 return 1;
4054
4055 /* If this node has a left branch, the value at the left must be less
4056 than that at this node, so it cannot be bounded at the bottom and
4057 we need not bother testing any further. */
4058
4059 if (node->left)
4060 return 0;
4061
4062 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4063 node->low, integer_one_node));
4064
4065 /* If the subtraction above overflowed, we can't verify anything.
4066 Otherwise, look for a parent that tests our value - 1. */
4067
4068 if (! tree_int_cst_lt (low_minus_one, node->low))
4069 return 0;
4070
4071 for (pnode = node->parent; pnode; pnode = pnode->parent)
4072 if (tree_int_cst_equal (low_minus_one, pnode->high))
4073 return 1;
4074
4075 return 0;
4076 }
4077
4078 /* Search the parent sections of the case node tree
4079 to see if a test for the upper bound of NODE would be redundant.
4080 INDEX_TYPE is the type of the index expression.
4081
4082 The instructions to generate the case decision tree are
4083 output in the same order as nodes are processed, so it is
4084 known that if a parent node has already checked against the
4085 current node's high bound plus one, the current node is bounded
4086 at its upper span and the test here would be redundant. */
4087
4088 static int
4089 node_has_high_bound (node, index_type)
4090 case_node_ptr node;
4091 tree index_type;
4092 {
4093 tree high_plus_one;
4094 case_node_ptr pnode;
4095
4096 /* If the upper bound of this node is the highest value in the type
4097 of the index expression, we need not test against it. */
4098
4099 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4100 return 1;
4101
4102 /* If this node has a right branch, the value at the right must be greater
4103 than that at this node, so it cannot be bounded at the top and
4104 we need not bother testing any further. */
4105
4106 if (node->right)
4107 return 0;
4108
4109 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4110 node->high, integer_one_node));
4111
4112 /* If the addition above overflowed, we can't verify anything.
4113 Otherwise, look for a parent that tests our value + 1. */
4114
4115 if (! tree_int_cst_lt (node->high, high_plus_one))
4116 return 0;
4117
4118 for (pnode = node->parent; pnode; pnode = pnode->parent)
4119 if (tree_int_cst_equal (high_plus_one, pnode->low))
4120 return 1;
4121
4122 return 0;
4123 }
4124
4125 /* Search the parent sections of the
4126 case node tree to see if both tests for the upper and lower
4127 bounds of NODE would be redundant. */
4128
4129 static int
4130 node_is_bounded (node, index_type)
4131 case_node_ptr node;
4132 tree index_type;
4133 {
4134 return (node_has_low_bound (node, index_type)
4135 && node_has_high_bound (node, index_type));
4136 }
4137
4138 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4139
4140 static void
4141 emit_jump_if_reachable (label)
4142 rtx label;
4143 {
4144 if (GET_CODE (get_last_insn ()) != BARRIER)
4145 emit_jump (label);
4146 }
4147 \f
4148 /* Emit step-by-step code to select a case for the value of INDEX.
4149 The decision tree generated here follows the form of the
4150 case-node binary tree NODE, whose nodes represent test conditions.
4151 INDEX_TYPE is the type of the index of the switch.
4152
4153 Care is taken to prune redundant tests from the decision tree
4154 by detecting any boundary conditions already checked by
4155 emitted rtx. (See node_has_high_bound, node_has_low_bound
4156 and node_is_bounded, above.)
4157
4158 Where the test conditions can be shown to be redundant we emit
4159 an unconditional jump to the target code. As a further
4160 optimization, the subordinates of a tree node are examined to
4161 check for bounded nodes. In that case, the conditional and/or
4162 unconditional jumps resulting from the boundary check for the
4163 current node are arranged to target the subordinate's associated
4164 code directly for out-of-bound conditions on the current node.
4165
4166 We can assume that when control reaches the code generated here,
4167 the index value has already been compared with the parents
4168 of this node, and determined to be on the same side of each parent
4169 as this node is. Thus, if this node tests for the value 51,
4170 and a parent tested for 52, we don't need to consider
4171 the possibility of a value greater than 51. If another parent
4172 tests for the value 50, then this node need not test anything. */
4173
4174 static void
4175 emit_case_nodes (index, node, default_label, index_type)
4176 rtx index;
4177 case_node_ptr node;
4178 rtx default_label;
4179 tree index_type;
4180 {
4181 /* If INDEX has an unsigned type, we must make unsigned branches. */
4182 int unsignedp = TREE_UNSIGNED (index_type);
4183 typedef rtx rtx_function ();
4184 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4185 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4186 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4187 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4188 enum machine_mode mode = GET_MODE (index);
4189
4190 /* See if our parents have already tested everything for us.
4191 If they have, emit an unconditional jump for this node. */
4192 if (node_is_bounded (node, index_type))
4193 emit_jump (label_rtx (node->code_label));
4194
4195 else if (tree_int_cst_equal (node->low, node->high))
4196 {
4197 /* Node is single valued. First see if the index expression matches
4198 this node and then check our children, if any. */
4199
4200 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4201 label_rtx (node->code_label), unsignedp);
4202
4203 if (node->right != 0 && node->left != 0)
4204 {
4205 /* This node has children on both sides.
4206 Dispatch to one side or the other
4207 by comparing the index value with this node's value.
4208 If one subtree is bounded, check that one first,
4209 so we can avoid real branches in the tree. */
4210
4211 if (node_is_bounded (node->right, index_type))
4212 {
4213 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4214 GT, 0, mode, unsignedp, 0);
4215
4216 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4217 emit_case_nodes (index, node->left, default_label, index_type);
4218 }
4219
4220 else if (node_is_bounded (node->left, index_type))
4221 {
4222 emit_cmp_insn (index, expand_expr (node->high, 0,
4223 VOIDmode, 0),
4224 LT, 0, mode, unsignedp, 0);
4225 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4226 emit_case_nodes (index, node->right, default_label, index_type);
4227 }
4228
4229 else
4230 {
4231 /* Neither node is bounded. First distinguish the two sides;
4232 then emit the code for one side at a time. */
4233
4234 tree test_label
4235 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4236
4237 /* See if the value is on the right. */
4238 emit_cmp_insn (index, expand_expr (node->high, 0,
4239 VOIDmode, 0),
4240 GT, 0, mode, unsignedp, 0);
4241 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4242
4243 /* Value must be on the left.
4244 Handle the left-hand subtree. */
4245 emit_case_nodes (index, node->left, default_label, index_type);
4246 /* If left-hand subtree does nothing,
4247 go to default. */
4248 emit_jump_if_reachable (default_label);
4249
4250 /* Code branches here for the right-hand subtree. */
4251 expand_label (test_label);
4252 emit_case_nodes (index, node->right, default_label, index_type);
4253 }
4254 }
4255
4256 else if (node->right != 0 && node->left == 0)
4257 {
4258 /* Here we have a right child but no left, so we issue a conditional
4259 branch to default and process the right child.
4260
4261 Omit the conditional branch to default if all it would bypass is one
4262 single-valued right child; it costs too much space to save so little time. */
4263
4264 if (node->right->right || node->right->left
4265 || !tree_int_cst_equal (node->right->low, node->right->high))
4266 {
4267 if (!node_has_low_bound (node, index_type))
4268 {
4269 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4270 LT, 0, mode, unsignedp, 0);
4271 emit_jump_insn ((*gen_blt_pat) (default_label));
4272 }
4273
4274 emit_case_nodes (index, node->right, default_label, index_type);
4275 }
4276 else
4277 /* We cannot process node->right normally
4278 since we haven't ruled out the numbers less than
4279 this node's value. So handle node->right explicitly. */
4280 do_jump_if_equal (index,
4281 expand_expr (node->right->low, 0, VOIDmode, 0),
4282 label_rtx (node->right->code_label), unsignedp);
4283 }
4284
4285 else if (node->right == 0 && node->left != 0)
4286 {
4287 /* Just one subtree, on the left. */
4288
4289 #if 0 /* The following code and comment were formerly part
4290 of the condition here, but they didn't work
4291 and I don't understand what the idea was. -- rms. */
4292 /* If our "most probable entry" is less probable
4293 than the default label, emit a jump to
4294 the default label using condition codes
4295 already lying around. With no right branch,
4296 a branch-greater-than will get us to the default
4297 label correctly. */
4298 if (use_cost_table
4299 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4300 ;
4301 #endif /* 0 */
4302 if (node->left->left || node->left->right
4303 || !tree_int_cst_equal (node->left->low, node->left->high))
4304 {
4305 if (!node_has_high_bound (node, index_type))
4306 {
4307 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4308 GT, 0, mode, unsignedp, 0);
4309 emit_jump_insn ((*gen_bgt_pat) (default_label));
4310 }
4311
4312 emit_case_nodes (index, node->left, default_label, index_type);
4313 }
4314 else
4315 /* We cannot process node->left normally
4316 since we haven't ruled out the numbers greater than
4317 this node's value. So handle node->left explicitly. */
4318 do_jump_if_equal (index,
4319 expand_expr (node->left->low, 0, VOIDmode, 0),
4320 label_rtx (node->left->code_label), unsignedp);
4321 }
4322 }
4323 else
4324 {
4325 /* Node is a range. These cases are very similar to those for a single
4326 value, except that we do not start by testing whether this node
4327 is the one to branch to. */
4328
4329 if (node->right != 0 && node->left != 0)
4330 {
4331 /* Node has subtrees on both sides.
4332 If the right-hand subtree is bounded,
4333 test for it first, since we can go straight there.
4334 Otherwise, we need to make a branch in the control structure,
4335 then handle the two subtrees. */
4336 tree test_label = 0;
4337
4338 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4339 GT, 0, mode, unsignedp, 0);
4340
4341 if (node_is_bounded (node->right, index_type))
4342 /* Right hand node is fully bounded so we can eliminate any
4343 testing and branch directly to the target code. */
4344 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4345 else
4346 {
4347 /* Right hand node requires testing.
4348 Branch to a label where we will handle it later. */
4349
4350 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4351 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4352 }
4353
4354 /* Value belongs to this node or to the left-hand subtree. */
4355
4356 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4357 GE, 0, mode, unsignedp, 0);
4358 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4359
4360 /* Handle the left-hand subtree. */
4361 emit_case_nodes (index, node->left, default_label, index_type);
4362
4363 /* If right node had to be handled later, do that now. */
4364
4365 if (test_label)
4366 {
4367 /* If the left-hand subtree fell through,
4368 don't let it fall into the right-hand subtree. */
4369 emit_jump_if_reachable (default_label);
4370
4371 expand_label (test_label);
4372 emit_case_nodes (index, node->right, default_label, index_type);
4373 }
4374 }
4375
4376 else if (node->right != 0 && node->left == 0)
4377 {
4378 /* Deal with values to the left of this node,
4379 if they are possible. */
4380 if (!node_has_low_bound (node, index_type))
4381 {
4382 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4383 LT, 0, mode, unsignedp, 0);
4384 emit_jump_insn ((*gen_blt_pat) (default_label));
4385 }
4386
4387 /* Value belongs to this node or to the right-hand subtree. */
4388
4389 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4390 LE, 0, mode, unsignedp, 0);
4391 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4392
4393 emit_case_nodes (index, node->right, default_label, index_type);
4394 }
4395
4396 else if (node->right == 0 && node->left != 0)
4397 {
4398 /* Deal with values to the right of this node,
4399 if they are possible. */
4400 if (!node_has_high_bound (node, index_type))
4401 {
4402 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4403 GT, 0, mode, unsignedp, 0);
4404 emit_jump_insn ((*gen_bgt_pat) (default_label));
4405 }
4406
4407 /* Value belongs to this node or to the left-hand subtree. */
4408
4409 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4410 GE, 0, mode, unsignedp, 0);
4411 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4412
4413 emit_case_nodes (index, node->left, default_label, index_type);
4414 }
4415
4416 else
4417 {
4418 /* Node has no children so we check low and high bounds to remove
4419 redundant tests. At most one of the bounds can be known,
4420 since otherwise this node would be bounded--a case tested already. */
4421
4422 if (!node_has_high_bound (node, index_type))
4423 {
4424 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4425 GT, 0, mode, unsignedp, 0);
4426 emit_jump_insn ((*gen_bgt_pat) (default_label));
4427 }
4428
4429 if (!node_has_low_bound (node, index_type))
4430 {
4431 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4432 LT, 0, mode, unsignedp, 0);
4433 emit_jump_insn ((*gen_blt_pat) (default_label));
4434 }
4435
4436 emit_jump (label_rtx (node->code_label));
4437 }
4438 }
4439 }
4440 \f
4441 /* These routines are used by the loop unrolling code. They copy BLOCK trees
4442 so that the debugging info will be correct for the unrolled loop. */
4443
4444 /* Indexed by loop number, contains pointer to the first block in the loop,
4445 or zero if none. Only valid if doing loop unrolling and outputting debugger
4446 info. */
4447
4448 tree *loop_number_first_block;
4449
4450 /* Indexed by loop number, contains pointer to the last block in the loop,
4451 only valid if loop_number_first_block is nonzero. */
4452
4453 tree *loop_number_last_block;
4454
4455 /* Indexed by loop number, contains nesting level of first block in the
4456 loop, if any. Only valid if doing loop unrolling and outputting debugger
4457 info. */
4458
4459 int *loop_number_block_level;
4460
4461 /* Scan the function looking for loops, and walk the BLOCK tree at the
4462 same time. Record the first and last BLOCK tree corresponding to each
4463 loop. This function is similar to find_and_verify_loops in loop.c. */
4464
4465 void
4466 find_loop_tree_blocks (f)
4467 rtx f;
4468 {
4469 rtx insn;
4470 int current_loop = -1;
4471 int next_loop = -1;
4472 int loop;
4473 int block_level, tree_level;
4474 tree tree_block, parent_tree_block;
4475
4476 tree_block = DECL_INITIAL (current_function_decl);
4477 parent_tree_block = 0;
4478 block_level = 0;
4479 tree_level = -1;
4480
4481 /* Find boundaries of loops, and save the first and last BLOCK tree
4482 corresponding to each loop. */
4483
4484 for (insn = f; insn; insn = NEXT_INSN (insn))
4485 {
4486 if (GET_CODE (insn) == NOTE)
4487 switch (NOTE_LINE_NUMBER (insn))
4488 {
4489 case NOTE_INSN_LOOP_BEG:
4490 loop_number_block_level[++next_loop] = block_level;
4491 loop_number_first_block[next_loop] = 0;
4492 current_loop = next_loop;
4493 break;
4494
4495 case NOTE_INSN_LOOP_END:
4496 if (current_loop == -1)
4497 abort ();
4498
4499 current_loop = loop_outer_loop[current_loop];
4500 break;
4501
4502 case NOTE_INSN_BLOCK_BEG:
4503 if (tree_level < block_level)
4504 {
4505 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4506 we must now visit the subtree of the current block. */
4507 parent_tree_block = tree_block;
4508 tree_block = BLOCK_SUBBLOCKS (tree_block);
4509 tree_level++;
4510 }
4511 else if (tree_level > block_level)
4512 abort ();
4513
4514 /* Save this block tree here for all nested loops for which
4515 this is the topmost block. */
4516 for (loop = current_loop;
4517 loop != -1 && block_level == loop_number_block_level[loop];
4518 loop = loop_outer_loop[loop])
4519 {
4520 if (loop_number_first_block[loop] == 0)
4521 loop_number_first_block[loop] = tree_block;
4522 loop_number_last_block[loop] = tree_block;
4523 }
4524
4525 block_level++;
4526 break;
4527
4528 case NOTE_INSN_BLOCK_END:
4529 block_level--;
4530 if (tree_level > block_level)
4531 {
4532 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4533 we must now visit the parent of the current tree. */
4534 if (tree_block != 0 || parent_tree_block == 0)
4535 abort ();
4536 tree_block = parent_tree_block;
4537 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4538 tree_level--;
4539 }
4540 tree_block = BLOCK_CHAIN (tree_block);
4541 break;
4542 }
4543 }
4544 }
4545
4546 /* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4547 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4548
4549 Note that we only copy the topmost level of tree nodes; they will share
4550 pointers to the same subblocks. */
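
/* E.g. (hypothetical): if a loop's topmost blocks are B1 and B2 and
   COPIES is 3, two extra copies of each are spliced into the chain
   after B2, all sharing BLOCK_SUBBLOCKS with the originals.  */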
4551
4552 void
4553 unroll_block_trees (loop_number, copies)
4554 int loop_number;
4555 int copies;
4556 {
4557 int i;
4558
4559 /* First check whether there are any blocks that need to be copied. */
4560 if (loop_number_first_block[loop_number])
4561 {
4562 tree first_block = loop_number_first_block[loop_number];
4563 tree last_block = loop_number_last_block[loop_number];
4564 tree last_block_created = 0;
4565
4566 for (i = 0; i < copies - 1; i++)
4567 {
4568 tree block = first_block;
4569 tree insert_after = last_block;
4570 tree copied_block;
4571
4572 /* Copy every block between first_block and last_block inclusive,
4573 inserting the new blocks after last_block. */
4574 do
4575 {
4576 tree new_block = make_node (BLOCK);
4577 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4578 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4579 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4580 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4581 TREE_USED (new_block) = TREE_USED (block);
4582
4583 /* Insert the new block after the insertion point, and move
4584 the insertion point to the new block. This ensures that
4585 the copies are inserted in the right order. */
4586 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4587 BLOCK_CHAIN (insert_after) = new_block;
4588 insert_after = new_block;
4589
4590 copied_block = block;
4591 block = BLOCK_CHAIN (block);
4592 }
4593 while (copied_block != last_block);
4594
4595 /* Remember the last block created, so that we can update the
4596 info in the tables. */
4597 if (last_block_created == 0)
4598 last_block_created = insert_after;
4599 }
4600
4601 /* For all nested loops for which LAST_BLOCK was originally the last
4602 block, update the tables to indicate that LAST_BLOCK_CREATED is
4603 now the last block in the loop. */
4604 for (i = loop_number; last_block == loop_number_last_block[i];
4605 i = loop_outer_loop[i])
4606 loop_number_last_block[i] = last_block_created;
4607 }
4608 }