/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
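
/* For example (an illustrative sketch, not code from this file): a C
   front end expanding `if (x > 0) f (x);' makes calls in this order:

	expand_start_cond (<tree for `x > 0'>, 0);
	expand_expr_stmt (<tree for `f (x)'>);
	expand_end_cond ();

   expand_start_cond emits a jump past the then-clause, taken when the
   condition is false; expand_end_cond defines the label that jump
   targets.  */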

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
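
/* For example (an illustrative sketch): for

	switch (i) { case 1: ... case 2: ... case 5 ... 7: ... }

   (using the GNU range extension), the chain built while parsing is

	{1,1} -> {2,2} -> {5,7}

   linked through RIGHT in ascending order.  If no branch table is used,
   balancing would make {2,2} the root with {1,1} as its LEFT child and
   {5,7} as its RIGHT child, and emit_case_nodes would then emit compare
   and jump insns from that tree.  */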

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
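
/* For example (an illustrative sketch): a loop inside a binding contour
   inside another loop yields three `struct nesting' objects, all on
   `nesting_stack' chained through `all'; the two loops are also chained
   through `next' on `loop_stack', and the contour is on `block_stack'.
   A construct visible to `break' is then found by scanning
   `nesting_stack' innermost-first for an entry whose `exit_label' is
   non-null.  */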

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
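
/* For example (an illustrative sketch): expand_end_cond finishes with

	POPSTACK (cond_stack);

   which pops the innermost cond entry off `cond_stack' and unwinds
   `nesting_stack' and `nesting_depth' to match; obstack_free releases
   that entry's storage (and, by the nature of obstacks, anything
   allocated after it on stmt_obstack).  */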
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
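
/* For example (an illustrative sketch), in GNU C:

	{ int buf[n]; ... goto out; ... }
	out: ;

   When the goto is expanded, `out' is not yet defined, and the block's
   variable-sized `buf' means the stack level must be restored on exit;
   so expand_goto_internal calls expand_fixup to record the jump here.
   Each time a contour that resets the stack is exited, fixup_gotos
   checks the fixup; once `out' has been defined, the stack-restore
   (and any cleanups) are inserted before the jump insn.  */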

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
  emit_barrier ();
}
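
/* For example (an illustrative sketch): for the GNU C computed goto

	void *ptr = &&lab;  ...  goto *ptr;

   the front end passes the tree for `ptr' as EXP here; the address of
   `lab' itself was produced where `&&lab' was expanded, not in this
   function.  */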
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, 0);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would
	     clobber the stack pointer.  This one should be deleted as
	     dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, 0);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
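
/* For example (an illustrative sketch): the GNU C statement

	asm volatile ("addl %2,%0" : "=d" (sum) : "0" (first), "d" (second));

   arrives here with STRING holding the template, OUTPUTS a one-element
   list whose TREE_PURPOSE is the constraint string "=d" and whose
   TREE_VALUE is the tree for `sum', INPUTS the two-element list for the
   input operands, and VOL set to 1 because of `volatile'.  */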

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)
		continue;

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      0, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;
      /* Otherwise, fall through and warn about this conversion.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
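
/* For example (an illustrative sketch): for the GNU C statement
   expression

	z = ({ int t = f (); t + 1; });

   the front end calls expand_start_stmt_expr before expanding the
   braced body (so expr_stmts_for_value is nonzero and each expr-stmt
   computes its value), then hands the RTL_EXPR returned here to
   expand_end_stmt_expr, which gives it the type and value of the last
   expr-stmt, `t + 1'.  */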

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
   {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

   }  */

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the exception contour
     we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Define the label at its end
   and emit a jump to it, past the catch clauses.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */
int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}
1708 \f
1709 /* Generate RTL for the start of an if-then. COND is the expression
1710 whose truth should be tested.
1711
1712 If EXITFLAG is nonzero, this conditional is visible to
1713 `exit_something'. */
1714
1715 void
1716 expand_start_cond (cond, exitflag)
1717 tree cond;
1718 int exitflag;
1719 {
1720 struct nesting *thiscond = ALLOC_NESTING ();
1721
1722 /* Make an entry on cond_stack for the cond we are entering. */
1723
1724 thiscond->next = cond_stack;
1725 thiscond->all = nesting_stack;
1726 thiscond->depth = ++nesting_depth;
1727 thiscond->data.cond.next_label = gen_label_rtx ();
1728 /* Before we encounter an `else', we don't need a separate exit label
1729 unless there are supposed to be exit statements
1730 to exit this conditional. */
1731 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1732 thiscond->data.cond.endif_label = thiscond->exit_label;
1733 cond_stack = thiscond;
1734 nesting_stack = thiscond;
1735
1736 do_jump (cond, thiscond->data.cond.next_label, NULL);
1737 }
1738
1739 /* Generate RTL between the then-clause and the elseif-clause
1740 of an if-then-elseif-.... */
1741
1742 void
1743 expand_start_elseif (cond)
1744 tree cond;
1745 {
1746 if (cond_stack->data.cond.endif_label == 0)
1747 cond_stack->data.cond.endif_label = gen_label_rtx ();
1748 emit_jump (cond_stack->data.cond.endif_label);
1749 emit_label (cond_stack->data.cond.next_label);
1750 cond_stack->data.cond.next_label = gen_label_rtx ();
1751 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1752 }
1753
1754 /* Generate RTL between the then-clause and the else-clause
1755 of an if-then-else. */
1756
1757 void
1758 expand_start_else ()
1759 {
1760 if (cond_stack->data.cond.endif_label == 0)
1761 cond_stack->data.cond.endif_label = gen_label_rtx ();
1762 emit_jump (cond_stack->data.cond.endif_label);
1763 emit_label (cond_stack->data.cond.next_label);
1764 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1765 }
1766
1767 /* Generate RTL for the end of an if-then.
1768 Pop the record for it off of cond_stack. */
1769
1770 void
1771 expand_end_cond ()
1772 {
1773 struct nesting *thiscond = cond_stack;
1774
1775 do_pending_stack_adjust ();
1776 if (thiscond->data.cond.next_label)
1777 emit_label (thiscond->data.cond.next_label);
1778 if (thiscond->data.cond.endif_label)
1779 emit_label (thiscond->data.cond.endif_label);
1780
1781 POPSTACK (cond_stack);
1782 last_expr_type = 0;
1783 }
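/* An illustrative sketch, not compiled in: roughly how a front end
   would drive the functions above to expand
       if (a) s1; else if (b) s2; else s3;
   The expand_stmt calls are stand-ins for whatever the language
   front end does to expand each clause; they are not defined here.  */
#if 0
  expand_start_cond (a, 0);	/* jump to next_label if A is false */
  expand_stmt (s1);
  expand_start_elseif (b);	/* close the then-clause, test B */
  expand_stmt (s2);
  expand_start_else ();		/* the final catch-all clause */
  expand_stmt (s3);
  expand_end_cond ();		/* emit next_label and endif_label */
#endif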
1784 \f
1785 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1786    loop should be exited by `exit_something'.  This is a loop for which
1787    `expand_continue_loop' will jump to the top of the loop.
1788
1789 Make an entry on loop_stack to record the labels associated with
1790 this loop. */
1791
1792 struct nesting *
1793 expand_start_loop (exit_flag)
1794 int exit_flag;
1795 {
1796 register struct nesting *thisloop = ALLOC_NESTING ();
1797
1798 /* Make an entry on loop_stack for the loop we are entering. */
1799
1800 thisloop->next = loop_stack;
1801 thisloop->all = nesting_stack;
1802 thisloop->depth = ++nesting_depth;
1803 thisloop->data.loop.start_label = gen_label_rtx ();
1804 thisloop->data.loop.end_label = gen_label_rtx ();
1805 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1806 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1807 loop_stack = thisloop;
1808 nesting_stack = thisloop;
1809
1810 do_pending_stack_adjust ();
1811 emit_queue ();
1812 emit_note (0, NOTE_INSN_LOOP_BEG);
1813 emit_label (thisloop->data.loop.start_label);
1814
1815 return thisloop;
1816 }
1817
1818 /* Like expand_start_loop but for a loop where the continuation point
1819 (for expand_continue_loop) will be specified explicitly. */
1820
1821 struct nesting *
1822 expand_start_loop_continue_elsewhere (exit_flag)
1823 int exit_flag;
1824 {
1825 struct nesting *thisloop = expand_start_loop (exit_flag);
1826 loop_stack->data.loop.continue_label = gen_label_rtx ();
1827 return thisloop;
1828 }
1829
1830 /* Specify the continuation point for a loop started with
1831 expand_start_loop_continue_elsewhere.
1832 Use this at the point in the code to which a continue statement
1833 should jump. */
1834
1835 void
1836 expand_loop_continue_here ()
1837 {
1838 do_pending_stack_adjust ();
1839 emit_note (0, NOTE_INSN_LOOP_CONT);
1840 emit_label (loop_stack->data.loop.continue_label);
1841 }
1842
1843 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1844 Pop the block off of loop_stack. */
1845
1846 void
1847 expand_end_loop ()
1848 {
1849 register rtx insn = get_last_insn ();
1850 register rtx start_label = loop_stack->data.loop.start_label;
1851 rtx last_test_insn = 0;
1852 int num_insns = 0;
1853
1854 /* Mark the continue-point at the top of the loop if none elsewhere. */
1855 if (start_label == loop_stack->data.loop.continue_label)
1856 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1857
1858 do_pending_stack_adjust ();
1859
1860 /* If optimizing, perhaps reorder the loop. If the loop
1861 starts with a conditional exit, roll that to the end
1862 where it will optimize together with the jump back.
1863
1864 We look for the last conditional branch to the exit that we encounter
1865 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1866 branch to the exit first, use it.
1867
1868 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1869 because moving them is not valid. */
1870
1871 if (optimize
1872 &&
1873 ! (GET_CODE (insn) == JUMP_INSN
1874 && GET_CODE (PATTERN (insn)) == SET
1875 && SET_DEST (PATTERN (insn)) == pc_rtx
1876 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1877 {
1878 /* Scan insns from the top of the loop looking for a qualified
1879 conditional exit. */
1880 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1881 insn = NEXT_INSN (insn))
1882 {
1883 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1884 break;
1885
1886 if (GET_CODE (insn) == NOTE
1887 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1888 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1889 break;
1890
1891 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1892 num_insns++;
1893
1894 if (last_test_insn && num_insns > 30)
1895 break;
1896
1897 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1898 && SET_DEST (PATTERN (insn)) == pc_rtx
1899 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1900 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1901 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1902 == loop_stack->data.loop.end_label))
1903 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1904 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1905 == loop_stack->data.loop.end_label))))
1906 last_test_insn = insn;
1907
1908 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1909 && GET_CODE (PATTERN (insn)) == SET
1910 && SET_DEST (PATTERN (insn)) == pc_rtx
1911 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1912 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1913 == loop_stack->data.loop.end_label))
1914 /* Include BARRIER. */
1915 last_test_insn = NEXT_INSN (insn);
1916 }
1917
1918 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1919 {
1920 /* We found one. Move everything from there up
1921 to the end of the loop, and add a jump into the loop
1922 to jump to there. */
1923 register rtx newstart_label = gen_label_rtx ();
1924 register rtx start_move = start_label;
1925
1926 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1927 then we want to move this note also. */
1928 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1929 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1930 == NOTE_INSN_LOOP_CONT))
1931 start_move = PREV_INSN (start_move);
1932
1933 emit_label_after (newstart_label, PREV_INSN (start_move));
1934 reorder_insns (start_move, last_test_insn, get_last_insn ());
1935 emit_jump_insn_after (gen_jump (start_label),
1936 PREV_INSN (newstart_label));
1937 emit_barrier_after (PREV_INSN (newstart_label));
1938 start_label = newstart_label;
1939 }
1940 }
1941
1942 emit_jump (start_label);
1943 emit_note (0, NOTE_INSN_LOOP_END);
1944 emit_label (loop_stack->data.loop.end_label);
1945
1946 POPSTACK (loop_stack);
1947
1948 last_expr_type = 0;
1949 }
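/* For example, the reordering above turns
       start: if (!cond) goto end;  body;  goto start;  end:
   into
       goto start;  body_lab: body;  start: if (!cond) goto end;
       goto body_lab;  end:
   which places the conditional exit next to the jump back, so that the
   later jump optimization pass can combine the two into a single
   conditional jump per iteration.  */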
1950
1951 /* Generate a jump to the current loop's continue-point.
1952 This is usually the top of the loop, but may be specified
1953 explicitly elsewhere. If not currently inside a loop,
1954 return 0 and do nothing; caller will print an error message. */
1955
1956 int
1957 expand_continue_loop (whichloop)
1958 struct nesting *whichloop;
1959 {
1960 last_expr_type = 0;
1961 if (whichloop == 0)
1962 whichloop = loop_stack;
1963 if (whichloop == 0)
1964 return 0;
1965 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1966 return 1;
1967 }
1968
1969 /* Generate a jump to exit the current loop. If not currently inside a loop,
1970 return 0 and do nothing; caller will print an error message. */
1971
1972 int
1973 expand_exit_loop (whichloop)
1974 struct nesting *whichloop;
1975 {
1976 last_expr_type = 0;
1977 if (whichloop == 0)
1978 whichloop = loop_stack;
1979 if (whichloop == 0)
1980 return 0;
1981 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1982 return 1;
1983 }
1984
1985 /* Generate a conditional jump to exit the current loop if COND
1986 evaluates to zero. If not currently inside a loop,
1987 return 0 and do nothing; caller will print an error message. */
1988
1989 int
1990 expand_exit_loop_if_false (whichloop, cond)
1991 struct nesting *whichloop;
1992 tree cond;
1993 {
1994 last_expr_type = 0;
1995 if (whichloop == 0)
1996 whichloop = loop_stack;
1997 if (whichloop == 0)
1998 return 0;
1999 do_jump (cond, whichloop->data.loop.end_label, NULL);
2000 return 1;
2001 }
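/* An illustrative sketch, not compiled in: the usual calling sequence
   for expanding `while (cond) body;' using the loop entry points
   above.  expand_stmt is a stand-in for the front end's statement
   expander.  */
#if 0
  struct nesting *loop = expand_start_loop (1);	/* 1: visible to `break' */
  expand_exit_loop_if_false (loop, cond);	/* exit when COND is false */
  expand_stmt (body);		/* `continue' jumps to the start label */
  expand_end_loop ();		/* jump back; emit the end label */
#endif
/* A `for' statement would instead call
   expand_start_loop_continue_elsewhere and mark its increment code
   with expand_loop_continue_here, so that `continue' jumps to the
   increment rather than to the top.  */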
2002
2003 /* Return non-zero if we should preserve sub-expressions as separate
2004 pseudos. We never do so if we aren't optimizing. We always do so
2005 if -fexpensive-optimizations.
2006
2007 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2008 the loop may still be a small one. */
2009
2010 int
2011 preserve_subexpressions_p ()
2012 {
2013 rtx insn;
2014
2015 if (flag_expensive_optimizations)
2016 return 1;
2017
2018 if (optimize == 0 || loop_stack == 0)
2019 return 0;
2020
2021 insn = get_last_insn_anywhere ();
2022
2023 return (insn
2024 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2025 < n_non_fixed_regs * 3));
2026
2027 }
2028
2029 /* Generate a jump to exit the current loop, conditional, binding contour
2030 or case statement. Not all such constructs are visible to this function,
2031 only those started with EXIT_FLAG nonzero. Individual languages use
2032 the EXIT_FLAG parameter to control which kinds of constructs you can
2033 exit this way.
2034
2035 If not currently inside anything that can be exited,
2036 return 0 and do nothing; caller will print an error message. */
2037
2038 int
2039 expand_exit_something ()
2040 {
2041 struct nesting *n;
2042 last_expr_type = 0;
2043 for (n = nesting_stack; n; n = n->all)
2044 if (n->exit_label != 0)
2045 {
2046 expand_goto_internal (0, n->exit_label, 0);
2047 return 1;
2048 }
2049
2050 return 0;
2051 }
2052 \f
2053 /* Generate RTL to return from the current function, with no value.
2054 (That is, we do not do anything about returning any value.) */
2055
2056 void
2057 expand_null_return ()
2058 {
2059 struct nesting *block = block_stack;
2060 rtx last_insn = 0;
2061
2062 /* Does any pending block have cleanups? */
2063
2064 while (block && block->data.block.cleanups == 0)
2065 block = block->next;
2066
2067 /* If yes, use a goto to return, since that runs cleanups. */
2068
2069 expand_null_return_1 (last_insn, block != 0);
2070 }
2071
2072 /* Generate RTL to return from the current function, with value VAL. */
2073
2074 void
2075 expand_value_return (val)
2076 rtx val;
2077 {
2078 struct nesting *block = block_stack;
2079 rtx last_insn = get_last_insn ();
2080 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2081
2082 /* Copy the value to the return location
2083 unless it's already there. */
2084
2085 if (return_reg != val)
2086 emit_move_insn (return_reg, val);
2087 if (GET_CODE (return_reg) == REG
2088 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2089 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2090
2091 /* Does any pending block have cleanups? */
2092
2093 while (block && block->data.block.cleanups == 0)
2094 block = block->next;
2095
2096 /* If yes, use a goto to return, since that runs cleanups.
2097 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2098
2099 expand_null_return_1 (last_insn, block != 0);
2100 }
2101
2102 /* Output a return with no value. If LAST_INSN is nonzero,
2103 pretend that the return takes place after LAST_INSN.
2104 If USE_GOTO is nonzero then don't use a return instruction;
2105 go to the return label instead. This causes any cleanups
2106 of pending blocks to be executed normally. */
2107
2108 static void
2109 expand_null_return_1 (last_insn, use_goto)
2110 rtx last_insn;
2111 int use_goto;
2112 {
2113 rtx end_label = cleanup_label ? cleanup_label : return_label;
2114
2115 clear_pending_stack_adjust ();
2116 do_pending_stack_adjust ();
2117 last_expr_type = 0;
2118
2119 /* PCC-struct return always uses an epilogue. */
2120 if (current_function_returns_pcc_struct || use_goto)
2121 {
2122 if (end_label == 0)
2123 end_label = return_label = gen_label_rtx ();
2124 expand_goto_internal (0, end_label, last_insn);
2125 return;
2126 }
2127
2128 /* Otherwise output a simple return-insn if one is available,
2129 unless it won't do the job. */
2130 #ifdef HAVE_return
2131 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2132 {
2133 emit_jump_insn (gen_return ());
2134 emit_barrier ();
2135 return;
2136 }
2137 #endif
2138
2139 /* Otherwise jump to the epilogue. */
2140 expand_goto_internal (0, end_label, last_insn);
2141 }
2142 \f
2143 /* Generate RTL to evaluate the expression RETVAL and return it
2144 from the current function. */
2145
2146 void
2147 expand_return (retval)
2148 tree retval;
2149 {
2150 /* If there are any cleanups to be performed, then they will
2151 be inserted following LAST_INSN. It is desirable
2152 that the last_insn, for such purposes, should be the
2153 last insn before computing the return value. Otherwise, cleanups
2154 which call functions can clobber the return value. */
2155 /* ??? rms: I think that is erroneous, because in C++ it would
2156 run destructors on variables that might be used in the subsequent
2157 computation of the return value. */
2158 rtx last_insn = 0;
2159 register rtx val = 0;
2160 register rtx op0;
2161 tree retval_rhs;
2162 int cleanups;
2163 struct nesting *block;
2164
2165 /* If function wants no value, give it none. */
2166 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2167 {
2168 expand_expr (retval, 0, VOIDmode, 0);
2169 expand_null_return ();
2170 return;
2171 }
2172
2173 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2174 cleanups = any_pending_cleanups (1);
2175
2176 if (TREE_CODE (retval) == RESULT_DECL)
2177 retval_rhs = retval;
2178 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2179 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2180 retval_rhs = TREE_OPERAND (retval, 1);
2181 else if (TREE_TYPE (retval) == void_type_node)
2182 /* Recognize tail-recursive call to void function. */
2183 retval_rhs = retval;
2184 else
2185 retval_rhs = NULL_TREE;
2186
2187 /* Only use `last_insn' if there are cleanups which must be run. */
2188 if (cleanups || cleanup_label != 0)
2189 last_insn = get_last_insn ();
2190
2191 /* Distribute return down conditional expr if either of the sides
2192 may involve tail recursion (see test below). This enhances the number
2193      of tail recursions we see.  Don't always do this, since it can produce
2194      sub-optimal code in some cases, and we distribute assignments into
2195      conditional expressions when it would help. */
2196
2197 if (optimize && retval_rhs != 0
2198 && frame_offset == 0
2199 && TREE_CODE (retval_rhs) == COND_EXPR
2200 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2201 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2202 {
2203 rtx label = gen_label_rtx ();
2204 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2205 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2206 DECL_RESULT (current_function_decl),
2207 TREE_OPERAND (retval_rhs, 1)));
2208 emit_label (label);
2209 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2210 DECL_RESULT (current_function_decl),
2211 TREE_OPERAND (retval_rhs, 2)));
2212 return;
2213 }
2214
2215 /* For tail-recursive call to current function,
2216 just jump back to the beginning.
2217 It's unsafe if any auto variable in this function
2218 has its address taken; for simplicity,
2219 require stack frame to be empty. */
2220 if (optimize && retval_rhs != 0
2221 && frame_offset == 0
2222 && TREE_CODE (retval_rhs) == CALL_EXPR
2223 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2224 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2225 /* Finish checking validity, and if valid emit code
2226 to set the argument variables for the new call. */
2227 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2228 DECL_ARGUMENTS (current_function_decl)))
2229 {
2230 if (tail_recursion_label == 0)
2231 {
2232 tail_recursion_label = gen_label_rtx ();
2233 emit_label_after (tail_recursion_label,
2234 tail_recursion_reentry);
2235 }
2236 expand_goto_internal (0, tail_recursion_label, last_insn);
2237 emit_barrier ();
2238 return;
2239 }
2240 #ifdef HAVE_return
2241 /* This optimization is safe if there are local cleanups
2242 because expand_null_return takes care of them.
2243 ??? I think it should also be safe when there is a cleanup label,
2244 because expand_null_return takes care of them, too.
2245 Any reason why not? */
2246 if (HAVE_return && cleanup_label == 0
2247 && ! current_function_returns_pcc_struct)
2248 {
2249 /* If this is return x == y; then generate
2250 if (x == y) return 1; else return 0;
2251 if we can do it with explicit return insns. */
2252 if (retval_rhs)
2253 switch (TREE_CODE (retval_rhs))
2254 {
2255 case EQ_EXPR:
2256 case NE_EXPR:
2257 case GT_EXPR:
2258 case GE_EXPR:
2259 case LT_EXPR:
2260 case LE_EXPR:
2261 case TRUTH_ANDIF_EXPR:
2262 case TRUTH_ORIF_EXPR:
2263 case TRUTH_AND_EXPR:
2264 case TRUTH_OR_EXPR:
2265 case TRUTH_NOT_EXPR:
2266 op0 = gen_label_rtx ();
2267 jumpifnot (retval_rhs, op0);
2268 expand_value_return (const1_rtx);
2269 emit_label (op0);
2270 expand_value_return (const0_rtx);
2271 return;
2272 }
2273 }
2274 #endif /* HAVE_return */
2275
2276 if (cleanups
2277 && retval_rhs != 0
2278 && TREE_TYPE (retval_rhs) != void_type_node
2279 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2280 {
2281 /* Calculate the return value into a pseudo reg. */
2282 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2283 emit_queue ();
2284 /* All temporaries have now been used. */
2285 free_temp_slots ();
2286 /* Return the calculated value, doing cleanups first. */
2287 expand_value_return (val);
2288 }
2289 else
2290 {
2291 /* No cleanups or no hard reg used;
2292 calculate value into hard return reg. */
2293 expand_expr (retval, 0, VOIDmode, 0);
2294 emit_queue ();
2295 free_temp_slots ();
2296 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2297 }
2298 }
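/* For example, the COND_EXPR distribution in expand_return above
   expands
       return p ? f (x) : y;
   as if it were written
       if (p) return f (x); else return y;
   so that when F is the current function, the first arm can be
   recognized by the tail-recursion test that follows it.  */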
2299
2300 /* Return 1 if the end of the generated RTL is not a barrier.
2301 This means code already compiled can drop through. */
2302
2303 int
2304 drop_through_at_end_p ()
2305 {
2306 rtx insn = get_last_insn ();
2307 while (insn && GET_CODE (insn) == NOTE)
2308 insn = PREV_INSN (insn);
2309 return insn && GET_CODE (insn) != BARRIER;
2310 }
2311 \f
2312 /* Emit code to alter this function's formal parms for a tail-recursive call.
2313 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2314 FORMALS is the chain of decls of formals.
2315 Return 1 if this can be done;
2316 otherwise return 0 and do not emit any code. */
2317
2318 static int
2319 tail_recursion_args (actuals, formals)
2320 tree actuals, formals;
2321 {
2322 register tree a = actuals, f = formals;
2323 register int i;
2324 register rtx *argvec;
2325
2326 /* Check that number and types of actuals are compatible
2327 with the formals. This is not always true in valid C code.
2328 Also check that no formal needs to be addressable
2329 and that all formals are scalars. */
2330
2331 /* Also count the args. */
2332
2333 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2334 {
2335 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2336 return 0;
2337 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2338 return 0;
2339 }
2340 if (a != 0 || f != 0)
2341 return 0;
2342
2343 /* Compute all the actuals. */
2344
2345 argvec = (rtx *) alloca (i * sizeof (rtx));
2346
2347 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2348 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2349
2350 /* Find which actual values refer to current values of previous formals.
2351 Copy each of them now, before any formal is changed. */
2352
2353 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2354 {
2355 int copy = 0;
2356 register int j;
2357 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2358 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2359 { copy = 1; break; }
2360 if (copy)
2361 argvec[i] = copy_to_reg (argvec[i]);
2362 }
2363
2364 /* Store the values of the actuals into the formals. */
2365
2366 for (f = formals, a = actuals, i = 0; f;
2367 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2368 {
2369 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2370 emit_move_insn (DECL_RTL (f), argvec[i]);
2371 else
2372 convert_move (DECL_RTL (f), argvec[i],
2373 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2374 }
2375
2376 free_temp_slots ();
2377 return 1;
2378 }
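/* For example, in the tail call
       int f (int a, int b) { ... return f (b, a); }
   argvec[1] is A's own register, which the store `a = b' would
   clobber before it is read; the reg_mentioned_p loop above therefore
   copies it to a fresh pseudo first, so the stores amount to
       t = a;  a = b;  b = t;  goto <tail_recursion_label>;  */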
2379 \f
2380 /* Generate the RTL code for entering a binding contour.
2381 The variables are declared one by one, by calls to `expand_decl'.
2382
2383 EXIT_FLAG is nonzero if this construct should be visible to
2384 `exit_something'. */
2385
2386 void
2387 expand_start_bindings (exit_flag)
2388 int exit_flag;
2389 {
2390 struct nesting *thisblock = ALLOC_NESTING ();
2391
2392 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2393
2394 /* Make an entry on block_stack for the block we are entering. */
2395
2396 thisblock->next = block_stack;
2397 thisblock->all = nesting_stack;
2398 thisblock->depth = ++nesting_depth;
2399 thisblock->data.block.stack_level = 0;
2400 thisblock->data.block.cleanups = 0;
2401 thisblock->data.block.function_call_count = 0;
2402 #if 0
2403 if (block_stack)
2404 {
2405 if (block_stack->data.block.cleanups == NULL_TREE
2406 && (block_stack->data.block.outer_cleanups == NULL_TREE
2407 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2408 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2409 else
2410 thisblock->data.block.outer_cleanups
2411 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2412 block_stack->data.block.outer_cleanups);
2413 }
2414 else
2415 thisblock->data.block.outer_cleanups = 0;
2416 #endif
2417 #if 1
2418 if (block_stack
2419 && !(block_stack->data.block.cleanups == NULL_TREE
2420 && block_stack->data.block.outer_cleanups == NULL_TREE))
2421 thisblock->data.block.outer_cleanups
2422 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2423 block_stack->data.block.outer_cleanups);
2424 else
2425 thisblock->data.block.outer_cleanups = 0;
2426 #endif
2427 thisblock->data.block.label_chain = 0;
2428 thisblock->data.block.innermost_stack_block = stack_block_stack;
2429 thisblock->data.block.first_insn = note;
2430 thisblock->data.block.block_start_count = ++block_start_count;
2431 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2432 block_stack = thisblock;
2433 nesting_stack = thisblock;
2434
2435 /* Make a new level for allocating stack slots. */
2436 push_temp_slots ();
2437 }
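/* An illustrative sketch, not compiled in: how a front end expands a
   braced block `{ int x = init; body }'.  X, DECLS and expand_stmt
   are stand-ins for front-end data, not names defined in this file.  */
#if 0
  expand_start_bindings (0);	/* emits NOTE_INSN_BLOCK_BEG */
  expand_decl (x);		/* allocate a pseudo reg or stack slot */
  expand_decl_init (x);		/* store the initial value */
  expand_stmt (body);
  expand_end_bindings (decls, 1, 0);	/* run cleanups, end the scope */
#endif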
2438
2439 /* Generate RTL code to terminate a binding contour.
2440 VARS is the chain of VAR_DECL nodes
2441 for the variables bound in this contour.
2442 MARK_ENDS is nonzero if we should put a note at the beginning
2443 and end of this binding contour.
2444
2445 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2446 (That is true automatically if the contour has a saved stack level.) */
2447
2448 void
2449 expand_end_bindings (vars, mark_ends, dont_jump_in)
2450 tree vars;
2451 int mark_ends;
2452 int dont_jump_in;
2453 {
2454 register struct nesting *thisblock = block_stack;
2455 register tree decl;
2456
2457 if (warn_unused)
2458 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2459 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2460 warning_with_decl (decl, "unused variable `%s'");
2461
2462 /* Mark the beginning and end of the scope if requested. */
2463
2464 if (mark_ends)
2465 emit_note (0, NOTE_INSN_BLOCK_END);
2466 else
2467 /* Get rid of the beginning-mark if we don't make an end-mark. */
2468 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2469
2470 if (thisblock->exit_label)
2471 {
2472 do_pending_stack_adjust ();
2473 emit_label (thisblock->exit_label);
2474 }
2475
2476 /* If necessary, make a handler for nonlocal gotos taking
2477 place in the function calls in this block. */
2478 if (function_call_count != thisblock->data.block.function_call_count
2479 && nonlocal_labels
2480 /* Make handler for outermost block
2481 if there were any nonlocal gotos to this function. */
2482 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2483 /* Make handler for inner block if it has something
2484 special to do when you jump out of it. */
2485 : (thisblock->data.block.cleanups != 0
2486 || thisblock->data.block.stack_level != 0)))
2487 {
2488 tree link;
2489 rtx afterward = gen_label_rtx ();
2490 rtx handler_label = gen_label_rtx ();
2491 rtx save_receiver = gen_reg_rtx (Pmode);
2492
2493 /* Don't let jump_optimize delete the handler. */
2494 LABEL_PRESERVE_P (handler_label) = 1;
2495
2496 /* Record the handler address in the stack slot for that purpose,
2497 during this block, saving and restoring the outer value. */
2498 if (thisblock->next != 0)
2499 {
2500 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2501 emit_insn_before (gen_move_insn (save_receiver,
2502 nonlocal_goto_handler_slot),
2503 thisblock->data.block.first_insn);
2504 }
2505 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2506 gen_rtx (LABEL_REF, Pmode,
2507 handler_label)),
2508 thisblock->data.block.first_insn);
2509
2510 /* Jump around the handler; it runs only when specially invoked. */
2511 emit_jump (afterward);
2512 emit_label (handler_label);
2513
2514 #ifdef HAVE_nonlocal_goto
2515 if (! HAVE_nonlocal_goto)
2516 #endif
2517 /* First adjust our frame pointer to its actual value. It was
2518 previously set to the start of the virtual area corresponding to
2519          the stacked variables when we branched here, and now needs to be
2520          adjusted to the actual hardware fp value.
2521 
2522          Assignments to virtual registers are converted by
2523 instantiate_virtual_regs into the corresponding assignment
2524 to the underlying register (fp in this case) that makes
2525 the original assignment true.
2526 So the following insn will actually be
2527 decrementing fp by STARTING_FRAME_OFFSET. */
2528 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2529
2530 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2531 if (fixed_regs[ARG_POINTER_REGNUM])
2532 {
2533 /* Now restore our arg pointer from the address at which it was saved
2534 in our stack frame.
2535          If there hasn't been space allocated for it yet, make some now. */
2536 if (arg_pointer_save_area == 0)
2537 arg_pointer_save_area
2538 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2539 emit_move_insn (virtual_incoming_args_rtx,
2540 /* We need a pseudo here,
2541 or else instantiate_virtual_regs_1 complains. */
2542 copy_to_reg (arg_pointer_save_area));
2543 }
2544 #endif
2545
2546 /* The handler expects the desired label address in the static chain
2547 register. It tests the address and does an appropriate jump
2548 to whatever label is desired. */
2549 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2550 /* Skip any labels we shouldn't be able to jump to from here. */
2551 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2552 {
2553 rtx not_this = gen_label_rtx ();
2554 rtx this = gen_label_rtx ();
2555 do_jump_if_equal (static_chain_rtx,
2556 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2557 this, 0);
2558 emit_jump (not_this);
2559 emit_label (this);
2560 expand_goto (TREE_VALUE (link));
2561 emit_label (not_this);
2562 }
2563 /* If label is not recognized, abort. */
2564 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2565 VOIDmode, 0);
2566 emit_label (afterward);
2567 }
2568
2569 /* Don't allow jumping into a block that has cleanups or a stack level. */
2570 if (dont_jump_in
2571 || thisblock->data.block.stack_level != 0
2572 || thisblock->data.block.cleanups != 0)
2573 {
2574 struct label_chain *chain;
2575
2576 /* Any labels in this block are no longer valid to go to.
2577 Mark them to cause an error message. */
2578 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2579 {
2580 DECL_TOO_LATE (chain->label) = 1;
2581 /* If any goto without a fixup came to this label,
2582 that must be an error, because gotos without fixups
2583 come from outside all saved stack-levels and all cleanups. */
2584 if (TREE_ADDRESSABLE (chain->label))
2585 error_with_decl (chain->label,
2586 "label `%s' used before containing binding contour");
2587 }
2588 }
2589
2590 /* Restore stack level in effect before the block
2591 (only if variable-size objects allocated). */
2592 /* Perform any cleanups associated with the block. */
2593
2594 if (thisblock->data.block.stack_level != 0
2595 || thisblock->data.block.cleanups != 0)
2596 {
2597 /* Don't let cleanups affect ({...}) constructs. */
2598 int old_expr_stmts_for_value = expr_stmts_for_value;
2599 rtx old_last_expr_value = last_expr_value;
2600 tree old_last_expr_type = last_expr_type;
2601 expr_stmts_for_value = 0;
2602
2603 /* Do the cleanups. */
2604 expand_cleanups (thisblock->data.block.cleanups, 0);
2605 do_pending_stack_adjust ();
2606
2607 expr_stmts_for_value = old_expr_stmts_for_value;
2608 last_expr_value = old_last_expr_value;
2609 last_expr_type = old_last_expr_type;
2610
2611 /* Restore the stack level. */
2612
2613 if (thisblock->data.block.stack_level != 0)
2614 {
2615 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2616 thisblock->data.block.stack_level, 0);
2617 if (nonlocal_goto_handler_slot != 0)
2618 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2619 }
2620
2621 /* Any gotos out of this block must also do these things.
2622 Also report any gotos with fixups that came to labels in this
2623 level. */
2624 fixup_gotos (thisblock,
2625 thisblock->data.block.stack_level,
2626 thisblock->data.block.cleanups,
2627 thisblock->data.block.first_insn,
2628 dont_jump_in);
2629 }
2630
2631 /* If doing stupid register allocation, make sure lives of all
2632 register variables declared here extend thru end of scope. */
2633
2634 if (obey_regdecls)
2635 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2636 {
2637 rtx rtl = DECL_RTL (decl);
2638 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2639 use_variable (rtl);
2640 }
2641
2642 /* Restore block_stack level for containing block. */
2643
2644 stack_block_stack = thisblock->data.block.innermost_stack_block;
2645 POPSTACK (block_stack);
2646
2647 /* Pop the stack slot nesting and free any slots at this level. */
2648 pop_temp_slots ();
2649 }
2650 \f
2651 /* Generate RTL for the automatic variable declaration DECL.
2652 (Other kinds of declarations are simply ignored if seen here.)
2653 CLEANUP is an expression to be executed at exit from this binding contour;
2654 for example, in C++, it might call the destructor for this variable.
2655
2656 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2657 either before or after calling `expand_decl' but before compiling
2658 any subsequent expressions. This is because CLEANUP may be expanded
2659 more than once, on different branches of execution.
2660 For the same reason, CLEANUP may not contain a CALL_EXPR
2661 except as its topmost node--else `preexpand_calls' would get confused.
2662
2663 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2664 that is not associated with any particular variable.
2665
2666 There is no special support here for C++ constructors.
2667 They should be handled by the proper code in DECL_INITIAL. */
2668
2669 void
2670 expand_decl (decl)
2671 register tree decl;
2672 {
2673 struct nesting *thisblock = block_stack;
2674 tree type = TREE_TYPE (decl);
2675
2676 /* Only automatic variables need any expansion done.
2677 Static and external variables, and external functions,
2678 will be handled by `assemble_variable' (called from finish_decl).
2679 TYPE_DECL and CONST_DECL require nothing.
2680 PARM_DECLs are handled in `assign_parms'. */
2681
2682 if (TREE_CODE (decl) != VAR_DECL)
2683 return;
2684 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2685 return;
2686
2687 /* Create the RTL representation for the variable. */
2688
2689 if (type == error_mark_node)
2690 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2691 else if (DECL_SIZE (decl) == 0)
2692 /* Variable with incomplete type. */
2693 {
2694 if (DECL_INITIAL (decl) == 0)
2695 /* Error message was already done; now avoid a crash. */
2696 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2697 else
2698 /* An initializer is going to decide the size of this array.
2699 Until we know the size, represent its address with a reg. */
2700 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2701 }
2702 else if (DECL_MODE (decl) != BLKmode
2703 /* If -ffloat-store, don't put explicit float vars
2704 into regs. */
2705 && !(flag_float_store
2706 && TREE_CODE (type) == REAL_TYPE)
2707 && ! TREE_THIS_VOLATILE (decl)
2708 && ! TREE_ADDRESSABLE (decl)
2709 && (TREE_REGDECL (decl) || ! obey_regdecls))
2710 {
2711 /* Automatic variable that can go in a register. */
2712 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2713 if (TREE_CODE (type) == POINTER_TYPE)
2714 mark_reg_pointer (DECL_RTL (decl));
2715 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2716 }
2717 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2718 {
2719 /* Variable of fixed size that goes on the stack. */
2720 rtx oldaddr = 0;
2721 rtx addr;
2722
2723 /* If we previously made RTL for this decl, it must be an array
2724 whose size was determined by the initializer.
2725 The old address was a register; set that register now
2726 to the proper address. */
2727 if (DECL_RTL (decl) != 0)
2728 {
2729 if (GET_CODE (DECL_RTL (decl)) != MEM
2730 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2731 abort ();
2732 oldaddr = XEXP (DECL_RTL (decl), 0);
2733 }
2734
2735 DECL_RTL (decl)
2736 = assign_stack_temp (DECL_MODE (decl),
2737 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2738 + BITS_PER_UNIT - 1)
2739 / BITS_PER_UNIT),
2740 1);
2741
2742 /* Set alignment we actually gave this decl. */
2743 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2744 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2745
2746 if (oldaddr)
2747 {
2748 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2749 if (addr != oldaddr)
2750 emit_move_insn (oldaddr, addr);
2751 }
2752
2753 /* If this is a memory ref that contains aggregate components,
2754 mark it as such for cse and loop optimize. */
2755 MEM_IN_STRUCT_P (DECL_RTL (decl))
2756 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2757 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2758 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2759 #if 0
2760 /* If this is in memory because of -ffloat-store,
2761 set the volatile bit, to prevent optimizations from
2762 undoing the effects. */
2763 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2764 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2765 #endif
2766 }
2767 else
2768 /* Dynamic-size object: must push space on the stack. */
2769 {
2770 rtx address, size;
2771
2772       /* Record the stack pointer on entry to block, if we have
2773 not already done so. */
2774 if (thisblock->data.block.stack_level == 0)
2775 {
2776 do_pending_stack_adjust ();
2777 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2778 &thisblock->data.block.stack_level,
2779 thisblock->data.block.first_insn);
2780 stack_block_stack = thisblock;
2781 }
2782
2783 /* Compute the variable's size, in bytes. */
2784 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2785 DECL_SIZE (decl),
2786 size_int (BITS_PER_UNIT)),
2787 0, VOIDmode, 0);
2788 free_temp_slots ();
2789
2790 /* This is equivalent to calling alloca. */
2791 current_function_calls_alloca = 1;
2792
2793 /* Allocate space on the stack for the variable. */
2794 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2795
2796 if (nonlocal_goto_handler_slot != 0)
2797 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
2798
2799   /* Reference the variable indirectly through that rtx. */
2800 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2801
2802 /* If this is a memory ref that contains aggregate components,
2803 mark it as such for cse and loop optimize. */
2804 MEM_IN_STRUCT_P (DECL_RTL (decl))
2805 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2806 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2807 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2808
2809 /* Indicate the alignment we actually gave this variable. */
2810 #ifdef STACK_BOUNDARY
2811 DECL_ALIGN (decl) = STACK_BOUNDARY;
2812 #else
2813 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2814 #endif
2815 }
2816
2817 if (TREE_THIS_VOLATILE (decl))
2818 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2819 if (TREE_READONLY (decl))
2820 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2821
2822 /* If doing stupid register allocation, make sure life of any
2823 register variable starts here, at the start of its scope. */
2824
2825 if (obey_regdecls)
2826 use_variable (DECL_RTL (decl));
2827 }
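/* For example, in GNU C
       void f (int n) { register int i; char buf[10]; char dyn[n]; }
   I satisfies the register tests above and gets a pseudo reg, BUF has
   a constant DECL_SIZE and gets a fixed slot from assign_stack_temp,
   and DYN takes the dynamic-size path: the block's entry stack
   pointer is saved and space comes from
   allocate_dynamic_stack_space, just as for alloca.  */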
2828 \f
2829 /* Emit code to perform the initialization of a declaration DECL. */
2830
2831 void
2832 expand_decl_init (decl)
2833 tree decl;
2834 {
2835 int was_used = TREE_USED (decl);
2836
2837 if (TREE_STATIC (decl))
2838 return;
2839
2840 /* Compute and store the initial value now. */
2841
2842 if (DECL_INITIAL (decl) == error_mark_node)
2843 {
2844 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2845 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2846 || code == POINTER_TYPE)
2847 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2848 0, 0);
2849 emit_queue ();
2850 }
2851 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2852 {
2853 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2854 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2855 emit_queue ();
2856 }
2857
2858 /* Don't let the initialization count as "using" the variable. */
2859 TREE_USED (decl) = was_used;
2860
2861 /* Free any temporaries we made while initializing the decl. */
2862 free_temp_slots ();
2863 }
2864
2865 /* CLEANUP is an expression to be executed at exit from this binding contour;
2866 for example, in C++, it might call the destructor for this variable.
2867
2868 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2869 either before or after calling `expand_decl' but before compiling
2870 any subsequent expressions. This is because CLEANUP may be expanded
2871 more than once, on different branches of execution.
2872 For the same reason, CLEANUP may not contain a CALL_EXPR
2873 except as its topmost node--else `preexpand_calls' would get confused.
2874
2875 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2876 that is not associated with any particular variable. */
2877
2878 int
2879 expand_decl_cleanup (decl, cleanup)
2880 tree decl, cleanup;
2881 {
2882 struct nesting *thisblock = block_stack;
2883
2884 /* Error if we are not in any block. */
2885 if (thisblock == 0)
2886 return 0;
2887
2888 /* Record the cleanup if there is one. */
2889
2890 if (cleanup != 0)
2891 {
2892 thisblock->data.block.cleanups
2893 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2894 /* If this block has a cleanup, it belongs in stack_block_stack. */
2895 stack_block_stack = thisblock;
2896 }
2897 return 1;
2898 }
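/* For example, for a C++ declaration `T x;' where T has a destructor,
   the front end calls expand_decl (x) and then
   expand_decl_cleanup (x, <expression calling the destructor on X>);
   the recorded cleanup is then expanded by expand_cleanups on every
   path out of the block, including gotos handled through
   fixup_gotos.  */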
2899 \f
2900 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2901 DECL_ELTS is the list of elements that belong to DECL's type.
2902 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2903
2904 void
2905 expand_anon_union_decl (decl, cleanup, decl_elts)
2906 tree decl, cleanup, decl_elts;
2907 {
2908 struct nesting *thisblock = block_stack;
2909 rtx x;
2910
2911   expand_decl (decl);
       expand_decl_cleanup (decl, cleanup);
2912 x = DECL_RTL (decl);
2913
2914 while (decl_elts)
2915 {
2916 tree decl_elt = TREE_VALUE (decl_elts);
2917 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2918 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2919
2920 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2921 instead create a new MEM rtx with the proper mode. */
2922 if (GET_CODE (x) == MEM)
2923 {
2924 if (mode == GET_MODE (x))
2925 DECL_RTL (decl_elt) = x;
2926 else
2927 {
2928 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2929 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2930 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2931 }
2932 }
2933 else if (GET_CODE (x) == REG)
2934 {
2935 if (mode == GET_MODE (x))
2936 DECL_RTL (decl_elt) = x;
2937 else
2938 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2939 }
2940 else
2941 abort ();
2942
2943 /* Record the cleanup if there is one. */
2944
2945 if (cleanup != 0)
2946 thisblock->data.block.cleanups
2947 = temp_tree_cons (decl_elt, cleanup_elt,
2948 thisblock->data.block.cleanups);
2949
2950 decl_elts = TREE_CHAIN (decl_elts);
2951 }
2952 }
2953 \f
2954 /* Expand a list of cleanups LIST.
2955 Elements may be expressions or may be nested lists.
2956
2957 If DONT_DO is nonnull, then any list-element
2958 whose TREE_PURPOSE matches DONT_DO is omitted.
2959 This is sometimes used to avoid a cleanup associated with
2960 a value that is being returned out of the scope. */
2961
2962 static void
2963 expand_cleanups (list, dont_do)
2964 tree list;
2965 tree dont_do;
2966 {
2967 tree tail;
2968 for (tail = list; tail; tail = TREE_CHAIN (tail))
2969 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2970 {
2971 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2972 expand_cleanups (TREE_VALUE (tail), dont_do);
2973 else
2974 {
2975 /* Cleanups may be run multiple times. For example,
2976 when exiting a binding contour, we expand the
2977 cleanups associated with that contour. When a goto
2978 within that binding contour has a target outside that
2979 contour, it will expand all cleanups from its scope to
2980 the target. Though the cleanups are expanded multiple
2981 times, the control paths are non-overlapping so the
2982 cleanups will not be executed twice. */
2983 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2984 free_temp_slots ();
2985 }
2986 }
2987 }
2988
2989 /* Expand a list of cleanups for a goto fixup.
2990 The expansion is put into the insn chain after the insn *BEFORE_JUMP
2991 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
2992
2993 static void
2994 fixup_cleanups (list, before_jump)
2995 tree list;
2996 rtx *before_jump;
2997 {
2998 rtx beyond_jump = get_last_insn ();
2999 rtx new_before_jump;
3000
3001 expand_cleanups (list, 0);
3002 /* Pop any pushes done in the cleanups,
3003 in case function is about to return. */
3004 do_pending_stack_adjust ();
3005
3006 new_before_jump = get_last_insn ();
3007
3008 if (beyond_jump != new_before_jump)
3009 {
3010 /* If cleanups expand to nothing, don't reorder. */
3011 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
3012 *before_jump = new_before_jump;
3013 }
3014 }
3015
3016 /* Move all cleanups from the current block_stack
3017 to the containing block_stack, where they are assumed to
3018 have been created. If anything can cause a temporary to
3019 be created, but not expanded for more than one level of
3020 block_stacks, then this code will have to change. */
3021
3022 void
3023 move_cleanups_up ()
3024 {
3025 struct nesting *block = block_stack;
3026 struct nesting *outer = block->next;
3027
3028 outer->data.block.cleanups
3029 = chainon (block->data.block.cleanups,
3030 outer->data.block.cleanups);
3031 block->data.block.cleanups = 0;
3032 }
3033
3034 tree
3035 last_cleanup_this_contour ()
3036 {
3037 if (block_stack == 0)
3038 return 0;
3039
3040 return block_stack->data.block.cleanups;
3041 }
3042
3043 /* Return 1 if there are any pending cleanups at this point.
3044 If THIS_CONTOUR is nonzero, check the current contour as well.
3045 Otherwise, look only at the contours that enclose this one. */
3046
3047 int
3048 any_pending_cleanups (this_contour)
3049 int this_contour;
3050 {
3051 struct nesting *block;
3052
3053 if (block_stack == 0)
3054 return 0;
3055
3056 if (this_contour && block_stack->data.block.cleanups != NULL)
3057 return 1;
3058 if (block_stack->data.block.cleanups == 0
3059 && (block_stack->data.block.outer_cleanups == 0
3060 #if 0
3061 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3062 #endif
3063 ))
3064 return 0;
3065
3066 for (block = block_stack->next; block; block = block->next)
3067 if (block->data.block.cleanups != 0)
3068 return 1;
3069
3070 return 0;
3071 }
3072 \f
3073 /* Enter a case (Pascal) or switch (C) statement.
3074 Push a block onto case_stack and nesting_stack
3075 to accumulate the case-labels that are seen
3076 and to record the labels generated for the statement.
3077
3078 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3079 Otherwise, this construct is transparent for `exit_something'.
3080
3081 EXPR is the index-expression to be dispatched on.
3082 TYPE is its nominal type. We could simply convert EXPR to this type,
3083 but instead we take short cuts. */
3084
3085 void
3086 expand_start_case (exit_flag, expr, type, printname)
3087 int exit_flag;
3088 tree expr;
3089 tree type;
3090 char *printname;
3091 {
3092 register struct nesting *thiscase = ALLOC_NESTING ();
3093
3094 /* Make an entry on case_stack for the case we are entering. */
3095
3096 thiscase->next = case_stack;
3097 thiscase->all = nesting_stack;
3098 thiscase->depth = ++nesting_depth;
3099 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3100 thiscase->data.case_stmt.case_list = 0;
3101 thiscase->data.case_stmt.index_expr = expr;
3102 thiscase->data.case_stmt.nominal_type = type;
3103 thiscase->data.case_stmt.default_label = 0;
3104 thiscase->data.case_stmt.num_ranges = 0;
3105 thiscase->data.case_stmt.printname = printname;
3106 thiscase->data.case_stmt.seenlabel = 0;
3107 case_stack = thiscase;
3108 nesting_stack = thiscase;
3109
3110 do_pending_stack_adjust ();
3111
3112 /* Make sure case_stmt.start points to something that won't
3113 need any transformation before expand_end_case. */
3114 if (GET_CODE (get_last_insn ()) != NOTE)
3115 emit_note (0, NOTE_INSN_DELETED);
3116
3117 thiscase->data.case_stmt.start = get_last_insn ();
3118 }
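/* An illustrative sketch, not compiled in: the calling sequence for a
   C `switch' statement.  LAB is a LABEL_DECL the front end makes for
   each case label; the error call stands in for a front-end
   diagnostic, and "switch statement" is an example PRINTNAME.  */
#if 0
  expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");
  /* ...while parsing the body, for each `case VALUE:' label: */
  if (pushcase (value, lab, &duplicate) == 2)
    error ("duplicate case value");
  /* `break' becomes expand_exit_something (), since EXIT_FLAG was 1. */
  expand_end_case (expr);
#endif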
3119
3120 /* Start a "dummy case statement" within which case labels are invalid
3121 and are not connected to any larger real case statement.
3122 This can be used if you don't want to let a case statement jump
3123 into the middle of certain kinds of constructs. */
3124
3125 void
3126 expand_start_case_dummy ()
3127 {
3128 register struct nesting *thiscase = ALLOC_NESTING ();
3129
3130 /* Make an entry on case_stack for the dummy. */
3131
3132 thiscase->next = case_stack;
3133 thiscase->all = nesting_stack;
3134 thiscase->depth = ++nesting_depth;
3135 thiscase->exit_label = 0;
3136 thiscase->data.case_stmt.case_list = 0;
3137 thiscase->data.case_stmt.start = 0;
3138 thiscase->data.case_stmt.nominal_type = 0;
3139 thiscase->data.case_stmt.default_label = 0;
3140 thiscase->data.case_stmt.num_ranges = 0;
3141 case_stack = thiscase;
3142 nesting_stack = thiscase;
3143 }
3144
3145 /* End a dummy case statement. */
3146
3147 void
3148 expand_end_case_dummy ()
3149 {
3150 POPSTACK (case_stack);
3151 }
3152
3153 /* Return the data type of the index-expression
3154 of the innermost case statement, or null if none. */
3155
3156 tree
3157 case_index_expr_type ()
3158 {
3159 if (case_stack)
3160 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3161 return 0;
3162 }
3163 \f
3164 /* Accumulate one case or default label inside a case or switch statement.
3165 VALUE is the value of the case (a null pointer, for a default label).
3166
3167 If not currently inside a case or switch statement, return 1 and do
3168 nothing. The caller will print a language-specific error message.
3169 If VALUE is a duplicate or overlaps, return 2 and do nothing
3170 except store the (first) duplicate node in *DUPLICATE.
3171 If VALUE is out of range, return 3 and do nothing.
3172    If we are jumping into the scope of a cleanup or var-sized array, return 5.
3173 Return 0 on success.
3174
3175 Extended to handle range statements. */
3176
3177 int
3178 pushcase (value, label, duplicate)
3179 register tree value;
3180 register tree label;
3181 tree *duplicate;
3182 {
3183 register struct case_node **l;
3184 register struct case_node *n;
3185 tree index_type;
3186 tree nominal_type;
3187
3188 /* Fail if not inside a real case statement. */
3189 if (! (case_stack && case_stack->data.case_stmt.start))
3190 return 1;
3191
3192 if (stack_block_stack
3193 && stack_block_stack->depth > case_stack->depth)
3194 return 5;
3195
3196 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3197 nominal_type = case_stack->data.case_stmt.nominal_type;
3198
3199 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3200 if (index_type == error_mark_node)
3201 return 0;
3202
3203 /* Convert VALUE to the type in which the comparisons are nominally done. */
3204 if (value != 0)
3205 value = convert (nominal_type, value);
3206
3207 /* If this is the first label, warn if any insns have been emitted. */
3208 if (case_stack->data.case_stmt.seenlabel == 0)
3209 {
3210 rtx insn;
3211 for (insn = case_stack->data.case_stmt.start;
3212 insn;
3213 insn = NEXT_INSN (insn))
3214 {
3215 if (GET_CODE (insn) == CODE_LABEL)
3216 break;
3217 if (GET_CODE (insn) != NOTE
3218 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3219 {
3220 warning ("unreachable code at beginning of %s",
3221 case_stack->data.case_stmt.printname);
3222 break;
3223 }
3224 }
3225 }
3226 case_stack->data.case_stmt.seenlabel = 1;
3227
3228 /* Fail if this value is out of range for the actual type of the index
3229 (which may be narrower than NOMINAL_TYPE). */
3230 if (value != 0 && ! int_fits_type_p (value, index_type))
3231 return 3;
3232
3233 /* Fail if this is a duplicate or overlaps another entry. */
3234 if (value == 0)
3235 {
3236 if (case_stack->data.case_stmt.default_label != 0)
3237 {
3238 *duplicate = case_stack->data.case_stmt.default_label;
3239 return 2;
3240 }
3241 case_stack->data.case_stmt.default_label = label;
3242 }
3243 else
3244 {
3245 /* Find the elt in the chain before which to insert the new value,
3246 to keep the chain sorted in increasing order.
3247 But report an error if this element is a duplicate. */
3248 for (l = &case_stack->data.case_stmt.case_list;
3249 /* Keep going past elements distinctly less than VALUE. */
3250 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3251 l = &(*l)->right)
3252 ;
3253 if (*l)
3254 {
3255 /* Element we will insert before must be distinctly greater;
3256 overlap means error. */
3257 if (! tree_int_cst_lt (value, (*l)->low))
3258 {
3259 *duplicate = (*l)->code_label;
3260 return 2;
3261 }
3262 }
3263
3264 /* Add this label to the chain, and succeed.
3265 Copy VALUE so it is on temporary rather than momentary
3266 obstack and will thus survive till the end of the case statement. */
3267 n = (struct case_node *) oballoc (sizeof (struct case_node));
3268 n->left = 0;
3269 n->right = *l;
3270 n->high = n->low = copy_node (value);
3271 n->code_label = label;
3272 *l = n;
3273 }
3274
3275 expand_label (label);
3276 return 0;
3277 }
3278
3279 /* Like pushcase but this case applies to all values
3280 between VALUE1 and VALUE2 (inclusive).
3281 The return value is the same as that of pushcase
3282 but there is one additional error code:
3283 4 means the specified range was empty. */
3284
3285 int
3286 pushcase_range (value1, value2, label, duplicate)
3287 register tree value1, value2;
3288 register tree label;
3289 tree *duplicate;
3290 {
3291 register struct case_node **l;
3292 register struct case_node *n;
3293 tree index_type;
3294 tree nominal_type;
3295
3296 /* Fail if not inside a real case statement. */
3297 if (! (case_stack && case_stack->data.case_stmt.start))
3298 return 1;
3299
3300 if (stack_block_stack
3301 && stack_block_stack->depth > case_stack->depth)
3302 return 5;
3303
3304 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3305 nominal_type = case_stack->data.case_stmt.nominal_type;
3306
3307 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3308 if (index_type == error_mark_node)
3309 return 0;
3310
3311 /* If this is the first label, warn if any insns have been emitted. */
3312 if (case_stack->data.case_stmt.seenlabel == 0)
3313 {
3314 rtx insn;
3315 for (insn = case_stack->data.case_stmt.start;
3316 insn;
3317 insn = NEXT_INSN (insn))
3318 {
3319 if (GET_CODE (insn) == CODE_LABEL)
3320 break;
3321 if (GET_CODE (insn) != NOTE
3322 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3323 {
3324 warning ("unreachable code at beginning of %s",
3325 case_stack->data.case_stmt.printname);
3326 break;
3327 }
3328 }
3329 }
3330 case_stack->data.case_stmt.seenlabel = 1;
3331
3332 /* Convert VALUEs to type in which the comparisons are nominally done. */
3333 if (value1 == 0) /* Negative infinity. */
3334     value1 = TYPE_MIN_VALUE (index_type);
3335 value1 = convert (nominal_type, value1);
3336
3337 if (value2 == 0) /* Positive infinity. */
3338     value2 = TYPE_MAX_VALUE (index_type);
3339 value2 = convert (nominal_type, value2);
3340
3341 /* Fail if these values are out of range. */
3342 if (! int_fits_type_p (value1, index_type))
3343 return 3;
3344
3345 if (! int_fits_type_p (value2, index_type))
3346 return 3;
3347
3348 /* Fail if the range is empty. */
3349 if (tree_int_cst_lt (value2, value1))
3350 return 4;
3351
3352 /* If the bounds are equal, turn this into the one-value case. */
3353 if (tree_int_cst_equal (value1, value2))
3354 return pushcase (value1, label, duplicate);
3355
3356 /* Find the elt in the chain before which to insert the new value,
3357 to keep the chain sorted in increasing order.
3358 But report an error if this element is a duplicate. */
3359 for (l = &case_stack->data.case_stmt.case_list;
3360 /* Keep going past elements distinctly less than this range. */
3361 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3362 l = &(*l)->right)
3363 ;
3364 if (*l)
3365 {
3366 /* Element we will insert before must be distinctly greater;
3367 overlap means error. */
3368 if (! tree_int_cst_lt (value2, (*l)->low))
3369 {
3370 *duplicate = (*l)->code_label;
3371 return 2;
3372 }
3373 }
3374
3375 /* Add this label to the chain, and succeed.
3376 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3377 obstack and will thus survive till the end of the case statement. */
3378
3379 n = (struct case_node *) oballoc (sizeof (struct case_node));
3380 n->left = 0;
3381 n->right = *l;
3382 n->low = copy_node (value1);
3383 n->high = copy_node (value2);
3384 n->code_label = label;
3385 *l = n;
3386
3387 expand_label (label);
3388
3389 case_stack->data.case_stmt.num_ranges++;
3390
3391 return 0;
3392 }
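/* For example, the GNU C case-range extension `case 1 ... 5:' arrives
   here with VALUE1 == 1 and VALUE2 == 5.  An omitted bound is passed
   as 0 and, as above, is replaced by TYPE_MIN_VALUE or TYPE_MAX_VALUE
   of the index type.  */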
3393 \f
3394 /* Called when the index of a switch statement is an enumerated type
3395 and there is no default label.
3396
3397 Checks that all enumeration literals are covered by the case
3398 expressions of a switch. Also, warn if there are any extra
3399 switch cases that are *not* elements of the enumerated type.
3400
3401 If all enumeration literals were covered by the case expressions,
3402 turn one of the expressions into the default expression since it should
3403 not be possible to fall through such a switch. */
3404
3405 void
3406 check_for_full_enumeration_handling (type)
3407 tree type;
3408 {
3409 register struct case_node *n;
3410 register struct case_node **l;
3411 register tree chain;
3412 int all_values = 1;
3413
3414 /* The time complexity of this loop is currently O(N * M), with
3415 N being the number of enumerals in the enumerated type, and
3416 M being the number of case expressions in the switch. */
3417
3418 for (chain = TYPE_VALUES (type);
3419 chain;
3420 chain = TREE_CHAIN (chain))
3421 {
3422 /* Find a match between enumeral and case expression, if possible.
3423 Quit looking when we've gone too far (since case expressions
3424 are kept sorted in ascending order). Warn about enumerals not
3425 handled in the switch statement case expression list. */
3426
3427 for (n = case_stack->data.case_stmt.case_list;
3428 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3429 n = n->right)
3430 ;
3431
3432 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3433 {
3434 if (warn_switch)
3435 warning ("enumerated value `%s' not handled in switch",
3436 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3437 all_values = 0;
3438 }
3439 }
3440
3441 /* Now we go the other way around; we warn if there are case
3442 expressions that don't correspond to enumerals. This can
3443 occur since C and C++ don't enforce type-checking of
3444 assignments to enumeration variables. */
3445
3446 if (warn_switch)
3447 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3448 {
3449 for (chain = TYPE_VALUES (type);
3450 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3451 chain = TREE_CHAIN (chain))
3452 ;
3453
3454 if (!chain)
3455 warning ("case value `%d' not in enumerated type `%s'",
3456 TREE_INT_CST_LOW (n->low),
3457 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3458 == IDENTIFIER_NODE)
3459 ? TYPE_NAME (type)
3460 : DECL_NAME (TYPE_NAME (type))));
3461 }
3462
3463 /* If all values were found as case labels, make one of them the default
3464 label. Thus, this switch will never fall through. We arbitrarily pick
3465 the last one to make the default since this is likely the most
3466 efficient choice. */
3467
3468 if (all_values)
3469 {
3470 for (l = &case_stack->data.case_stmt.case_list;
3471 (*l)->right != 0;
3472 l = &(*l)->right)
3473 ;
3474
3475 case_stack->data.case_stmt.default_label = (*l)->code_label;
3476 *l = 0;
3477 }
3478 }
3479 \f
3480 /* Terminate a case (Pascal) or switch (C) statement
3481 in which CASE_INDEX is the expression to be tested.
3482 Generate the code to test it and jump to the right place. */
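/* Editorial overview (not part of the original comment): the body
   below picks one of three strategies.  A constant index reduces to
   a single direct jump; a small or sparse set of case values becomes
   a balanced tree of compare-and-branch insns; a dense set becomes a
   dispatch table reached through casesi or tablejump.  */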
3483
3484 void
3485 expand_end_case (orig_index)
3486 tree orig_index;
3487 {
3488 tree minval, maxval, range;
3489 rtx default_label = 0;
3490 register struct case_node *n;
3491 int count;
3492 rtx index;
3493 rtx table_label = gen_label_rtx ();
3494 int ncases;
3495 rtx *labelvec;
3496 register int i;
3497 rtx before_case;
3498 register struct nesting *thiscase = case_stack;
3499 tree index_expr = thiscase->data.case_stmt.index_expr;
3500 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3501
3502 do_pending_stack_adjust ();
3503
3504 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3505 if (TREE_TYPE (index_expr) != error_mark_node)
3506 {
3507 /* If switch expression was an enumerated type, check that all
3508 enumeration literals are covered by the cases.
3509 No sense trying this if there's a default case, however. */
3510
3511 if (!thiscase->data.case_stmt.default_label
3512 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3513 && TREE_CODE (index_expr) != INTEGER_CST)
3514 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3515
3516 /* If this is the first label, warn if any insns have been emitted. */
3517 if (thiscase->data.case_stmt.seenlabel == 0)
3518 {
3519 rtx insn;
3520 for (insn = get_last_insn ();
3521 insn != case_stack->data.case_stmt.start;
3522 insn = PREV_INSN (insn))
3523 if (GET_CODE (insn) != NOTE
3524 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3525 {
3526 warning ("unreachable code at beginning of %s",
3527 case_stack->data.case_stmt.printname);
3528 break;
3529 }
3530 }
3531
3532 /* If we don't have a default-label, create one here,
3533 after the body of the switch. */
3534 if (thiscase->data.case_stmt.default_label == 0)
3535 {
3536 thiscase->data.case_stmt.default_label
3537 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3538 expand_label (thiscase->data.case_stmt.default_label);
3539 }
3540 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3541
3542 before_case = get_last_insn ();
3543
3544 /* Simplify the case-list before we count it. */
3545 group_case_nodes (thiscase->data.case_stmt.case_list);
3546
3547 /* Get upper and lower bounds of case values.
3548 Also convert all the case values to the index expr's data type. */
3549
3550 count = 0;
3551 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3552 {
3553 /* Check low and high label values are integers. */
3554 if (TREE_CODE (n->low) != INTEGER_CST)
3555 abort ();
3556 if (TREE_CODE (n->high) != INTEGER_CST)
3557 abort ();
3558
3559 n->low = convert (TREE_TYPE (index_expr), n->low);
3560 n->high = convert (TREE_TYPE (index_expr), n->high);
3561
3562 /* Count the elements and track the largest and smallest
3563 of them (treating them as signed even if they are not). */
3564 if (count++ == 0)
3565 {
3566 minval = n->low;
3567 maxval = n->high;
3568 }
3569 else
3570 {
3571 if (INT_CST_LT (n->low, minval))
3572 minval = n->low;
3573 if (INT_CST_LT (maxval, n->high))
3574 maxval = n->high;
3575 }
3576 /* A range counts double, since it requires two compares. */
3577 if (! tree_int_cst_equal (n->low, n->high))
3578 count++;
3579 }
3580
3581 /* Compute span of values. */
3582 if (count != 0)
3583 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3584 maxval, minval));
3585
3586 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3587 {
3588 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3589 emit_queue ();
3590 emit_jump (default_label);
3591 }
3592 /* If range of values is much bigger than number of values,
3593 make a sequence of conditional branches instead of a dispatch.
3594 If the switch-index is a constant, do it this way
3595 because we can optimize it. */
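/* For instance (illustrative only, not from the original source):
   with cases 1, 2 and 1000, count is 3 and range is 999, so
   (unsigned) 999 > 10 * 3 holds and we emit compares and branches
   rather than build a dispatch table of 1000 entries.  */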
3596 else if (TREE_INT_CST_HIGH (range) != 0
3597 #ifdef HAVE_casesi
3598 || (HAVE_casesi ? count < 4 : count < 5)
3599 #else
3600 /* If the machine does not have a case insn that compares the
3601 bounds, this means extra overhead for dispatch tables
3602 which raises the threshold for using them. */
3603 || count < 5
3604 #endif
3605 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3606 || TREE_CODE (index_expr) == INTEGER_CST
3607 /* These will reduce to a constant. */
3608 || (TREE_CODE (index_expr) == CALL_EXPR
3609 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3610 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3611 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3612 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3613 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3614 {
3615 index = expand_expr (index_expr, 0, VOIDmode, 0);
3616
3617 /* If the index is a short or char for which we do not have
3618 an insn to handle comparisons directly, convert it to
3619 a full integer now, rather than letting each comparison
3620 generate the conversion. */
3621
3622 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3623 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3624 == CODE_FOR_nothing))
3625 {
3626 enum machine_mode wider_mode;
3627 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3628 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3629 if (cmp_optab->handlers[(int) wider_mode].insn_code
3630 != CODE_FOR_nothing)
3631 {
3632 index = convert_to_mode (wider_mode, index, unsignedp);
3633 break;
3634 }
3635 }
3636
3637 emit_queue ();
3638 do_pending_stack_adjust ();
3639
3640 index = protect_from_queue (index, 0);
3641 if (GET_CODE (index) == MEM)
3642 index = copy_to_reg (index);
3643 if (GET_CODE (index) == CONST_INT
3644 || TREE_CODE (index_expr) == INTEGER_CST)
3645 {
3646 /* Make a tree node with the proper constant value
3647 if we don't already have one. */
3648 if (TREE_CODE (index_expr) != INTEGER_CST)
3649 {
3650 index_expr
3651 = build_int_2 (INTVAL (index),
3652 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3653 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3654 }
3655
3656 /* For constant index expressions we need only
3657 issue an unconditional branch to the appropriate
3658 target code. The job of removing any unreachable
3659 code is left to the optimization phase if the
3660 "-O" option is specified. */
3661 for (n = thiscase->data.case_stmt.case_list;
3662 n;
3663 n = n->right)
3664 {
3665 if (! tree_int_cst_lt (index_expr, n->low)
3666 && ! tree_int_cst_lt (n->high, index_expr))
3667 break;
3668 }
3669 if (n)
3670 emit_jump (label_rtx (n->code_label));
3671 else
3672 emit_jump (default_label);
3673 }
3674 else
3675 {
3676 /* If the index expression is not constant we generate
3677 a binary decision tree to select the appropriate
3678 target code. This is done as follows:
3679
3680 The list of cases is rearranged into a binary tree,
3681 nearly optimal assuming equal probability for each case.
3682
3683 The tree is transformed into RTL, eliminating
3684 redundant test conditions at the same time.
3685
3686 If program flow could reach the end of the
3687 decision tree an unconditional jump to the
3688 default code is emitted. */
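/* Sketch (illustrative only, not from the original source): the case
   values 1 2 3 4 5 6 7 are rebalanced around the pivot 4, giving

              4
            /   \
          2       6
         / \     / \
        1   3   5   7

   so that a lookup costs roughly log2 (N) comparisons.  */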
3689
3690 use_cost_table
3691 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3692 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3693 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3694 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3695 default_label, TREE_TYPE (index_expr));
3696 emit_jump_if_reachable (default_label);
3697 }
3698 }
3699 else
3700 {
3701 int win = 0;
3702 #ifdef HAVE_casesi
3703 if (HAVE_casesi)
3704 {
3705 enum machine_mode index_mode = SImode;
3706 int index_bits = GET_MODE_BITSIZE (index_mode);
3707
3708 /* Convert the index to SImode. */
3709 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3710 > GET_MODE_BITSIZE (index_mode))
3711 {
3712 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3713 rtx rangertx = expand_expr (range, 0, VOIDmode, 0);
3714
3715 /* We must handle the endpoints in the original mode. */
3716 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3717 index_expr, minval);
3718 minval = integer_zero_node;
3719 index = expand_expr (index_expr, 0, VOIDmode, 0);
3720 emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
3721 emit_jump_insn (gen_bltu (default_label));
3722 /* Now we can safely truncate. */
3723 index = convert_to_mode (index_mode, index, 0);
3724 }
3725 else
3726 {
3727 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3728 index_expr = convert (type_for_size (index_bits, 0),
3729 index_expr);
3730 index = expand_expr (index_expr, 0, VOIDmode, 0);
3731 }
3732 emit_queue ();
3733 index = protect_from_queue (index, 0);
3734 do_pending_stack_adjust ();
3735
3736 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3737 expand_expr (range, 0, VOIDmode, 0),
3738 table_label, default_label));
3739 win = 1;
3740 }
3741 #endif
3742 #ifdef HAVE_tablejump
3743 if (! win && HAVE_tablejump)
3744 {
3745 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3746 fold (build (MINUS_EXPR,
3747 TREE_TYPE (index_expr),
3748 index_expr, minval)));
3749 index = expand_expr (index_expr, 0, VOIDmode, 0);
3750 emit_queue ();
3751 index = protect_from_queue (index, 0);
3752 do_pending_stack_adjust ();
3753
3754 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3755 expand_expr (range, 0, VOIDmode, 0),
3756 table_label, default_label);
3757 win = 1;
3758 }
3759 #endif
3760 if (! win)
3761 abort ();
3762
3763 /* Get table of labels to jump to, in order of case index. */
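/* E.g. (illustrative only, not from the original source): for cases
   4, 9 and the range 5..6, minval is 4 and range is 5, so ncases is
   6 and the vector is filled as { L4, L56, L56, 0, 0, L9 }, L56
   being the label shared by the range 5..6; the zeros become the
   default label below.  */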
3764
3765 ncases = TREE_INT_CST_LOW (range) + 1;
3766 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3767 bzero (labelvec, ncases * sizeof (rtx));
3768
3769 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3770 {
3771 register int i
3772 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3773
3774 while (1)
3775 {
3776 labelvec[i]
3777 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3778 if (i + TREE_INT_CST_LOW (minval)
3779 == TREE_INT_CST_LOW (n->high))
3780 break;
3781 i++;
3782 }
3783 }
3784
3785 /* Fill in the gaps with the default. */
3786 for (i = 0; i < ncases; i++)
3787 if (labelvec[i] == 0)
3788 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3789
3790 /* Output the table.  */
3791 emit_label (table_label);
3792
3793 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3794 were an expression, instead of an #ifdef/#ifndef. */
3795 if (
3796 #ifdef CASE_VECTOR_PC_RELATIVE
3797 1 ||
3798 #endif
3799 flag_pic)
3800 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3801 gen_rtx (LABEL_REF, Pmode, table_label),
3802 gen_rtvec_v (ncases, labelvec)));
3803 else
3804 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3805 gen_rtvec_v (ncases, labelvec)));
3806
3807 /* If the case insn drops through the table,
3808 after the table we must jump to the default-label.
3809 Otherwise record no drop-through after the table. */
3810 #ifdef CASE_DROPS_THROUGH
3811 emit_jump (default_label);
3812 #else
3813 emit_barrier ();
3814 #endif
3815 }
3816
3817 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3818 reorder_insns (before_case, get_last_insn (),
3819 thiscase->data.case_stmt.start);
3820 }
3821 if (thiscase->exit_label)
3822 emit_label (thiscase->exit_label);
3823
3824 POPSTACK (case_stack);
3825
3826 free_temp_slots ();
3827 }
3828
3829 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3830
3831 static void
3832 do_jump_if_equal (op1, op2, label, unsignedp)
3833 rtx op1, op2, label;
3834 int unsignedp;
3835 {
3836 if (GET_CODE (op1) == CONST_INT
3837 && GET_CODE (op2) == CONST_INT)
3838 {
3839 if (INTVAL (op1) == INTVAL (op2))
3840 emit_jump (label);
3841 }
3842 else
3843 {
3844 enum machine_mode mode = GET_MODE (op1);
3845 if (mode == VOIDmode)
3846 mode = GET_MODE (op2);
3847 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3848 emit_jump_insn (gen_beq (label));
3849 }
3850 }
3851 \f
3852 /* Not all case values are encountered equally. This function
3853 uses a heuristic to weight case labels, in cases where that
3854 looks like a reasonable thing to do.
3855
3856 Right now, all we try to guess is text, and we establish the
3857 following weights:
3858
3859 chars above space: 16
3860 digits: 16
3861 default: 12
3862 space, punct: 8
3863 tab: 4
3864 newline: 2
3865 other "\" chars: 1
3866 remaining chars: 0
3867
3868 If we find any cases in the switch that are not either -1 or in the range
3869 of valid ASCII characters, or are control characters other than those
3870 commonly used with "\", don't treat this switch as scanning text.
3871
3872 Return 1 if these nodes are suitable for cost estimation, otherwise
3873 return 0. */
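/* E.g. (illustrative only, not from the original source): a switch
   over 'a', '0' and '\n' looks like text scanning (weights 16, 16
   and 2), while one containing case 200 or a stray control character
   does not, and the caller then assumes uniform probabilities.  */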
3874
3875 static int
3876 estimate_case_costs (node)
3877 case_node_ptr node;
3878 {
3879 tree min_ascii = build_int_2 (-1, -1);
3880 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3881 case_node_ptr n;
3882 int i;
3883
3884 /* If we haven't already made the cost table, make it now. Note that the
3885 lower bound of the table is -1, not zero. */
3886
3887 if (cost_table == NULL)
3888 {
3889 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3890 bzero (cost_table - 1, 129 * sizeof (short));
3891
3892 for (i = 0; i < 128; i++)
3893 {
3894 if (isalnum (i))
3895 cost_table[i] = 16;
3896 else if (ispunct (i))
3897 cost_table[i] = 8;
3898 else if (iscntrl (i))
3899 cost_table[i] = -1;
3900 }
3901
3902 cost_table[' '] = 8;
3903 cost_table['\t'] = 4;
3904 cost_table['\0'] = 4;
3905 cost_table['\n'] = 2;
3906 cost_table['\f'] = 1;
3907 cost_table['\v'] = 1;
3908 cost_table['\b'] = 1;
3909 }
3910
3911 /* See if all the case expressions look like text. It is text if the
3912 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
3913 as signed arithmetic since we don't want to ever access cost_table with a
3914 value less than -1. Also check that none of the constants in a range
3915 are strange control characters. */
3916
3917 for (n = node; n; n = n->right)
3918 {
3919 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3920 return 0;
3921
3922 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3923 if (cost_table[i] < 0)
3924 return 0;
3925 }
3926
3927 /* All interesting values are within the range of interesting
3928 ASCII characters. */
3929 return 1;
3930 }
3931
3932 /* Scan an ordered list of case nodes
3933 combining those with consecutive values or ranges.
3934
3935 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
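/* The grouping only happens when the labels reach the same real insn,
   as in (illustrative only, not from the original source)

     case 1: case 2: case 3: foo (); break;

   where all three code_labels precede the same call to foo.  */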
3936
3937 static void
3938 group_case_nodes (head)
3939 case_node_ptr head;
3940 {
3941 case_node_ptr node = head;
3942
3943 while (node)
3944 {
3945 rtx lb = next_real_insn (label_rtx (node->code_label));
3946 case_node_ptr np = node;
3947
3948 /* Try to group the successors of NODE with NODE. */
3949 while (((np = np->right) != 0)
3950 /* Do they jump to the same place? */
3951 && next_real_insn (label_rtx (np->code_label)) == lb
3952 /* Are their ranges consecutive? */
3953 && tree_int_cst_equal (np->low,
3954 fold (build (PLUS_EXPR,
3955 TREE_TYPE (node->high),
3956 node->high,
3957 integer_one_node)))
3958 /* An overflow is not consecutive. */
3959 && tree_int_cst_lt (node->high,
3960 fold (build (PLUS_EXPR,
3961 TREE_TYPE (node->high),
3962 node->high,
3963 integer_one_node))))
3964 {
3965 node->high = np->high;
3966 }
3967 /* NP is the first node after NODE which can't be grouped with it.
3968 Delete the nodes in between, and move on to that node. */
3969 node->right = np;
3970 node = np;
3971 }
3972 }
3973
3974 /* Take an ordered list of case nodes
3975 and transform them into a near optimal binary tree,
3976 on the assumption that any target code selection value is as
3977 likely as any other.
3978
3979 The transformation is performed by splitting the ordered
3980 list into two equal sections plus a pivot. The parts are
3981 then attached to the pivot as left and right branches. Each
3982 branch is then transformed recursively. */
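/* E.g. (illustrative only, not from the original source): five
   entries 1..5 split at the pivot 3, leaving 1 2 on the left and
   4 5 on the right; when use_cost_table is set, the split point
   moves so the two halves carry roughly equal total weight.  */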
3983
3984 static void
3985 balance_case_nodes (head, parent)
3986 case_node_ptr *head;
3987 case_node_ptr parent;
3988 {
3989 register case_node_ptr np;
3990
3991 np = *head;
3992 if (np)
3993 {
3994 int cost = 0;
3995 int i = 0;
3996 int ranges = 0;
3997 register case_node_ptr *npp;
3998 case_node_ptr left;
3999
4000 /* Count the number of entries on branch. Also count the ranges. */
4001
4002 while (np)
4003 {
4004 if (!tree_int_cst_equal (np->low, np->high))
4005 {
4006 ranges++;
4007 if (use_cost_table)
4008 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4009 }
4010
4011 if (use_cost_table)
4012 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4013
4014 i++;
4015 np = np->right;
4016 }
4017
4018 if (i > 2)
4019 {
4020 /* Split this list if it is long enough for that to help. */
4021 npp = head;
4022 left = *npp;
4023 if (use_cost_table)
4024 {
4025 /* Find the place in the list that bisects the list's total cost.
4026 Here I gets half the total cost. */
4027 int n_moved = 0;
4028 i = (cost + 1) / 2;
4029 while (1)
4030 {
4031 /* Skip nodes while their cost does not reach that amount. */
4032 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4033 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4034 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4035 if (i <= 0)
4036 break;
4037 npp = &(*npp)->right;
4038 n_moved += 1;
4039 }
4040 if (n_moved == 0)
4041 {
4042 /* Leave this branch lopsided, but optimize left-hand
4043 side and fill in `parent' fields for right-hand side. */
4044 np = *head;
4045 np->parent = parent;
4046 balance_case_nodes (&np->left, np);
4047 for (; np->right; np = np->right)
4048 np->right->parent = np;
4049 return;
4050 }
4051 }
4052 /* If there are just three nodes, split at the middle one. */
4053 else if (i == 3)
4054 npp = &(*npp)->right;
4055 else
4056 {
4057 /* Find the place in the list that bisects the list's total cost,
4058 where ranges count as 2.
4059 Here I gets half the total cost. */
4060 i = (i + ranges + 1) / 2;
4061 while (1)
4062 {
4063 /* Skip nodes while their cost does not reach that amount. */
4064 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4065 i--;
4066 i--;
4067 if (i <= 0)
4068 break;
4069 npp = &(*npp)->right;
4070 }
4071 }
4072 *head = np = *npp;
4073 *npp = 0;
4074 np->parent = parent;
4075 np->left = left;
4076
4077 /* Optimize each of the two split parts. */
4078 balance_case_nodes (&np->left, np);
4079 balance_case_nodes (&np->right, np);
4080 }
4081 else
4082 {
4083 /* Else leave this branch as one level,
4084 but fill in `parent' fields. */
4085 np = *head;
4086 np->parent = parent;
4087 for (; np->right; np = np->right)
4088 np->right->parent = np;
4089 }
4090 }
4091 }
4092 \f
4093 /* Search the parent sections of the case node tree
4094 to see if a test for the lower bound of NODE would be redundant.
4095 INDEX_TYPE is the type of the index expression.
4096
4097 The instructions to generate the case decision tree are
4098 output in the same order as nodes are processed, so it is
4099 known that if a parent node has already tested the value one
4100 less than the current node's low bound, the current node is
4101 bounded at its lower span. Thus the test would be redundant. */
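/* E.g. (illustrative only, not from the original source): if this
   node's low bound is 51 and some parent tested against 50, control
   reaches this node only with index > 50, so a test for index >= 51
   is provably redundant.  */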
4102
4103 static int
4104 node_has_low_bound (node, index_type)
4105 case_node_ptr node;
4106 tree index_type;
4107 {
4108 tree low_minus_one;
4109 case_node_ptr pnode;
4110
4111 /* If the lower bound of this node is the lowest value in the index type,
4112 we need not test it. */
4113
4114 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4115 return 1;
4116
4117 /* If this node has a left branch, the value at the left must be less
4118 than that at this node, so it cannot be bounded at the bottom and
4119 we need not bother testing any further. */
4120
4121 if (node->left)
4122 return 0;
4123
4124 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4125 node->low, integer_one_node));
4126
4127 /* If the subtraction above overflowed, we can't verify anything.
4128 Otherwise, look for a parent that tests our value - 1. */
4129
4130 if (! tree_int_cst_lt (low_minus_one, node->low))
4131 return 0;
4132
4133 for (pnode = node->parent; pnode; pnode = pnode->parent)
4134 if (tree_int_cst_equal (low_minus_one, pnode->high))
4135 return 1;
4136
4137 return 0;
4138 }
4139
4140 /* Search the parent sections of the case node tree
4141 to see if a test for the upper bound of NODE would be redundant.
4142 INDEX_TYPE is the type of the index expression.
4143
4144 The instructions to generate the case decision tree are
4145 output in the same order as nodes are processed, so it is
4146 known that if a parent node has already tested the value one
4147 greater than the current node's high bound, the current node is
4148 bounded at its upper span. Thus the test would be redundant. */
4149
4150 static int
4151 node_has_high_bound (node, index_type)
4152 case_node_ptr node;
4153 tree index_type;
4154 {
4155 tree high_plus_one;
4156 case_node_ptr pnode;
4157
4158 /* If the upper bound of this node is the highest value in the type
4159 of the index expression, we need not test against it. */
4160
4161 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4162 return 1;
4163
4164 /* If this node has a right branch, the value at the right must be greater
4165 than that at this node, so it cannot be bounded at the top and
4166 we need not bother testing any further. */
4167
4168 if (node->right)
4169 return 0;
4170
4171 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4172 node->high, integer_one_node));
4173
4174 /* If the addition above overflowed, we can't verify anything.
4175 Otherwise, look for a parent that tests our value + 1. */
4176
4177 if (! tree_int_cst_lt (node->high, high_plus_one))
4178 return 0;
4179
4180 for (pnode = node->parent; pnode; pnode = pnode->parent)
4181 if (tree_int_cst_equal (high_plus_one, pnode->low))
4182 return 1;
4183
4184 return 0;
4185 }
4186
4187 /* Search the parent sections of the
4188 case node tree to see if both tests for the upper and lower
4189 bounds of NODE would be redundant. */
4190
4191 static int
4192 node_is_bounded (node, index_type)
4193 case_node_ptr node;
4194 tree index_type;
4195 {
4196 return (node_has_low_bound (node, index_type)
4197 && node_has_high_bound (node, index_type));
4198 }
4199
4200 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4201
4202 static void
4203 emit_jump_if_reachable (label)
4204 rtx label;
4205 {
4206 if (GET_CODE (get_last_insn ()) != BARRIER)
4207 emit_jump (label);
4208 }
4209 \f
4210 /* Emit step-by-step code to select a case for the value of INDEX.
4211 The decision tree generated here follows the form of the
4212 case-node binary tree NODE, whose nodes represent test conditions.
4213 INDEX_TYPE is the type of the index of the switch.
4214
4215 Care is taken to prune redundant tests from the decision tree
4216 by detecting any boundary conditions already checked by
4217 emitted rtx. (See node_has_high_bound, node_has_low_bound
4218 and node_is_bounded, above.)
4219
4220 Where the test conditions can be shown to be redundant we emit
4221 an unconditional jump to the target code. As a further
4222 optimization, the subordinates of a tree node are examined to
4223 check for bounded nodes. In this case conditional and/or
4224 unconditional jumps as a result of the boundary check for the
4225 current node are arranged to target the subordinate's associated
4226 code for out-of-bound conditions on the current node.
4227
4228 We can assume that when control reaches the code generated here,
4229 the index value has already been compared with the parents
4230 of this node, and determined to be on the same side of each parent
4231 as this node is. Thus, if this node tests for the value 51,
4232 and a parent tested for 52, we don't need to consider
4233 the possibility of a value greater than 51. If another parent
4234 tests for the value 50, then this node need not test anything. */
4235
4236 static void
4237 emit_case_nodes (index, node, default_label, index_type)
4238 rtx index;
4239 case_node_ptr node;
4240 rtx default_label;
4241 tree index_type;
4242 {
4243 /* If INDEX has an unsigned type, we must make unsigned branches. */
4244 int unsignedp = TREE_UNSIGNED (index_type);
4245 typedef rtx rtx_function ();
4246 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4247 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4248 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4249 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4250 enum machine_mode mode = GET_MODE (index);
4251
4252 /* See if our parents have already tested everything for us.
4253 If they have, emit an unconditional jump for this node. */
4254 if (node_is_bounded (node, index_type))
4255 emit_jump (label_rtx (node->code_label));
4256
4257 else if (tree_int_cst_equal (node->low, node->high))
4258 {
4259 /* Node is single valued. First see if the index expression matches
4260 this node and then check our children, if any. */
4261
4262 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4263 label_rtx (node->code_label), unsignedp);
4264
4265 if (node->right != 0 && node->left != 0)
4266 {
4267 /* This node has children on both sides.
4268 Dispatch to one side or the other
4269 by comparing the index value with this node's value.
4270 If one subtree is bounded, check that one first,
4271 so we can avoid real branches in the tree. */
4272
4273 if (node_is_bounded (node->right, index_type))
4274 {
4275 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4276 GT, 0, mode, unsignedp, 0);
4277
4278 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4279 emit_case_nodes (index, node->left, default_label, index_type);
4280 }
4281
4282 else if (node_is_bounded (node->left, index_type))
4283 {
4284 emit_cmp_insn (index, expand_expr (node->high, 0,
4285 VOIDmode, 0),
4286 LT, 0, mode, unsignedp, 0);
4287 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4288 emit_case_nodes (index, node->right, default_label, index_type);
4289 }
4290
4291 else
4292 {
4293 /* Neither node is bounded. First distinguish the two sides;
4294 then emit the code for one side at a time. */
4295
4296 tree test_label
4297 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4298
4299 /* See if the value is on the right. */
4300 emit_cmp_insn (index, expand_expr (node->high, 0,
4301 VOIDmode, 0),
4302 GT, 0, mode, unsignedp, 0);
4303 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4304
4305 /* Value must be on the left.
4306 Handle the left-hand subtree. */
4307 emit_case_nodes (index, node->left, default_label, index_type);
4308 /* If left-hand subtree does nothing,
4309 go to default. */
4310 emit_jump_if_reachable (default_label);
4311
4312 /* Code branches here for the right-hand subtree. */
4313 expand_label (test_label);
4314 emit_case_nodes (index, node->right, default_label, index_type);
4315 }
4316 }
4317
4318 else if (node->right != 0 && node->left == 0)
4319 {
4320 /* Here we have a right child but no left so we issue a conditional
4321 branch to default and process the right child.
4322
4323 Omit the conditional branch to default if it would guard only one
4324 right child; it costs too much space to save so little time. */
4325
4326 if (node->right->right || node->right->left
4327 || !tree_int_cst_equal (node->right->low, node->right->high))
4328 {
4329 if (!node_has_low_bound (node, index_type))
4330 {
4331 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4332 LT, 0, mode, unsignedp, 0);
4333 emit_jump_insn ((*gen_blt_pat) (default_label));
4334 }
4335
4336 emit_case_nodes (index, node->right, default_label, index_type);
4337 }
4338 else
4339 /* We cannot process node->right normally
4340 since we haven't ruled out the numbers less than
4341 this node's value. So handle node->right explicitly. */
4342 do_jump_if_equal (index,
4343 expand_expr (node->right->low, 0, VOIDmode, 0),
4344 label_rtx (node->right->code_label), unsignedp);
4345 }
4346
4347 else if (node->right == 0 && node->left != 0)
4348 {
4349 /* Just one subtree, on the left. */
4350
4351 #if 0 /* The following code and comment were formerly part
4352 of the condition here, but they didn't work
4353 and I don't understand what the idea was. -- rms. */
4354 /* If our "most probable entry" is less probable
4355 than the default label, emit a jump to
4356 the default label using condition codes
4357 already lying around. With no right branch,
4358 a branch-greater-than will get us to the default
4359 label correctly. */
4360 if (use_cost_table
4361 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4362 ;
4363 #endif /* 0 */
4364 if (node->left->left || node->left->right
4365 || !tree_int_cst_equal (node->left->low, node->left->high))
4366 {
4367 if (!node_has_high_bound (node, index_type))
4368 {
4369 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4370 GT, 0, mode, unsignedp, 0);
4371 emit_jump_insn ((*gen_bgt_pat) (default_label));
4372 }
4373
4374 emit_case_nodes (index, node->left, default_label, index_type);
4375 }
4376 else
4377 /* We cannot process node->left normally
4378 since we haven't ruled out the numbers greater than
4379 this node's value. So handle node->left explicitly. */
4380 do_jump_if_equal (index,
4381 expand_expr (node->left->low, 0, VOIDmode, 0),
4382 label_rtx (node->left->code_label), unsignedp);
4383 }
4384 }
4385 else
4386 {
4387 /* Node is a range. These cases are very similar to those for a single
4388 value, except that we do not start by testing whether this node
4389 is the one to branch to. */
4390
4391 if (node->right != 0 && node->left != 0)
4392 {
4393 /* Node has subtrees on both sides.
4394 If the right-hand subtree is bounded,
4395 test for it first, since we can go straight there.
4396 Otherwise, we need to make a branch in the control structure,
4397 then handle the two subtrees. */
4398 tree test_label = 0;
4399
4400 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4401 GT, 0, mode, unsignedp, 0);
4402
4403 if (node_is_bounded (node->right, index_type))
4404 /* Right hand node is fully bounded so we can eliminate any
4405 testing and branch directly to the target code. */
4406 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4407 else
4408 {
4409 /* Right hand node requires testing.
4410 Branch to a label where we will handle it later. */
4411
4412 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4413 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4414 }
4415
4416 /* Value belongs to this node or to the left-hand subtree. */
4417
4418 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4419 GE, 0, mode, unsignedp, 0);
4420 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4421
4422 /* Handle the left-hand subtree. */
4423 emit_case_nodes (index, node->left, default_label, index_type);
4424
4425 /* If right node had to be handled later, do that now. */
4426
4427 if (test_label)
4428 {
4429 /* If the left-hand subtree fell through,
4430 don't let it fall into the right-hand subtree. */
4431 emit_jump_if_reachable (default_label);
4432
4433 expand_label (test_label);
4434 emit_case_nodes (index, node->right, default_label, index_type);
4435 }
4436 }
4437
4438 else if (node->right != 0 && node->left == 0)
4439 {
4440 /* Deal with values to the left of this node,
4441 if they are possible. */
4442 if (!node_has_low_bound (node, index_type))
4443 {
4444 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4445 LT, 0, mode, unsignedp, 0);
4446 emit_jump_insn ((*gen_blt_pat) (default_label));
4447 }
4448
4449 /* Value belongs to this node or to the right-hand subtree. */
4450
4451 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4452 LE, 0, mode, unsignedp, 0);
4453 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4454
4455 emit_case_nodes (index, node->right, default_label, index_type);
4456 }
4457
4458 else if (node->right == 0 && node->left != 0)
4459 {
4460 /* Deal with values to the right of this node,
4461 if they are possible. */
4462 if (!node_has_high_bound (node, index_type))
4463 {
4464 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4465 GT, 0, mode, unsignedp, 0);
4466 emit_jump_insn ((*gen_bgt_pat) (default_label));
4467 }
4468
4469 /* Value belongs to this node or to the left-hand subtree. */
4470
4471 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4472 GE, 0, mode, unsignedp, 0);
4473 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4474
4475 emit_case_nodes (index, node->left, default_label, index_type);
4476 }
4477
4478 else
4479 {
4480 /* Node has no children so we check low and high bounds to remove
4481 redundant tests. Only one of the bounds can exist,
4482 since otherwise this node is bounded--a case tested already. */
4483
4484 if (!node_has_high_bound (node, index_type))
4485 {
4486 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4487 GT, 0, mode, unsignedp, 0);
4488 emit_jump_insn ((*gen_bgt_pat) (default_label));
4489 }
4490
4491 if (!node_has_low_bound (node, index_type))
4492 {
4493 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4494 LT, 0, mode, unsignedp, 0);
4495 emit_jump_insn ((*gen_blt_pat) (default_label));
4496 }
4497
4498 emit_jump (label_rtx (node->code_label));
4499 }
4500 }
4501 }
4502 \f
4503 /* These routines are used by the loop unrolling code. They copy BLOCK trees
4504 so that the debugging info will be correct for the unrolled loop. */
4505
4506 /* Indexed by loop number, contains pointer to the first block in the loop,
4507 or zero if none. Only valid if doing loop unrolling and outputting debugger
4508 info. */
4509
4510 tree *loop_number_first_block;
4511
4512 /* Indexed by loop number, contains pointer to the last block in the loop,
4513 only valid if loop_number_first_block is nonzero. */
4514
4515 tree *loop_number_last_block;
4516
4517 /* Indexed by loop number, contains nesting level of first block in the
4518 loop, if any. Only valid if doing loop unrolling and outputting debugger
4519 info. */
4520
4521 int *loop_number_block_level;
4522
4523 /* Scan the function looking for loops, and walk the BLOCK tree at the
4524 same time. Record the first and last BLOCK tree corresponding to each
4525 loop. This function is similar to find_and_verify_loops in loop.c. */
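/* The note stream being matched looks roughly like this (illustrative
   only, not from the original source):

     NOTE_INSN_LOOP_BEG
       NOTE_INSN_BLOCK_BEG    <- topmost block; recorded as the
       ...                       loop's first and last block
       NOTE_INSN_BLOCK_END
     NOTE_INSN_LOOP_END  */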
4526
4527 void
4528 find_loop_tree_blocks (f)
4529 rtx f;
4530 {
4531 rtx insn;
4532 int current_loop = -1;
4533 int next_loop = -1;
4534 int loop;
4535 int block_level, tree_level;
4536 tree tree_block, parent_tree_block;
4537
4538 tree_block = DECL_INITIAL (current_function_decl);
4539 parent_tree_block = 0;
4540 block_level = 0;
4541 tree_level = -1;
4542
4543 /* Find boundaries of loops, and save the first and last BLOCK tree
4544 corresponding to each loop. */
4545
4546 for (insn = f; insn; insn = NEXT_INSN (insn))
4547 {
4548 if (GET_CODE (insn) == NOTE)
4549 switch (NOTE_LINE_NUMBER (insn))
4550 {
4551 case NOTE_INSN_LOOP_BEG:
4552 loop_number_block_level[++next_loop] = block_level;
4553 loop_number_first_block[next_loop] = 0;
4554 current_loop = next_loop;
4555 break;
4556
4557 case NOTE_INSN_LOOP_END:
4558 if (current_loop == -1)
4559 abort ();
4560
4561 current_loop = loop_outer_loop[current_loop];
4562 break;
4563
4564 case NOTE_INSN_BLOCK_BEG:
4565 if (tree_level < block_level)
4566 {
4567 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4568 we must now visit the subtree of the current block. */
4569 parent_tree_block = tree_block;
4570 tree_block = BLOCK_SUBBLOCKS (tree_block);
4571 tree_level++;
4572 }
4573 else if (tree_level > block_level)
4574 abort ();
4575
4576 /* Save this block tree here for all nested loops for which
4577 this is the topmost block. */
4578 for (loop = current_loop;
4579 loop != -1 && block_level == loop_number_block_level[loop];
4580 loop = loop_outer_loop[loop])
4581 {
4582 if (loop_number_first_block[loop] == 0)
4583 loop_number_first_block[loop] = tree_block;
4584 loop_number_last_block[loop] = tree_block;
4585 }
4586
4587 block_level++;
4588 break;
4589
4590 case NOTE_INSN_BLOCK_END:
4591 block_level--;
4592 if (tree_level > block_level)
4593 {
4594 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4595 we must now visit the parent of the current tree. */
4596 if (tree_block != 0 || parent_tree_block == 0)
4597 abort ();
4598 tree_block = parent_tree_block;
4599 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4600 tree_level--;
4601 }
4602 tree_block = BLOCK_CHAIN (tree_block);
4603 break;
4604 }
4605 }
4606 }
4607
4608 /* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4609 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4610
4611 Note that we only copy the topmost level of tree nodes; they will share
4612 pointers to the same subblocks. */
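/* E.g. (illustrative only, not from the original source): unrolling
   with COPIES == 4 a loop whose block chain is B1 -> B2 appends
   three shallow copies, giving B1 B2 B1' B2' B1'' B2''; each copy
   shares BLOCK_VARS and BLOCK_SUBBLOCKS with its original.  */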
4613
4614 void
4615 unroll_block_trees (loop_number, copies)
4616 int loop_number;
4617 int copies;
4618 {
4619 int i;
4620
4621 /* First check whether there are any blocks that need to be copied. */
4622 if (loop_number_first_block[loop_number])
4623 {
4624 tree first_block = loop_number_first_block[loop_number];
4625 tree last_block = loop_number_last_block[loop_number];
4626 tree last_block_created = 0;
4627
4628 for (i = 0; i < copies - 1; i++)
4629 {
4630 tree block = first_block;
4631 tree insert_after = last_block;
4632 tree copied_block;
4633
4634 /* Copy every block between first_block and last_block inclusive,
4635 inserting the new blocks after last_block. */
4636 do
4637 {
4638 tree new_block = make_node (BLOCK);
4639 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4640 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4641 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4642 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4643 TREE_USED (new_block) = TREE_USED (block);
4644
4645 /* Insert the new block after the insertion point, and move
4646 the insertion point to the new block. This ensures that
4647 the copies are inserted in the right order. */
4648 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4649 BLOCK_CHAIN (insert_after) = new_block;
4650 insert_after = new_block;
4651
4652 copied_block = block;
4653 block = BLOCK_CHAIN (block);
4654 }
4655 while (copied_block != last_block);
4656
4657 /* Remember the last block created, so that we can update the
4658 info in the tables. */
4659 if (last_block_created == 0)
4660 last_block_created = insert_after;
4661 }
4662
4663 /* For all nested loops for which LAST_BLOCK was originally the last
4664 block, update the tables to indicate that LAST_BLOCK_CREATED is
4665 now the last block in the loop. */
4666 for (i = loop_number; last_block == loop_number_last_block[i];
4667 i = loop_outer_loop[i])
4668 loop_number_last_block[i] = last_block_created;
4669 }
4670 }