/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

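/* Illustrative sketch only (not part of the compiler): how a front end
   might drive the routines in this file to expand `if (cond) stmt;'.
   The helper `parse_then_clause' and the tree variable COND are
   hypothetical.  */
#if 0
static void
example_expand_if (cond)
     tree cond;
{
  expand_start_cond (cond, 0);	/* emit the test; jump past clause if false */
  parse_then_clause ();		/* parser expands the then-clause here */
  expand_end_cond ();		/* define the label jumped to if false */
}
#endif
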
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern char *xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  The tree is then
   balanced, with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, it may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  except_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_indirect_jump (x);
  emit_barrier ();
}
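
/* Illustrative sketch only: for GNU C's computed goto, `goto *ptr;',
   a front end hands the pointer expression straight to the routine
   above.  PTR_EXPR is a hypothetical tree built by the parser.  */
#if 0
  expand_computed_goto (ptr_expr);	/* emits an indirect jump */
#endif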
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      nonlocal_goto_stack_level
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_insn_before (gen_move_insn (nonlocal_goto_stack_level,
				       stack_pointer_rtx),
			tail_recursion_reentry);
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      p->nonlocal_goto_handler_slot,
				      p->nonlocal_goto_stack_level,
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));
	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  temp = copy_to_reg (p->nonlocal_goto_handler_slot);
	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  emit_move_insn (stack_pointer_rtx, p->nonlocal_goto_stack_level);
	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx,
			  gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}
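
/* Illustrative sketch only (not compiler code): how a C front end
   expands `goto l;' followed later by `l: ;'.  LABEL_DECL_L is a
   hypothetical LABEL_DECL tree for `l'.  */
#if 0
  expand_goto (label_decl_l);	/* at the goto statement */
  /* ... other statements ... */
  expand_label (label_decl_l);	/* at the label definition */
#endif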

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx for that target.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_move_insn (stack_pointer_rtx, stack_level);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_REGDECL (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_REGDECL (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level),
			     f->before_jump);
	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
	{
	  int j;
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

	  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	    if (!strcmp (regname, reg_names[j]))
	      break;

	  if (j == FIRST_PSEUDO_REGISTER)
	    {
	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
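
/* Illustrative sketch only: a user-level extended asm such as

     asm volatile ("move %1,%0" : "=r" (dst) : "g" (src) : "d0");

   arrives here with STRING holding the template, OUTPUTS and INPUTS as
   TREE_LISTs whose TREE_PURPOSEs are the constraint strings and whose
   TREE_VALUEs are the expressions, CLOBBERS naming `d0', and VOL
   nonzero.  (The template and the register name `d0' are made up for
   illustration; `d0' assumes an m68k-style target.)  */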
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	/* This case needs to be written.  */
	abort ();
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
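
/* For example (illustrative): with -Wunused, a statement like `x + 1;'
   draws "value computed is not used", while `(void) x;', `i++;' and
   `f ();' do not.  */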

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement has void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
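
/* Illustrative sketch only (not front-end code): expanding the GNU C
   statement expression `({ foo (); 5; })'.  EXPR_1 and EXPR_2 are
   hypothetical trees for the two substatements.  */
#if 0
  tree t = expand_start_stmt_expr ();
  expand_expr_stmt (expr_1);	/* foo ();  */
  expand_expr_stmt (expr_2);	/* 5;  -- its value is remembered */
  t = expand_end_stmt_expr (t);	/* T now carries the type and RTL value */
#endif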
\f
/* The exception handling nesting looks like this:

		<-- Level N-1
   {		<-- exception handler block
		<-- Level N
		<-- in an exception handler
	{	<-- try block
	:	<-- in a TRY block
	:	<-- in an exception handler
	:
	}

	{	<-- except block
	:	<-- in an except block
	:	<-- in an exception handler
	:
	}

   }  */

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */
int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the exception contour we are
     entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Create the label that ends the whole construct
   and jump to it, so that a try block which finishes without raising
   skips the exception clauses.  The label itself is emitted by
   expand_end_except.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */
int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    goto nada;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    prev = raised;
	  if (prev)
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	nada:
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */
int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

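/* End a catch clause: jump to the end of the whole exception construct.
   Return nonzero if in an exception handling construct, otherwise
   return 0.  */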
int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}
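
/* Illustrative sketch only: one plausible expansion sequence for a
   language construct like `try ... except (E) ... end'.  COND, BODY,
   E and HANDLER are hypothetical trees built by a front end.  */
#if 0
  tree raised;
  expand_start_try (cond, 0, 1);	/* test COND; enter the try block */
  expand_expr_stmt (body);		/* the guarded statements */
  expand_end_try ();			/* jump past the handlers */
  expand_start_except (0, 1);		/* emit the handler label */
  expand_catch (e);			/* this handler catches E */
  expand_expr_stmt (handler);		/* the handler statements */
  expand_end_catch ();			/* jump to end of the construct */
  raised = expand_end_except ();	/* exceptions raised but unhandled */
#endif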
\f
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL);
}
1699
1700 /* Generate RTL between then-clause and the elseif-clause
1701 of an if-then-elseif-.... */
1702
1703 void
1704 expand_start_elseif (cond)
1705 tree cond;
1706 {
1707 if (cond_stack->data.cond.endif_label == 0)
1708 cond_stack->data.cond.endif_label = gen_label_rtx ();
1709 emit_jump (cond_stack->data.cond.endif_label);
1710 emit_label (cond_stack->data.cond.next_label);
1711 cond_stack->data.cond.next_label = gen_label_rtx ();
1712 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1713 }
1714
1715 /* Generate RTL between the then-clause and the else-clause
1716 of an if-then-else. */
1717
1718 void
1719 expand_start_else ()
1720 {
1721 if (cond_stack->data.cond.endif_label == 0)
1722 cond_stack->data.cond.endif_label = gen_label_rtx ();
1723 emit_jump (cond_stack->data.cond.endif_label);
1724 emit_label (cond_stack->data.cond.next_label);
1725 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1726 }
1727
1728 /* Generate RTL for the end of an if-then.
1729 Pop the record for it off of cond_stack. */
1730
1731 void
1732 expand_end_cond ()
1733 {
1734 struct nesting *thiscond = cond_stack;
1735
1736 do_pending_stack_adjust ();
1737 if (thiscond->data.cond.next_label)
1738 emit_label (thiscond->data.cond.next_label);
1739 if (thiscond->data.cond.endif_label)
1740 emit_label (thiscond->data.cond.endif_label);
1741
1742 POPSTACK (cond_stack);
1743 last_expr_type = 0;
1744 }
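/* Sketch of front-end usage (not itself part of this file): to expand
   `if (COND) THEN-BODY; else ELSE-BODY;' a parser calls, in order,

       expand_start_cond (cond, 0);
       ... expand THEN-BODY ...
       expand_start_else ();
       ... expand ELSE-BODY ...
       expand_end_cond ();

   substituting expand_start_elseif (cond2) for expand_start_else
   at each `else if' arm.  */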
1745 \f
1746 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1747 loop should be exited by `exit_something'. This is a loop for which
1748 `expand_continue' will jump to the top of the loop.
1749
1750 Make an entry on loop_stack to record the labels associated with
1751 this loop. */
1752
1753 struct nesting *
1754 expand_start_loop (exit_flag)
1755 int exit_flag;
1756 {
1757 register struct nesting *thisloop = ALLOC_NESTING ();
1758
1759 /* Make an entry on loop_stack for the loop we are entering. */
1760
1761 thisloop->next = loop_stack;
1762 thisloop->all = nesting_stack;
1763 thisloop->depth = ++nesting_depth;
1764 thisloop->data.loop.start_label = gen_label_rtx ();
1765 thisloop->data.loop.end_label = gen_label_rtx ();
1766 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1767 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1768 loop_stack = thisloop;
1769 nesting_stack = thisloop;
1770
1771 do_pending_stack_adjust ();
1772 emit_queue ();
1773 emit_note (0, NOTE_INSN_LOOP_BEG);
1774 emit_label (thisloop->data.loop.start_label);
1775
1776 return thisloop;
1777 }
1778
1779 /* Like expand_start_loop but for a loop where the continuation point
1780 (for expand_continue_loop) will be specified explicitly. */
1781
1782 struct nesting *
1783 expand_start_loop_continue_elsewhere (exit_flag)
1784 int exit_flag;
1785 {
1786 struct nesting *thisloop = expand_start_loop (exit_flag);
1787 loop_stack->data.loop.continue_label = gen_label_rtx ();
1788 return thisloop;
1789 }
1790
1791 /* Specify the continuation point for a loop started with
1792 expand_start_loop_continue_elsewhere.
1793 Use this at the point in the code to which a continue statement
1794 should jump. */
1795
1796 void
1797 expand_loop_continue_here ()
1798 {
1799 do_pending_stack_adjust ();
1800 emit_note (0, NOTE_INSN_LOOP_CONT);
1801 emit_label (loop_stack->data.loop.continue_label);
1802 }
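/* Sketch of front-end usage: `while (COND) BODY;' can be expanded as

       expand_start_loop (1);
       expand_exit_loop_if_false (0, cond);
       ... expand BODY ...
       expand_end_loop ();

   while `for (INIT; COND; INCR) BODY;' expands INIT first, uses
   expand_start_loop_continue_elsewhere (1), and emits INCR between
   expand_loop_continue_here () and expand_end_loop ().  */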
1803
1804 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1805 Pop the block off of loop_stack. */
1806
1807 void
1808 expand_end_loop ()
1809 {
1810 register rtx insn = get_last_insn ();
1811 register rtx start_label = loop_stack->data.loop.start_label;
1812 rtx last_test_insn = 0;
1813 int num_insns = 0;
1814
1815 /* Mark the continue-point at the top of the loop if none elsewhere. */
1816 if (start_label == loop_stack->data.loop.continue_label)
1817 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1818
1819 do_pending_stack_adjust ();
1820
1821 /* If optimizing, perhaps reorder the loop. If the loop
1822 starts with a conditional exit, roll that to the end
1823 where it will optimize together with the jump back.
1824
1825 We look for the last conditional branch to the exit that we encounter
1826 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1827 branch to the exit first, use it.
1828
1829 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1830 because moving them is not valid. */
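/* Sketch of the effect: a loop emitted as
       start: if (!COND) goto end;  BODY;  goto start;  end:
   is rearranged into
       goto start;  top: BODY;  start: if (!COND) goto end;  goto top;  end:
   which jump optimization later folds into a single conditional
   branch per iteration, `if (COND) goto top'.  */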
1831
1832 if (optimize
1833 &&
1834 ! (GET_CODE (insn) == JUMP_INSN
1835 && GET_CODE (PATTERN (insn)) == SET
1836 && SET_DEST (PATTERN (insn)) == pc_rtx
1837 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1838 {
1839 /* Scan insns from the top of the loop looking for a qualified
1840 conditional exit. */
1841 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1842 insn = NEXT_INSN (insn))
1843 {
1844 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1845 break;
1846
1847 if (GET_CODE (insn) == NOTE
1848 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1849 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1850 break;
1851
1852 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1853 num_insns++;
1854
1855 if (last_test_insn && num_insns > 30)
1856 break;
1857
1858 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1859 && SET_DEST (PATTERN (insn)) == pc_rtx
1860 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1861 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1862 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1863 == loop_stack->data.loop.end_label))
1864 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1865 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1866 == loop_stack->data.loop.end_label))))
1867 last_test_insn = insn;
1868
1869 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1870 && GET_CODE (PATTERN (insn)) == SET
1871 && SET_DEST (PATTERN (insn)) == pc_rtx
1872 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1873 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1874 == loop_stack->data.loop.end_label))
1875 /* Include BARRIER. */
1876 last_test_insn = NEXT_INSN (insn);
1877 }
1878
1879 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1880 {
1881 /* We found one. Move everything from there up
1882 to the end of the loop, and add a jump into the loop
1883 to jump to there. */
1884 register rtx newstart_label = gen_label_rtx ();
1885 register rtx start_move = start_label;
1886
1887 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1888 then we want to move this note also. */
1889 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1890 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1891 == NOTE_INSN_LOOP_CONT))
1892 start_move = PREV_INSN (start_move);
1893
1894 emit_label_after (newstart_label, PREV_INSN (start_move));
1895 reorder_insns (start_move, last_test_insn, get_last_insn ());
1896 emit_jump_insn_after (gen_jump (start_label),
1897 PREV_INSN (newstart_label));
1898 emit_barrier_after (PREV_INSN (newstart_label));
1899 start_label = newstart_label;
1900 }
1901 }
1902
1903 emit_jump (start_label);
1904 emit_note (0, NOTE_INSN_LOOP_END);
1905 emit_label (loop_stack->data.loop.end_label);
1906
1907 POPSTACK (loop_stack);
1908
1909 last_expr_type = 0;
1910 }
1911
1912 /* Generate a jump to the current loop's continue-point.
1913 This is usually the top of the loop, but may be specified
1914 explicitly elsewhere. If not currently inside a loop,
1915 return 0 and do nothing; caller will print an error message. */
1916
1917 int
1918 expand_continue_loop (whichloop)
1919 struct nesting *whichloop;
1920 {
1921 last_expr_type = 0;
1922 if (whichloop == 0)
1923 whichloop = loop_stack;
1924 if (whichloop == 0)
1925 return 0;
1926 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1927 return 1;
1928 }
1929
1930 /* Generate a jump to exit the current loop. If not currently inside a loop,
1931 return 0 and do nothing; caller will print an error message. */
1932
1933 int
1934 expand_exit_loop (whichloop)
1935 struct nesting *whichloop;
1936 {
1937 last_expr_type = 0;
1938 if (whichloop == 0)
1939 whichloop = loop_stack;
1940 if (whichloop == 0)
1941 return 0;
1942 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1943 return 1;
1944 }
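/* E.g. in a C front end, `break' inside a loop becomes
   expand_exit_loop (0) and `continue' becomes expand_continue_loop (0);
   the 0 argument means the innermost loop.  (Usage sketch.)  */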
1945
1946 /* Generate a conditional jump to exit the current loop if COND
1947 evaluates to zero. If not currently inside a loop,
1948 return 0 and do nothing; caller will print an error message. */
1949
1950 int
1951 expand_exit_loop_if_false (whichloop, cond)
1952 struct nesting *whichloop;
1953 tree cond;
1954 {
1955 last_expr_type = 0;
1956 if (whichloop == 0)
1957 whichloop = loop_stack;
1958 if (whichloop == 0)
1959 return 0;
1960 do_jump (cond, whichloop->data.loop.end_label, NULL);
1961 return 1;
1962 }
1963
1964 /* Return nonzero if we should preserve sub-expressions as separate
1965 pseudos. We never do so if we aren't optimizing. We always do so
1966 if -fexpensive-optimizations.
1967
1968 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
1969 the loop may still be a small one. */
1970
1971 int
1972 preserve_subexpressions_p ()
1973 {
1974 rtx insn;
1975
1976 if (flag_expensive_optimizations)
1977 return 1;
1978
1979 if (optimize == 0 || loop_stack == 0)
1980 return 0;
1981
1982 insn = get_last_insn_anywhere ();
1983
1984 return (insn
1985 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
1986 < n_non_fixed_regs * 3));
1987
1988 }
1989
1990 /* Generate a jump to exit the current loop, conditional, binding contour
1991 or case statement. Not all such constructs are visible to this function,
1992 only those started with EXIT_FLAG nonzero. Individual languages use
1993 the EXIT_FLAG parameter to control which kinds of constructs you can
1994 exit this way.
1995
1996 If not currently inside anything that can be exited,
1997 return 0 and do nothing; caller will print an error message. */
1998
1999 int
2000 expand_exit_something ()
2001 {
2002 struct nesting *n;
2003 last_expr_type = 0;
2004 for (n = nesting_stack; n; n = n->all)
2005 if (n->exit_label != 0)
2006 {
2007 expand_goto_internal (0, n->exit_label, 0);
2008 return 1;
2009 }
2010
2011 return 0;
2012 }
2013 \f
2014 /* Generate RTL to return from the current function, with no value.
2015 (That is, we do not do anything about returning any value.) */
2016
2017 void
2018 expand_null_return ()
2019 {
2020 struct nesting *block = block_stack;
2021 rtx last_insn = 0;
2022
2023 /* Does any pending block have cleanups? */
2024
2025 while (block && block->data.block.cleanups == 0)
2026 block = block->next;
2027
2028 /* If yes, use a goto to return, since that runs cleanups. */
2029
2030 expand_null_return_1 (last_insn, block != 0);
2031 }
2032
2033 /* Generate RTL to return from the current function, with value VAL. */
2034
2035 void
2036 expand_value_return (val)
2037 rtx val;
2038 {
2039 struct nesting *block = block_stack;
2040 rtx last_insn = get_last_insn ();
2041 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2042
2043 /* Copy the value to the return location
2044 unless it's already there. */
2045
2046 if (return_reg != val)
2047 emit_move_insn (return_reg, val);
2048 if (GET_CODE (return_reg) == REG
2049 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2050 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2051
2052 /* Does any pending block have cleanups? */
2053
2054 while (block && block->data.block.cleanups == 0)
2055 block = block->next;
2056
2057 /* If yes, use a goto to return, since that runs cleanups.
2058 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2059
2060 expand_null_return_1 (last_insn, block != 0);
2061 }
2062
2063 /* Output a return with no value. If LAST_INSN is nonzero,
2064 pretend that the return takes place after LAST_INSN.
2065 If USE_GOTO is nonzero then don't use a return instruction;
2066 go to the return label instead. This causes any cleanups
2067 of pending blocks to be executed normally. */
2068
2069 static void
2070 expand_null_return_1 (last_insn, use_goto)
2071 rtx last_insn;
2072 int use_goto;
2073 {
2074 rtx end_label = cleanup_label ? cleanup_label : return_label;
2075
2076 clear_pending_stack_adjust ();
2077 do_pending_stack_adjust ();
2078 last_expr_type = 0;
2079
2080 /* PCC-struct return always uses an epilogue. */
2081 if (current_function_returns_pcc_struct || use_goto)
2082 {
2083 if (end_label == 0)
2084 end_label = return_label = gen_label_rtx ();
2085 expand_goto_internal (0, end_label, last_insn);
2086 return;
2087 }
2088
2089 /* Otherwise output a simple return-insn if one is available,
2090 unless it won't do the job. */
2091 #ifdef HAVE_return
2092 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2093 {
2094 emit_jump_insn (gen_return ());
2095 emit_barrier ();
2096 return;
2097 }
2098 #endif
2099
2100 /* Otherwise jump to the epilogue. */
2101 expand_goto_internal (0, end_label, last_insn);
2102 }
2103 \f
2104 /* Generate RTL to evaluate the expression RETVAL and return it
2105 from the current function. */
2106
2107 void
2108 expand_return (retval)
2109 tree retval;
2110 {
2111 /* If there are any cleanups to be performed, then they will
2112 be inserted following LAST_INSN. It is desirable
2113 that the last_insn, for such purposes, should be the
2114 last insn before computing the return value. Otherwise, cleanups
2115 which call functions can clobber the return value. */
2116 /* ??? rms: I think that is erroneous, because in C++ it would
2117 run destructors on variables that might be used in the subsequent
2118 computation of the return value. */
2119 rtx last_insn = 0;
2120 register rtx val = 0;
2121 register rtx op0;
2122 tree retval_rhs;
2123 int cleanups;
2124 struct nesting *block;
2125
2126 /* If function wants no value, give it none. */
2127 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2128 {
2129 expand_expr (retval, 0, VOIDmode, 0);
2130 expand_null_return ();
2131 return;
2132 }
2133
2134 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2135 cleanups = any_pending_cleanups (1);
2136
2137 if (TREE_CODE (retval) == RESULT_DECL)
2138 retval_rhs = retval;
2139 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2140 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2141 retval_rhs = TREE_OPERAND (retval, 1);
2142 else if (TREE_TYPE (retval) == void_type_node)
2143 /* Recognize tail-recursive call to void function. */
2144 retval_rhs = retval;
2145 else
2146 retval_rhs = NULL_TREE;
2147
2148 /* Only use `last_insn' if there are cleanups which must be run. */
2149 if (cleanups || cleanup_label != 0)
2150 last_insn = get_last_insn ();
2151
2152 /* Distribute return down conditional expr if either of the sides
2153 may involve tail recursion (see test below). This enhances the number
2154 of tail recursions we see. Don't do this always since it can produce
2155 sub-optimal code in some cases and we distribute assignments into
2156 conditional expressions when it would help. */
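/* E.g. `return p ? f (x) : y;' is expanded as if written
   `if (p) return f (x); else return y;', giving the call to f
   a chance to become a tail call (sketch).  */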
2157
2158 if (optimize && retval_rhs != 0
2159 && frame_offset == 0
2160 && TREE_CODE (retval_rhs) == COND_EXPR
2161 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2162 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2163 {
2164 rtx label = gen_label_rtx ();
2165 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2166 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2167 DECL_RESULT (current_function_decl),
2168 TREE_OPERAND (retval_rhs, 1)));
2169 emit_label (label);
2170 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2171 DECL_RESULT (current_function_decl),
2172 TREE_OPERAND (retval_rhs, 2)));
2173 return;
2174 }
2175
2176 /* For tail-recursive call to current function,
2177 just jump back to the beginning.
2178 It's unsafe if any auto variable in this function
2179 has its address taken; for simplicity,
2180 require stack frame to be empty. */
2181 if (optimize && retval_rhs != 0
2182 && frame_offset == 0
2183 && TREE_CODE (retval_rhs) == CALL_EXPR
2184 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2185 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2186 /* Finish checking validity, and if valid emit code
2187 to set the argument variables for the new call. */
2188 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2189 DECL_ARGUMENTS (current_function_decl)))
2190 {
2191 if (tail_recursion_label == 0)
2192 {
2193 tail_recursion_label = gen_label_rtx ();
2194 emit_label_after (tail_recursion_label,
2195 tail_recursion_reentry);
2196 }
2197 expand_goto_internal (0, tail_recursion_label, last_insn);
2198 emit_barrier ();
2199 return;
2200 }
2201 #ifdef HAVE_return
2202 /* This optimization is safe even if there are local cleanups
2203 because expand_null_return takes care of them.
2204 ??? I think it should also be safe when there is a cleanup label,
2205 because expand_null_return takes care of them, too.
2206 Any reason why not? */
2207 if (HAVE_return && cleanup_label == 0
2208 && ! current_function_returns_pcc_struct)
2209 {
2210 /* If this is return x == y; then generate
2211 if (x == y) return 1; else return 0;
2212 if we can do it with explicit return insns. */
2213 if (retval_rhs)
2214 switch (TREE_CODE (retval_rhs))
2215 {
2216 case EQ_EXPR:
2217 case NE_EXPR:
2218 case GT_EXPR:
2219 case GE_EXPR:
2220 case LT_EXPR:
2221 case LE_EXPR:
2222 case TRUTH_ANDIF_EXPR:
2223 case TRUTH_ORIF_EXPR:
2224 case TRUTH_AND_EXPR:
2225 case TRUTH_OR_EXPR:
2226 case TRUTH_NOT_EXPR:
2227 op0 = gen_label_rtx ();
2228 jumpifnot (retval_rhs, op0);
2229 expand_value_return (const1_rtx);
2230 emit_label (op0);
2231 expand_value_return (const0_rtx);
2232 return;
2233 }
2234 }
2235 #endif /* HAVE_return */
2236
2237 if (cleanups
2238 && retval_rhs != 0
2239 && TREE_TYPE (retval_rhs) != void_type_node
2240 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2241 {
2242 /* Calculate the return value into a pseudo reg. */
2243 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2244 emit_queue ();
2245 /* All temporaries have now been used. */
2246 free_temp_slots ();
2247 /* Return the calculated value, doing cleanups first. */
2248 expand_value_return (val);
2249 }
2250 else
2251 {
2252 /* No cleanups or no hard reg used;
2253 calculate value into hard return reg. */
2254 expand_expr (retval, 0, VOIDmode, 0);
2255 emit_queue ();
2256 free_temp_slots ();
2257 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2258 }
2259 }
2260
2261 /* Return 1 if the end of the generated RTL is not a barrier.
2262 This means code already compiled can drop through. */
2263
2264 int
2265 drop_through_at_end_p ()
2266 {
2267 rtx insn = get_last_insn ();
2268 while (insn && GET_CODE (insn) == NOTE)
2269 insn = PREV_INSN (insn);
2270 return insn && GET_CODE (insn) != BARRIER;
2271 }
2272 \f
2273 /* Emit code to alter this function's formal parms for a tail-recursive call.
2274 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2275 FORMALS is the chain of decls of formals.
2276 Return 1 if this can be done;
2277 otherwise return 0 and do not emit any code. */
2278
2279 static int
2280 tail_recursion_args (actuals, formals)
2281 tree actuals, formals;
2282 {
2283 register tree a = actuals, f = formals;
2284 register int i;
2285 register rtx *argvec;
2286
2287 /* Check that number and types of actuals are compatible
2288 with the formals. This need not hold even in valid C code.
2289 Also check that no formal needs to be addressable
2290 and that all formals are scalars. */
2291
2292 /* Also count the args. */
2293
2294 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2295 {
2296 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2297 return 0;
2298 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2299 return 0;
2300 }
2301 if (a != 0 || f != 0)
2302 return 0;
2303
2304 /* Compute all the actuals. */
2305
2306 argvec = (rtx *) alloca (i * sizeof (rtx));
2307
2308 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2309 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2310
2311 /* Find which actual values refer to current values of previous formals.
2312 Copy each of them now, before any formal is changed. */
2313
2314 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2315 {
2316 int copy = 0;
2317 register int j;
2318 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2319 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2320 { copy = 1; break; }
2321 if (copy)
2322 argvec[i] = copy_to_reg (argvec[i]);
2323 }
2324
2325 /* Store the values of the actuals into the formals. */
2326
2327 for (f = formals, a = actuals, i = 0; f;
2328 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2329 {
2330 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2331 emit_move_insn (DECL_RTL (f), argvec[i]);
2332 else
2333 convert_move (DECL_RTL (f), argvec[i],
2334 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2335 }
2336
2337 free_temp_slots ();
2338 return 1;
2339 }
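/* Sketch: given `int f (n, a) int n, a; { ... return f (a, n); ... }',
   the actuals are expanded first; since the actual `n' is the very
   register of the earlier-stored formal N, it is copied to a fresh
   pseudo, so the stores amount to  t = n; n = a; a = t;  followed by
   the jump to tail_recursion_label emitted by expand_return.  */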
2340 \f
2341 /* Generate the RTL code for entering a binding contour.
2342 The variables are declared one by one, by calls to `expand_decl'.
2343
2344 EXIT_FLAG is nonzero if this construct should be visible to
2345 `exit_something'. */
2346
2347 void
2348 expand_start_bindings (exit_flag)
2349 int exit_flag;
2350 {
2351 struct nesting *thisblock = ALLOC_NESTING ();
2352
2353 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2354
2355 /* Make an entry on block_stack for the block we are entering. */
2356
2357 thisblock->next = block_stack;
2358 thisblock->all = nesting_stack;
2359 thisblock->depth = ++nesting_depth;
2360 thisblock->data.block.stack_level = 0;
2361 thisblock->data.block.cleanups = 0;
2362 thisblock->data.block.function_call_count = 0;
2363 #if 0
2364 if (block_stack)
2365 {
2366 if (block_stack->data.block.cleanups == NULL_TREE
2367 && (block_stack->data.block.outer_cleanups == NULL_TREE
2368 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2369 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2370 else
2371 thisblock->data.block.outer_cleanups
2372 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2373 block_stack->data.block.outer_cleanups);
2374 }
2375 else
2376 thisblock->data.block.outer_cleanups = 0;
2377 #endif
2378 #if 1
2379 if (block_stack
2380 && !(block_stack->data.block.cleanups == NULL_TREE
2381 && block_stack->data.block.outer_cleanups == NULL_TREE))
2382 thisblock->data.block.outer_cleanups
2383 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2384 block_stack->data.block.outer_cleanups);
2385 else
2386 thisblock->data.block.outer_cleanups = 0;
2387 #endif
2388 thisblock->data.block.label_chain = 0;
2389 thisblock->data.block.innermost_stack_block = stack_block_stack;
2390 thisblock->data.block.first_insn = note;
2391 thisblock->data.block.block_start_count = ++block_start_count;
2392 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2393 block_stack = thisblock;
2394 nesting_stack = thisblock;
2395
2396 /* Make a new level for allocating stack slots. */
2397 push_temp_slots ();
2398 }
2399
2400 /* Generate RTL code to terminate a binding contour.
2401 VARS is the chain of VAR_DECL nodes
2402 for the variables bound in this contour.
2403 MARK_ENDS is nonzero if we should put a note at the beginning
2404 and end of this binding contour.
2405
2406 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2407 (That is true automatically if the contour has a saved stack level.) */
2408
2409 void
2410 expand_end_bindings (vars, mark_ends, dont_jump_in)
2411 tree vars;
2412 int mark_ends;
2413 int dont_jump_in;
2414 {
2415 register struct nesting *thisblock = block_stack;
2416 register tree decl;
2417
2418 if (warn_unused)
2419 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2420 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2421 warning_with_decl (decl, "unused variable `%s'");
2422
2423 /* Mark the beginning and end of the scope if requested. */
2424
2425 if (mark_ends)
2426 emit_note (0, NOTE_INSN_BLOCK_END);
2427 else
2428 /* Get rid of the beginning-mark if we don't make an end-mark. */
2429 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2430
2431 if (thisblock->exit_label)
2432 {
2433 do_pending_stack_adjust ();
2434 emit_label (thisblock->exit_label);
2435 }
2436
2437 /* If necessary, make a handler for nonlocal gotos taking
2438 place in the function calls in this block. */
2439 if (function_call_count != thisblock->data.block.function_call_count
2440 && nonlocal_labels
2441 /* Make handler for outermost block
2442 if there were any nonlocal gotos to this function. */
2443 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2444 /* Make handler for inner block if it has something
2445 special to do when you jump out of it. */
2446 : (thisblock->data.block.cleanups != 0
2447 || thisblock->data.block.stack_level != 0)))
2448 {
2449 tree link;
2450 rtx afterward = gen_label_rtx ();
2451 rtx handler_label = gen_label_rtx ();
2452 rtx save_receiver = gen_reg_rtx (Pmode);
2453
2454 /* Don't let jump_optimize delete the handler. */
2455 LABEL_PRESERVE_P (handler_label) = 1;
2456
2457 /* Record the handler address in the stack slot for that purpose,
2458 during this block, saving and restoring the outer value. */
2459 if (thisblock->next != 0)
2460 {
2461 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2462 emit_insn_before (gen_move_insn (save_receiver,
2463 nonlocal_goto_handler_slot),
2464 thisblock->data.block.first_insn);
2465 }
2466 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2467 gen_rtx (LABEL_REF, Pmode,
2468 handler_label)),
2469 thisblock->data.block.first_insn);
2470
2471 /* Jump around the handler; it runs only when specially invoked. */
2472 emit_jump (afterward);
2473 emit_label (handler_label);
2474
2475 #ifdef HAVE_nonlocal_goto
2476 if (! HAVE_nonlocal_goto)
2477 #endif
2478 /* First adjust our frame pointer to its actual value. It was
2479 previously set to the start of the virtual area corresponding to
2480 the stacked variables when we branched here and now needs to be
2481 adjusted to the actual hardware fp value.
2482
2483 Assignments to virtual registers are converted by
2484 instantiate_virtual_regs into the corresponding assignment
2485 to the underlying register (fp in this case) that makes
2486 the original assignment true.
2487 So the following insn will actually be
2488 decrementing fp by STARTING_FRAME_OFFSET. */
2489 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2490
2491 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2492 if (fixed_regs[ARG_POINTER_REGNUM])
2493 {
2494 /* Now restore our arg pointer from the address at which it was saved
2495 in our stack frame.
2496 If there hasn't been space allocated for it yet, make some now. */
2497 if (arg_pointer_save_area == 0)
2498 arg_pointer_save_area
2499 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2500 emit_move_insn (virtual_incoming_args_rtx, arg_pointer_save_area);
2501 }
2502 #endif
2503
2504 /* The handler expects the desired label address in the static chain
2505 register. It tests the address and does an appropriate jump
2506 to whatever label is desired. */
2507 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2508 /* Skip any labels we shouldn't be able to jump to from here. */
2509 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2510 {
2511 rtx not_this = gen_label_rtx ();
2512 rtx this = gen_label_rtx ();
2513 do_jump_if_equal (static_chain_rtx,
2514 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2515 this, 0);
2516 emit_jump (not_this);
2517 emit_label (this);
2518 expand_goto (TREE_VALUE (link));
2519 emit_label (not_this);
2520 }
2521 /* If label is not recognized, abort. */
2522 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2523 VOIDmode, 0);
2524 emit_label (afterward);
2525 }
2526
2527 /* Don't allow jumping into a block that has cleanups or a stack level. */
2528 if (dont_jump_in
2529 || thisblock->data.block.stack_level != 0
2530 || thisblock->data.block.cleanups != 0)
2531 {
2532 struct label_chain *chain;
2533
2534 /* Any labels in this block are no longer valid to go to.
2535 Mark them to cause an error message. */
2536 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2537 {
2538 DECL_TOO_LATE (chain->label) = 1;
2539 /* If any goto without a fixup came to this label,
2540 that must be an error, because gotos without fixups
2541 come from outside all saved stack-levels and all cleanups. */
2542 if (TREE_ADDRESSABLE (chain->label))
2543 error_with_decl (chain->label,
2544 "label `%s' used before containing binding contour");
2545 }
2546 }
2547
2548 /* Restore stack level in effect before the block
2549 (only if variable-size objects allocated). */
2550 /* Perform any cleanups associated with the block. */
2551
2552 if (thisblock->data.block.stack_level != 0
2553 || thisblock->data.block.cleanups != 0)
2554 {
2555 /* Don't let cleanups affect ({...}) constructs. */
2556 int old_expr_stmts_for_value = expr_stmts_for_value;
2557 rtx old_last_expr_value = last_expr_value;
2558 tree old_last_expr_type = last_expr_type;
2559 expr_stmts_for_value = 0;
2560
2561 /* Do the cleanups. */
2562 expand_cleanups (thisblock->data.block.cleanups, 0);
2563 do_pending_stack_adjust ();
2564
2565 expr_stmts_for_value = old_expr_stmts_for_value;
2566 last_expr_value = old_last_expr_value;
2567 last_expr_type = old_last_expr_type;
2568
2569 /* Restore the stack level. */
2570
2571 if (thisblock->data.block.stack_level != 0)
2572 {
2573 emit_move_insn (stack_pointer_rtx,
2574 thisblock->data.block.stack_level);
2575 if (nonlocal_goto_stack_level != 0)
2576 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2577 }
2578
2579 /* Any gotos out of this block must also do these things.
2580 Also report any gotos with fixups that came to labels in this level. */
2581 fixup_gotos (thisblock,
2582 thisblock->data.block.stack_level,
2583 thisblock->data.block.cleanups,
2584 thisblock->data.block.first_insn,
2585 dont_jump_in);
2586 }
2587
2588 /* If doing stupid register allocation, make sure lives of all
2589 register variables declared here extend thru end of scope. */
2590
2591 if (obey_regdecls)
2592 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2593 {
2594 rtx rtl = DECL_RTL (decl);
2595 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2596 use_variable (rtl);
2597 }
2598
2599 /* Restore block_stack level for containing block. */
2600
2601 stack_block_stack = thisblock->data.block.innermost_stack_block;
2602 POPSTACK (block_stack);
2603
2604 /* Pop the stack slot nesting and free any slots at this level. */
2605 pop_temp_slots ();
2606 }
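/* Sketch of front-end usage for a brace-block `{ int x = 0; S; }':

       expand_start_bindings (0);
       expand_decl (x_decl);
       expand_decl_init (x_decl);
       ... expand S ...
       expand_end_bindings (x_decl, 1, 0);

   where x_decl is the VAR_DECL for `x'.  */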
2607 \f
2608 /* Generate RTL for the automatic variable declaration DECL.
2609 (Other kinds of declarations are simply ignored if seen here.)
2610 CLEANUP is an expression to be executed at exit from this binding contour;
2611 for example, in C++, it might call the destructor for this variable.
2612
2613 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2614 either before or after calling `expand_decl' but before compiling
2615 any subsequent expressions. This is because CLEANUP may be expanded
2616 more than once, on different branches of execution.
2617 For the same reason, CLEANUP may not contain a CALL_EXPR
2618 except as its topmost node--else `preexpand_calls' would get confused.
2619
2620 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2621 that is not associated with any particular variable.
2622
2623 There is no special support here for C++ constructors.
2624 They should be handled by the proper code in DECL_INITIAL. */
2625
2626 void
2627 expand_decl (decl)
2628 register tree decl;
2629 {
2630 struct nesting *thisblock = block_stack;
2631 tree type = TREE_TYPE (decl);
2632
2633 /* Only automatic variables need any expansion done.
2634 Static and external variables, and external functions,
2635 will be handled by `assemble_variable' (called from finish_decl).
2636 TYPE_DECL and CONST_DECL require nothing.
2637 PARM_DECLs are handled in `assign_parms'. */
2638
2639 if (TREE_CODE (decl) != VAR_DECL)
2640 return;
2641 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2642 return;
2643
2644 /* Create the RTL representation for the variable. */
2645
2646 if (type == error_mark_node)
2647 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2648 else if (DECL_SIZE (decl) == 0)
2649 /* Variable with incomplete type. */
2650 {
2651 if (DECL_INITIAL (decl) == 0)
2652 /* Error message was already done; now avoid a crash. */
2653 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2654 else
2655 /* An initializer is going to decide the size of this array.
2656 Until we know the size, represent its address with a reg. */
2657 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2658 }
2659 else if (DECL_MODE (decl) != BLKmode
2660 /* If -ffloat-store, don't put explicit float vars
2661 into regs. */
2662 && !(flag_float_store
2663 && TREE_CODE (type) == REAL_TYPE)
2664 && ! TREE_THIS_VOLATILE (decl)
2665 && ! TREE_ADDRESSABLE (decl)
2666 && (TREE_REGDECL (decl) || ! obey_regdecls))
2667 {
2668 /* Automatic variable that can go in a register. */
2669 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2670 if (TREE_CODE (type) == POINTER_TYPE)
2671 mark_reg_pointer (DECL_RTL (decl));
2672 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2673 }
2674 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2675 {
2676 /* Variable of fixed size that goes on the stack. */
2677 rtx oldaddr = 0;
2678 rtx addr;
2679
2680 /* If we previously made RTL for this decl, it must be an array
2681 whose size was determined by the initializer.
2682 The old address was a register; set that register now
2683 to the proper address. */
2684 if (DECL_RTL (decl) != 0)
2685 {
2686 if (GET_CODE (DECL_RTL (decl)) != MEM
2687 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2688 abort ();
2689 oldaddr = XEXP (DECL_RTL (decl), 0);
2690 }
2691
2692 DECL_RTL (decl)
2693 = assign_stack_temp (DECL_MODE (decl),
2694 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2695 + BITS_PER_UNIT - 1)
2696 / BITS_PER_UNIT),
2697 1);
2698
2699 /* Set alignment we actually gave this decl. */
2700 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2701 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2702
2703 if (oldaddr)
2704 {
2705 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2706 if (addr != oldaddr)
2707 emit_move_insn (oldaddr, addr);
2708 }
2709
2710 /* If this is a memory ref that contains aggregate components,
2711 mark it as such for cse and loop optimize. */
2712 MEM_IN_STRUCT_P (DECL_RTL (decl))
2713 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2714 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2715 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2716 #if 0
2717 /* If this is in memory because of -ffloat-store,
2718 set the volatile bit, to prevent optimizations from
2719 undoing the effects. */
2720 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2721 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2722 #endif
2723 }
2724 else
2725 /* Dynamic-size object: must push space on the stack. */
2726 {
2727 rtx address, size;
2728
2729 /* Record the stack pointer on entry to block, if we have
2730 not already done so. */
2731 if (thisblock->data.block.stack_level == 0)
2732 {
2733 do_pending_stack_adjust ();
2734 thisblock->data.block.stack_level
2735 = copy_to_reg (stack_pointer_rtx);
2736 stack_block_stack = thisblock;
2737 }
2738
2739 /* Compute the variable's size, in bytes. */
2740 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2741 DECL_SIZE (decl),
2742 size_int (BITS_PER_UNIT)),
2743 0, VOIDmode, 0);
2744 free_temp_slots ();
2745
2746 /* Allocate space on the stack for the variable. */
2747 address = allocate_dynamic_stack_space (size, 0);
2748
2749 if (nonlocal_goto_stack_level != 0)
2750 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2751
2752 /* Reference the variable indirect through that rtx. */
2753 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2754
2755 /* Indicate the alignment we actually gave this variable. */
2756 #ifdef STACK_BOUNDARY
2757 DECL_ALIGN (decl) = STACK_BOUNDARY;
2758 #else
2759 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2760 #endif
2761 }
2762
2763 if (TREE_THIS_VOLATILE (decl))
2764 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2765 if (TREE_READONLY (decl))
2766 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2767
2768 /* If doing stupid register allocation, make sure life of any
2769 register variable starts here, at the start of its scope. */
2770
2771 if (obey_regdecls)
2772 use_variable (DECL_RTL (decl));
2773 }
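/* Examples of what the code above produces (sketch):
   `register int i;' normally gets a pseudo REG;
   `int buf[10];' gets a fixed stack slot (a MEM);
   `char vla[n];' records the entry stack level and allocates its
   space dynamically, its DECL_RTL being a MEM whose address is
   computed at run time.  */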
2774 \f
2775 /* Emit code to perform the initialization of a declaration DECL. */
2776
2777 void
2778 expand_decl_init (decl)
2779 tree decl;
2780 {
2781 if (TREE_STATIC (decl))
2782 return;
2783
2784 /* Compute and store the initial value now. */
2785
2786 if (DECL_INITIAL (decl) == error_mark_node)
2787 {
2788 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2789 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2790 || code == POINTER_TYPE)
2791 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2792 0, 0);
2793 emit_queue ();
2794 }
2795 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2796 {
2797 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2798 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2799 emit_queue ();
2800 }
2801
2802 /* Free any temporaries we made while initializing the decl. */
2803 free_temp_slots ();
2804 }
2805
2806 /* CLEANUP is an expression to be executed at exit from this binding contour;
2807 for example, in C++, it might call the destructor for this variable.
2808
2809 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2810 either before or after calling `expand_decl' but before compiling
2811 any subsequent expressions. This is because CLEANUP may be expanded
2812 more than once, on different branches of execution.
2813 For the same reason, CLEANUP may not contain a CALL_EXPR
2814 except as its topmost node--else `preexpand_calls' would get confused.
2815
2816 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2817 that is not associated with any particular variable. */
2818
2819 int
2820 expand_decl_cleanup (decl, cleanup)
2821 tree decl, cleanup;
2822 {
2823 struct nesting *thisblock = block_stack;
2824
2825 /* Error if we are not in any block. */
2826 if (thisblock == 0)
2827 return 0;
2828
2829 /* Record the cleanup if there is one. */
2830
2831 if (cleanup != 0)
2832 {
2833 thisblock->data.block.cleanups
2834 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2835 /* If this block has a cleanup, it belongs in stack_block_stack. */
2836 stack_block_stack = thisblock;
2837 }
2838 return 1;
2839 }
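/* Sketch of front-end usage: a C++ front end expanding `T x;' where T
   has a destructor might record the cleanup with

       expand_decl_cleanup (x_decl, dtor_call);

   where dtor_call is a hypothetical tree for `x.~T ()', built and
   preevaluated as described above.  */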
2840 \f
2841 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2842 DECL_ELTS is the list of elements that belong to DECL's type.
2843 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2844
2845 void
2846 expand_anon_union_decl (decl, cleanup, decl_elts)
2847 tree decl, cleanup, decl_elts;
2848 {
2849 struct nesting *thisblock = block_stack;
2850 rtx x;
2851
2852 expand_decl (decl);
2853 x = DECL_RTL (decl);
2854
2855 while (decl_elts)
2856 {
2857 tree decl_elt = TREE_VALUE (decl_elts);
2858 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2859 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2860
2861 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2862 instead create a new MEM rtx with the proper mode. */
2863 if (GET_CODE (x) == MEM)
2864 {
2865 if (mode == GET_MODE (x))
2866 DECL_RTL (decl_elt) = x;
2867 else
2868 {
2869 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2870 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2871 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2872 }
2873 }
2874 else if (GET_CODE (x) == REG)
2875 {
2876 if (mode == GET_MODE (x))
2877 DECL_RTL (decl_elt) = x;
2878 else
2879 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2880 }
2881 else
2882 abort ();
2883
2884 /* Record the cleanup if there is one. */
2885
2886 if (cleanup_elt != 0)
2887 thisblock->data.block.cleanups
2888 = temp_tree_cons (decl_elt, cleanup_elt,
2889 thisblock->data.block.cleanups);
2890
2891 decl_elts = TREE_CHAIN (decl_elts);
2892 }
2893 }
2894 \f
2895 /* Expand a list of cleanups LIST.
2896 Elements may be expressions or may be nested lists.
2897
2898 If DONT_DO is nonnull, then any list-element
2899 whose TREE_PURPOSE matches DONT_DO is omitted.
2900 This is sometimes used to avoid a cleanup associated with
2901 a value that is being returned out of the scope. */
2902
2903 static void
2904 expand_cleanups (list, dont_do)
2905 tree list;
2906 tree dont_do;
2907 {
2908 tree tail;
2909 for (tail = list; tail; tail = TREE_CHAIN (tail))
2910 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2911 {
2912 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2913 expand_cleanups (TREE_VALUE (tail), dont_do);
2914 else
2915 {
2916 /* Cleanups may be run multiple times. For example,
2917 when exiting a binding contour, we expand the
2918 cleanups associated with that contour. When a goto
2919 within that binding contour has a target outside that
2920 contour, it will expand all cleanups from its scope to
2921 the target. Though the cleanups are expanded multiple
2922 times, the control paths are non-overlapping so the
2923 cleanups will not be executed twice. */
2924 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2925 free_temp_slots ();
2926 }
2927 }
2928 }
2929
2930 /* Expand a list of cleanups for a goto fixup.
2931 The expansion is put into the insn chain after the insn *BEFORE_JUMP
2932 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
2933
2934 static void
2935 fixup_cleanups (list, before_jump)
2936 tree list;
2937 rtx *before_jump;
2938 {
2939 rtx beyond_jump = get_last_insn ();
2940 rtx new_before_jump;
2941
2942 expand_cleanups (list, 0);
2943 /* Pop any pushes done in the cleanups,
2944 in case function is about to return. */
2945 do_pending_stack_adjust ();
2946
2947 new_before_jump = get_last_insn ();
2948
2949 if (beyond_jump != new_before_jump)
2950 {
2951 /* If cleanups expand to nothing, don't reorder. */
2952 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
2953 *before_jump = new_before_jump;
2954 }
2955 }
2956
2957 /* Move all cleanups from the current block_stack
2958 to the containing block_stack, where they are assumed to
2959 have been created. If anything can cause a temporary to
2960 be created, but not expanded for more than one level of
2961 block_stacks, then this code will have to change. */
2962
2963 void
2964 move_cleanups_up ()
2965 {
2966 struct nesting *block = block_stack;
2967 struct nesting *outer = block->next;
2968
2969 outer->data.block.cleanups
2970 = chainon (block->data.block.cleanups,
2971 outer->data.block.cleanups);
2972 block->data.block.cleanups = 0;
2973 }
2974
2975 tree
2976 last_cleanup_this_contour ()
2977 {
2978 if (block_stack == 0)
2979 return 0;
2980
2981 return block_stack->data.block.cleanups;
2982 }
2983
2984 /* Return 1 if there are any pending cleanups at this point.
2985 If THIS_CONTOUR is nonzero, check the current contour as well.
2986 Otherwise, look only at the contours that enclose this one. */
2987
2988 int
2989 any_pending_cleanups (this_contour)
2990 int this_contour;
2991 {
2992 struct nesting *block;
2993
2994 if (block_stack == 0)
2995 return 0;
2996
2997 if (this_contour && block_stack->data.block.cleanups != NULL)
2998 return 1;
2999 if (block_stack->data.block.cleanups == 0
3000 && (block_stack->data.block.outer_cleanups == 0
3001 #if 0
3002 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3003 #endif
3004 ))
3005 return 0;
3006
3007 for (block = block_stack->next; block; block = block->next)
3008 if (block->data.block.cleanups != 0)
3009 return 1;
3010
3011 return 0;
3012 }
3013 \f
3014 /* Enter a case (Pascal) or switch (C) statement.
3015 Push a block onto case_stack and nesting_stack
3016 to accumulate the case-labels that are seen
3017 and to record the labels generated for the statement.
3018
3019 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3020 Otherwise, this construct is transparent for `exit_something'.
3021
3022 EXPR is the index-expression to be dispatched on.
3023 TYPE is its nominal type. We could simply convert EXPR to this type,
3024 but instead we take short cuts. */
3025
3026 void
3027 expand_start_case (exit_flag, expr, type, printname)
3028 int exit_flag;
3029 tree expr;
3030 tree type;
3031 char *printname;
3032 {
3033 register struct nesting *thiscase = ALLOC_NESTING ();
3034
3035 /* Make an entry on case_stack for the case we are entering. */
3036
3037 thiscase->next = case_stack;
3038 thiscase->all = nesting_stack;
3039 thiscase->depth = ++nesting_depth;
3040 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3041 thiscase->data.case_stmt.case_list = 0;
3042 thiscase->data.case_stmt.index_expr = expr;
3043 thiscase->data.case_stmt.nominal_type = type;
3044 thiscase->data.case_stmt.default_label = 0;
3045 thiscase->data.case_stmt.num_ranges = 0;
3046 thiscase->data.case_stmt.printname = printname;
3047 thiscase->data.case_stmt.seenlabel = 0;
3048 case_stack = thiscase;
3049 nesting_stack = thiscase;
3050
3051 do_pending_stack_adjust ();
3052
3053 /* Make sure case_stmt.start points to something that won't
3054 need any transformation before expand_end_case. */
3055 if (GET_CODE (get_last_insn ()) != NOTE)
3056 emit_note (0, NOTE_INSN_DELETED);
3057
3058 thiscase->data.case_stmt.start = get_last_insn ();
3059 }
3060
3061 /* Start a "dummy case statement" within which case labels are invalid
3062 and are not connected to any larger real case statement.
3063 This can be used if you don't want to let a case statement jump
3064 into the middle of certain kinds of constructs. */
3065
3066 void
3067 expand_start_case_dummy ()
3068 {
3069 register struct nesting *thiscase = ALLOC_NESTING ();
3070
3071 /* Make an entry on case_stack for the dummy. */
3072
3073 thiscase->next = case_stack;
3074 thiscase->all = nesting_stack;
3075 thiscase->depth = ++nesting_depth;
3076 thiscase->exit_label = 0;
3077 thiscase->data.case_stmt.case_list = 0;
3078 thiscase->data.case_stmt.start = 0;
3079 thiscase->data.case_stmt.nominal_type = 0;
3080 thiscase->data.case_stmt.default_label = 0;
3081 thiscase->data.case_stmt.num_ranges = 0;
3082 case_stack = thiscase;
3083 nesting_stack = thiscase;
3084 }
3085
3086 /* End a dummy case statement. */
3087
3088 void
3089 expand_end_case_dummy ()
3090 {
3091 POPSTACK (case_stack);
3092 }
3093
3094 /* Return the data type of the index-expression
3095 of the innermost case statement, or null if none. */
3096
3097 tree
3098 case_index_expr_type ()
3099 {
3100 if (case_stack)
3101 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3102 return 0;
3103 }
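/* Sketch of the calls a C front end makes for
   `switch (x) { case 1: A; break; default: B; }':

       expand_start_case (1, x_expr, TREE_TYPE (x_expr), "switch statement");
       pushcase (one_cst, lab1, &dup);
       ... expand A; the `break' becomes expand_exit_something () ...
       pushcase (0, labd, &dup);
       ... expand B ...
       expand_end_case (x_expr);

   Here one_cst is an INTEGER_CST tree for 1, and lab1 and labd are
   LABEL_DECLs (hypothetical names).  */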
3104 \f
3105 /* Accumulate one case or default label inside a case or switch statement.
3106 VALUE is the value of the case (a null pointer, for a default label).
3107
3108 If not currently inside a case or switch statement, return 1 and do
3109 nothing. The caller will print a language-specific error message.
3110 If VALUE is a duplicate or overlaps, return 2 and do nothing
3111 except store the (first) duplicate node in *DUPLICATE.
3112 If VALUE is out of range, return 3 and do nothing.
3113 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3114 Return 0 on success.
3115
3116 Extended to handle range statements. */
3117
3118 int
3119 pushcase (value, label, duplicate)
3120 register tree value;
3121 register tree label;
3122 tree *duplicate;
3123 {
3124 register struct case_node **l;
3125 register struct case_node *n;
3126 tree index_type;
3127 tree nominal_type;
3128
3129 /* Fail if not inside a real case statement. */
3130 if (! (case_stack && case_stack->data.case_stmt.start))
3131 return 1;
3132
3133 if (stack_block_stack
3134 && stack_block_stack->depth > case_stack->depth)
3135 return 5;
3136
3137 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3138 nominal_type = case_stack->data.case_stmt.nominal_type;
3139
3140 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3141 if (index_type == error_mark_node)
3142 return 0;
3143
3144 /* Convert VALUE to the type in which the comparisons are nominally done. */
3145 if (value != 0)
3146 value = convert (nominal_type, value);
3147
3148 /* If this is the first label, warn if any insns have been emitted. */
3149 if (case_stack->data.case_stmt.seenlabel == 0)
3150 {
3151 rtx insn;
3152 for (insn = case_stack->data.case_stmt.start;
3153 insn;
3154 insn = NEXT_INSN (insn))
3155 {
3156 if (GET_CODE (insn) == CODE_LABEL)
3157 break;
3158 if (GET_CODE (insn) != NOTE
3159 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3160 {
3161 warning ("unreachable code at beginning of %s",
3162 case_stack->data.case_stmt.printname);
3163 break;
3164 }
3165 }
3166 }
3167 case_stack->data.case_stmt.seenlabel = 1;
3168
3169 /* Fail if this value is out of range for the actual type of the index
3170 (which may be narrower than NOMINAL_TYPE). */
3171 if (value != 0 && ! int_fits_type_p (value, index_type))
3172 return 3;
3173
3174 /* Fail if this is a duplicate or overlaps another entry. */
3175 if (value == 0)
3176 {
3177 if (case_stack->data.case_stmt.default_label != 0)
3178 {
3179 *duplicate = case_stack->data.case_stmt.default_label;
3180 return 2;
3181 }
3182 case_stack->data.case_stmt.default_label = label;
3183 }
3184 else
3185 {
3186 /* Find the elt in the chain before which to insert the new value,
3187 to keep the chain sorted in increasing order.
3188 But report an error if this element is a duplicate. */
3189 for (l = &case_stack->data.case_stmt.case_list;
3190 /* Keep going past elements distinctly less than VALUE. */
3191 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3192 l = &(*l)->right)
3193 ;
3194 if (*l)
3195 {
3196 /* Element we will insert before must be distinctly greater;
3197 overlap means error. */
3198 if (! tree_int_cst_lt (value, (*l)->low))
3199 {
3200 *duplicate = (*l)->code_label;
3201 return 2;
3202 }
3203 }
3204
3205 /* Add this label to the chain, and succeed.
3206 Copy VALUE so it is on temporary rather than momentary
3207 obstack and will thus survive till the end of the case statement. */
3208 n = (struct case_node *) oballoc (sizeof (struct case_node));
3209 n->left = 0;
3210 n->right = *l;
3211 n->high = n->low = copy_node (value);
3212 n->code_label = label;
3213 *l = n;
3214 }
3215
3216 expand_label (label);
3217 return 0;
3218 }
3219
3220 /* Like pushcase but this case applies to all values
3221 between VALUE1 and VALUE2 (inclusive).
3222 The return value is the same as that of pushcase
3223 but there is one additional error code:
3224 4 means the specified range was empty. */
3225
3226 int
3227 pushcase_range (value1, value2, label, duplicate)
3228 register tree value1, value2;
3229 register tree label;
3230 tree *duplicate;
3231 {
3232 register struct case_node **l;
3233 register struct case_node *n;
3234 tree index_type;
3235 tree nominal_type;
3236
3237 /* Fail if not inside a real case statement. */
3238 if (! (case_stack && case_stack->data.case_stmt.start))
3239 return 1;
3240
3241 if (stack_block_stack
3242 && stack_block_stack->depth > case_stack->depth)
3243 return 5;
3244
3245 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3246 nominal_type = case_stack->data.case_stmt.nominal_type;
3247
3248 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3249 if (index_type == error_mark_node)
3250 return 0;
3251
3252 /* If this is the first label, warn if any insns have been emitted. */
3253 if (case_stack->data.case_stmt.seenlabel == 0)
3254 {
3255 rtx insn;
3256 for (insn = case_stack->data.case_stmt.start;
3257 insn;
3258 insn = NEXT_INSN (insn))
3259 {
3260 if (GET_CODE (insn) == CODE_LABEL)
3261 break;
3262 if (GET_CODE (insn) != NOTE
3263 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3264 {
3265 warning ("unreachable code at beginning of %s",
3266 case_stack->data.case_stmt.printname);
3267 break;
3268 }
3269 }
3270 }
3271 case_stack->data.case_stmt.seenlabel = 1;
3272
3273 /* Convert VALUEs to type in which the comparisons are nominally done. */
3274 if (value1 == 0) /* Negative infinity. */
3275 value1 = TYPE_MIN_VALUE (index_type);
3276 value1 = convert (nominal_type, value1);
3277
3278 if (value2 == 0) /* Positive infinity. */
3279 value2 = TYPE_MAX_VALUE (index_type);
3280 value2 = convert (nominal_type, value2);
3281
3282 /* Fail if these values are out of range. */
3283 if (! int_fits_type_p (value1, index_type))
3284 return 3;
3285
3286 if (! int_fits_type_p (value2, index_type))
3287 return 3;
3288
3289 /* Fail if the range is empty. */
3290 if (tree_int_cst_lt (value2, value1))
3291 return 4;
3292
3293 /* If the bounds are equal, turn this into the one-value case. */
3294 if (tree_int_cst_equal (value1, value2))
3295 return pushcase (value1, label, duplicate);
3296
3297 /* Find the elt in the chain before which to insert the new value,
3298 to keep the chain sorted in increasing order.
3299 But report an error if this element is a duplicate. */
3300 for (l = &case_stack->data.case_stmt.case_list;
3301 /* Keep going past elements distinctly less than this range. */
3302 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3303 l = &(*l)->right)
3304 ;
3305 if (*l)
3306 {
3307 /* Element we will insert before must be distinctly greater;
3308 overlap means error. */
3309 if (! tree_int_cst_lt (value2, (*l)->low))
3310 {
3311 *duplicate = (*l)->code_label;
3312 return 2;
3313 }
3314 }
3315
3316 /* Add this label to the chain, and succeed.
3317 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3318 obstack and will thus survive till the end of the case statement. */
3319
3320 n = (struct case_node *) oballoc (sizeof (struct case_node));
3321 n->left = 0;
3322 n->right = *l;
3323 n->low = copy_node (value1);
3324 n->high = copy_node (value2);
3325 n->code_label = label;
3326 *l = n;
3327
3328 expand_label (label);
3329
3330 case_stack->data.case_stmt.num_ranges++;
3331
3332 return 0;
3333 }
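/* E.g. the GNU C range extension `case 1 ... 4:' maps to
   pushcase_range (one_cst, four_cst, lab, &dup), with INTEGER_CST
   trees for the bounds (sketch; names hypothetical).  */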
3334 \f
3335 /* Called when the index of a switch statement is an enumerated type
3336 and there is no default label.
3337
3338 Checks that all enumeration literals are covered by the case
3339 expressions of a switch. Also, warn if there are any extra
3340 switch cases that are *not* elements of the enumerated type.
3341
3342 If all enumeration literals were covered by the case expressions,
3343 turn one of the expressions into the default expression since it should
3344 not be possible to fall through such a switch. */
3345
3346 void
3347 check_for_full_enumeration_handling (type)
3348 tree type;
3349 {
3350 register struct case_node *n;
3351 register struct case_node **l;
3352 register tree chain;
3353 int all_values = 1;
3354
3355 /* The time complexity of this loop is currently O(N * M), with
3356 N being the number of enumerals in the enumerated type, and
3357 M being the number of case expressions in the switch. */
3358
3359 for (chain = TYPE_VALUES (type);
3360 chain;
3361 chain = TREE_CHAIN (chain))
3362 {
3363 /* Find a match between enumeral and case expression, if possible.
3364 Quit looking when we've gone too far (since case expressions
3365 are kept sorted in ascending order). Warn about enumerals not
3366 handled in the switch statement case expression list. */
3367
3368 for (n = case_stack->data.case_stmt.case_list;
3369 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3370 n = n->right)
3371 ;
3372
3373 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3374 {
3375 if (warn_switch)
3376 warning ("enumerated value `%s' not handled in switch",
3377 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3378 all_values = 0;
3379 }
3380 }
3381
3382 /* Now we go the other way around; we warn if there are case
3383 expressions that don't correspond to enumerals. This can
3384 occur since C and C++ don't enforce type-checking of
3385 assignments to enumeration variables. */
3386
3387 if (warn_switch)
3388 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3389 {
3390 for (chain = TYPE_VALUES (type);
3391 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3392 chain = TREE_CHAIN (chain))
3393 ;
3394
3395 if (!chain)
3396 warning ("case value `%d' not in enumerated type `%s'",
3397 TREE_INT_CST_LOW (n->low),
3398 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3399 == IDENTIFIER_NODE)
3400 ? TYPE_NAME (type)
3401 : DECL_NAME (TYPE_NAME (type))));
3402 }
3403
3404 /* If all values were found as case labels, make one of them the default
3405 label. Thus, this switch will never fall through. We arbitrarily pick
3406 the last one to make the default since this is likely the most
3407 efficient choice. */
3408
3409 if (all_values)
3410 {
3411 for (l = &case_stack->data.case_stmt.case_list;
3412 (*l)->right != 0;
3413 l = &(*l)->right)
3414 ;
3415
3416 case_stack->data.case_stmt.default_label = (*l)->code_label;
3417 *l = 0;
3418 }
3419 }
3420 \f
3421 /* Terminate a case (Pascal) or switch (C) statement
3422 in which CASE_INDEX is the expression to be tested.
3423 Generate the code to test it and jump to the right place. */
3424
3425 void
3426 expand_end_case (orig_index)
3427 tree orig_index;
3428 {
3429 tree minval, maxval, range;
3430 rtx default_label = 0;
3431 register struct case_node *n;
3432 int count;
3433 rtx index;
3434 rtx table_label = gen_label_rtx ();
3435 int ncases;
3436 rtx *labelvec;
3437 register int i;
3438 rtx before_case;
3439 register struct nesting *thiscase = case_stack;
3440 tree index_expr = thiscase->data.case_stmt.index_expr;
3441 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3442
3443 do_pending_stack_adjust ();
3444
3445 /* An ERROR_MARK occurs for various reasons, including an invalid data type. */
3446 if (TREE_TYPE (index_expr) != error_mark_node)
3447 {
3448 /* If switch expression was an enumerated type, check that all
3449 enumeration literals are covered by the cases.
3450 No sense trying this if there's a default case, however. */
3451
3452 if (!thiscase->data.case_stmt.default_label
3453 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3454 && TREE_CODE (index_expr) != INTEGER_CST)
3455 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3456
3457 /* If there were no case labels, warn about any insns emitted in the switch body; they are unreachable. */
3458 if (thiscase->data.case_stmt.seenlabel == 0)
3459 {
3460 rtx insn;
3461 for (insn = get_last_insn ();
3462 insn != case_stack->data.case_stmt.start;
3463 insn = PREV_INSN (insn))
3464 if (GET_CODE (insn) != NOTE
3465 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3466 {
3467 warning ("unreachable code at beginning of %s",
3468 case_stack->data.case_stmt.printname);
3469 break;
3470 }
3471 }
3472
3473 /* If we don't have a default-label, create one here,
3474 after the body of the switch. */
3475 if (thiscase->data.case_stmt.default_label == 0)
3476 {
3477 thiscase->data.case_stmt.default_label
3478 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3479 expand_label (thiscase->data.case_stmt.default_label);
3480 }
3481 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3482
3483 before_case = get_last_insn ();
3484
3485 /* Simplify the case-list before we count it. */
3486 group_case_nodes (thiscase->data.case_stmt.case_list);
3487
3488 /* Get upper and lower bounds of case values.
3489 Also convert all the case values to the index expr's data type. */
3490
3491 count = 0;
3492 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3493 {
3494 /* Check that the low and high label values are integers. */
3495 if (TREE_CODE (n->low) != INTEGER_CST)
3496 abort ();
3497 if (TREE_CODE (n->high) != INTEGER_CST)
3498 abort ();
3499
3500 n->low = convert (TREE_TYPE (index_expr), n->low);
3501 n->high = convert (TREE_TYPE (index_expr), n->high);
3502
3503 /* Count the elements and track the largest and smallest
3504 of them (treating them as signed even if they are not). */
3505 if (count++ == 0)
3506 {
3507 minval = n->low;
3508 maxval = n->high;
3509 }
3510 else
3511 {
3512 if (INT_CST_LT (n->low, minval))
3513 minval = n->low;
3514 if (INT_CST_LT (maxval, n->high))
3515 maxval = n->high;
3516 }
3517 /* A range counts double, since it requires two compares. */
3518 if (! tree_int_cst_equal (n->low, n->high))
3519 count++;
3520 }
3521
3522 /* Compute span of values. */
3523 if (count != 0)
3524 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3525 maxval, minval));
3526
3527 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3528 {
3529 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3530 emit_queue ();
3531 emit_jump (default_label);
3532 }
3533 /* If the range of values is much bigger than the number of values,
3534 make a sequence of conditional branches instead of a dispatch table.
3535 Also do it this way if the switch-index is a constant,
3536 because then we can optimize it. */
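
/* As a hypothetical illustration: five case values spread over the
   range 0..1000 give range > 10 * count, so conditional branches
   are used; five values packed into the range 0..9 would instead
   qualify for a dispatch table (given a suitable case insn). */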
3537 else if (TREE_INT_CST_HIGH (range) != 0
3538 #ifdef HAVE_casesi
3539 || (HAVE_casesi ? count < 4 : count < 5)
3540 #else
3541 /* If the machine does not have a case insn that compares the
3542 bounds, dispatch tables incur extra overhead,
3543 which raises the threshold for using them. */
3544 || count < 5
3545 #endif
3546 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3547 || TREE_CODE (index_expr) == INTEGER_CST
3548 /* This will reduce to a constant. */
3549 || (TREE_CODE (index_expr) == CALL_EXPR
3550 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3551 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE))
3552 {
3553 index = expand_expr (index_expr, 0, VOIDmode, 0);
3554
3555 /* If the index is a short or char for which we do not have
3556 an insn to handle comparisons directly, convert it to
3557 a full integer now, rather than letting each comparison
3558 generate the conversion. */
3559
3560 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3561 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3562 == CODE_FOR_nothing))
3563 {
3564 enum machine_mode wider_mode;
3565 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3566 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3567 if (cmp_optab->handlers[(int) wider_mode].insn_code
3568 != CODE_FOR_nothing)
3569 {
3570 index = convert_to_mode (wider_mode, index, unsignedp);
3571 break;
3572 }
3573 }
3574
3575 emit_queue ();
3576 do_pending_stack_adjust ();
3577
3578 index = protect_from_queue (index, 0);
3579 if (GET_CODE (index) == MEM)
3580 index = copy_to_reg (index);
3581 if (GET_CODE (index) == CONST_INT
3582 || TREE_CODE (index_expr) == INTEGER_CST)
3583 {
3584 /* Make a tree node with the proper constant value
3585 if we don't already have one. */
3586 if (TREE_CODE (index_expr) != INTEGER_CST)
3587 {
3588 index_expr
3589 = build_int_2 (INTVAL (index),
3590 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3591 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3592 }
3593
3594 /* For constant index expressions we need only
3595 issue an unconditional branch to the appropriate
3596 target code. The job of removing any unreachable
3597 code is left to the optimization phase if the
3598 "-O" option is specified. */
3599 for (n = thiscase->data.case_stmt.case_list;
3600 n;
3601 n = n->right)
3602 {
3603 if (! tree_int_cst_lt (index_expr, n->low)
3604 && ! tree_int_cst_lt (n->high, index_expr))
3605 break;
3606 }
3607 if (n)
3608 emit_jump (label_rtx (n->code_label));
3609 else
3610 emit_jump (default_label);
3611 }
3612 else
3613 {
3614 /* If the index expression is not constant we generate
3615 a binary decision tree to select the appropriate
3616 target code. This is done as follows:
3617
3618 The list of cases is rearranged into a binary tree,
3619 nearly optimal assuming equal probability for each case.
3620
3621 The tree is transformed into RTL, eliminating
3622 redundant test conditions at the same time.
3623
3624 If program flow could reach the end of the
3625 decision tree, an unconditional jump to the
3626 default code is emitted. */
3627
3628 use_cost_table
3629 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3630 && default_label != 0
3631 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3632 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3633 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3634 default_label, TREE_TYPE (index_expr));
3635 emit_jump_if_reachable (default_label);
3636 }
3637 }
3638 else
3639 {
3640 int win = 0;
3641 #ifdef HAVE_casesi
3642 if (HAVE_casesi)
3643 {
3644 /* Convert the index to SImode. */
3645 if (TYPE_MODE (TREE_TYPE (index_expr)) == DImode)
3646 {
3647 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3648 index_expr, minval);
3649 minval = integer_zero_node;
3650 }
3651 if (TYPE_MODE (TREE_TYPE (index_expr)) != SImode)
3652 index_expr = convert (type_for_size (GET_MODE_BITSIZE (SImode), 0),
3653 index_expr);
3654 index = expand_expr (index_expr, 0, VOIDmode, 0);
3655 emit_queue ();
3656 index = protect_from_queue (index, 0);
3657 do_pending_stack_adjust ();
3658
3659 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3660 expand_expr (range, 0, VOIDmode, 0),
3661 table_label, default_label));
3662 win = 1;
3663 }
3664 #endif
3665 #ifdef HAVE_tablejump
3666 if (! win && HAVE_tablejump)
3667 {
3668 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3669 build (MINUS_EXPR, TREE_TYPE (index_expr),
3670 index_expr, minval));
3671 index = expand_expr (index_expr, 0, VOIDmode, 0);
3672 emit_queue ();
3673 index = protect_from_queue (index, 0);
3674 do_pending_stack_adjust ();
3675
3676 do_tablejump (index,
3677 gen_rtx (CONST_INT, VOIDmode,
3678 TREE_INT_CST_LOW (range)),
3679 table_label, default_label);
3680 win = 1;
3681 }
3682 #endif
3683 if (! win)
3684 abort ();
3685
3686 /* Get table of labels to jump to, in order of case index. */
3687
3688 ncases = TREE_INT_CST_LOW (range) + 1;
3689 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3690 bzero (labelvec, ncases * sizeof (rtx));
3691
3692 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3693 {
3694 register int i
3695 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3696
3697 while (1)
3698 {
3699 labelvec[i]
3700 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3701 if (i + TREE_INT_CST_LOW (minval)
3702 == TREE_INT_CST_LOW (n->high))
3703 break;
3704 i++;
3705 }
3706 }
3707
3708 /* Fill in the gaps with the default. */
3709 for (i = 0; i < ncases; i++)
3710 if (labelvec[i] == 0)
3711 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
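
/* As a hypothetical illustration: with cases 2, 3 and 6, minval
   is 2 and range is 4, so the vector has five slots,
   { L2, L3, default, default, L6 }, indexed by value - minval. */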
3712
3713 /* Output the table. */
3714 emit_label (table_label);
3715
3716 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3717 were an expression, instead of an #ifdef/#ifndef. */
3718 if (
3719 #ifdef CASE_VECTOR_PC_RELATIVE
3720 1 ||
3721 #endif
3722 flag_pic)
3723 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3724 gen_rtx (LABEL_REF, Pmode, table_label),
3725 gen_rtvec_v (ncases, labelvec)));
3726 else
3727 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3728 gen_rtvec_v (ncases, labelvec)));
3729
3730 /* If the case insn drops through the table,
3731 after the table we must jump to the default-label.
3732 Otherwise record no drop-through after the table. */
3733 #ifdef CASE_DROPS_THROUGH
3734 emit_jump (default_label);
3735 #else
3736 emit_barrier ();
3737 #endif
3738 }
3739
3740 reorder_insns (NEXT_INSN (before_case), get_last_insn (),
3741 thiscase->data.case_stmt.start);
3742 }
3743 if (thiscase->exit_label)
3744 emit_label (thiscase->exit_label);
3745
3746 POPSTACK (case_stack);
3747
3748 free_temp_slots ();
3749 }
3750
3751 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3752
3753 static void
3754 do_jump_if_equal (op1, op2, label, unsignedp)
3755 rtx op1, op2, label;
3756 int unsignedp;
3757 {
3758 if (GET_CODE (op1) == CONST_INT
3759 && GET_CODE (op2) == CONST_INT)
3760 {
3761 if (INTVAL (op1) == INTVAL (op2))
3762 emit_jump (label);
3763 }
3764 else
3765 {
3766 enum machine_mode mode = GET_MODE (op1);
3767 if (mode == VOIDmode)
3768 mode = GET_MODE (op2);
3769 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3770 emit_jump_insn (gen_beq (label));
3771 }
3772 }
3773 \f
3774 /* Not all case values are encountered equally. This function
3775 uses a heuristic to weight case labels, in cases where that
3776 looks like a reasonable thing to do.
3777
3778 Right now, all we try to guess is text, and we establish the
3779 following weights:
3780
3781 chars above space: 16
3782 digits: 16
3783 default: 12
3784 space, punct: 8
3785 tab: 4
3786 newline: 2
3787 other "\" chars: 1
3788 remaining chars: 0
3789
3790 If we find any cases in the switch that are not either -1 or in the range
3791 of valid ASCII characters, or are control characters other than those
3792 commonly used with "\", don't treat this switch as scanning text.
3793
3794 Return 1 if these nodes are suitable for cost estimation, otherwise
3795 return 0. */
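
/* For example (hypothetically), a switch over 'a', 'z' and '\n'
   qualifies and returns 1, while a switch containing `case 1000:'
   or an unusual control character such as '\177' returns 0, so the
   caller falls back on the uniform-probability balancing. */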
3796
3797 static int
3798 estimate_case_costs (node)
3799 case_node_ptr node;
3800 {
3801 tree min_ascii = build_int_2 (-1, -1);
3802 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3803 case_node_ptr n;
3804 int i;
3805
3806 /* If we haven't already made the cost table, make it now. Note that the
3807 lower bound of the table is -1, not zero. */
3808
3809 if (cost_table == NULL)
3810 {
3811 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3812 bzero (cost_table - 1, 129 * sizeof (short));
3813
3814 for (i = 0; i < 128; i++)
3815 {
3816 if (isalnum (i))
3817 cost_table[i] = 16;
3818 else if (ispunct (i))
3819 cost_table[i] = 8;
3820 else if (iscntrl (i))
3821 cost_table[i] = -1;
3822 }
3823
3824 cost_table[' '] = 8;
3825 cost_table['\t'] = 4;
3826 cost_table['\0'] = 4;
3827 cost_table['\n'] = 2;
3828 cost_table['\f'] = 1;
3829 cost_table['\v'] = 1;
3830 cost_table['\b'] = 1;
3831 }
3832
3833 /* See if all the case expressions look like text. It is text if the
3834 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
3835 as signed arithmetic since we don't want to ever access cost_table with a
3836 value less than -1. Also check that none of the constants in a range
3837 are strange control characters. */
3838
3839 for (n = node; n; n = n->right)
3840 {
3841 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3842 return 0;
3843
3844 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3845 if (cost_table[i] < 0)
3846 return 0;
3847 }
3848
3849 /* All interesting values are within the range of interesting
3850 ASCII characters. */
3851 return 1;
3852 }
3853
3854 /* Scan an ordered list of case nodes
3855 combining those with consecutive values or ranges.
3856
3857 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
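
/* A hypothetical fragment making that concrete:

     switch (x)
       {
       case 1:
       case 2:
       case 3:
         foo ();
       }

   All three labels reach the same real insn, so the three case
   nodes collapse into a single node with low 1 and high 3. */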
3858
3859 static void
3860 group_case_nodes (head)
3861 case_node_ptr head;
3862 {
3863 case_node_ptr node = head;
3864
3865 while (node)
3866 {
3867 rtx lb = next_real_insn (label_rtx (node->code_label));
3868 case_node_ptr np = node;
3869
3870 /* Try to group the successors of NODE with NODE. */
3871 while (((np = np->right) != 0)
3872 /* Do they jump to the same place? */
3873 && next_real_insn (label_rtx (np->code_label)) == lb
3874 /* Are their ranges consecutive? */
3875 && tree_int_cst_equal (np->low,
3876 fold (build (PLUS_EXPR,
3877 TREE_TYPE (node->high),
3878 node->high,
3879 integer_one_node)))
3880 /* An overflow is not consecutive. */
3881 && tree_int_cst_lt (node->high,
3882 fold (build (PLUS_EXPR,
3883 TREE_TYPE (node->high),
3884 node->high,
3885 integer_one_node))))
3886 {
3887 node->high = np->high;
3888 }
3889 /* NP is the first node after NODE which can't be grouped with it.
3890 Delete the nodes in between, and move on to that node. */
3891 node->right = np;
3892 node = np;
3893 }
3894 }
3895
3896 /* Take an ordered list of case nodes
3897 and transform them into a near optimal binary tree,
3898 on the assumption that any target code selection value is as
3899 likely as any other.
3900
3901 The transformation is performed by splitting the ordered
3902 list into two equal sections plus a pivot. The parts are
3903 then attached to the pivot as left and right branches. Each
3904 branch is then transformed recursively. */
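
/* As a hypothetical illustration, the ordered values 1, 3, 5, 7, 9
   (with no cost table) are split at 5, which becomes the root;
   1 and 3 form the left branch and 7 and 9 the right, and each
   branch is then balanced recursively. */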
3905
3906 static void
3907 balance_case_nodes (head, parent)
3908 case_node_ptr *head;
3909 case_node_ptr parent;
3910 {
3911 register case_node_ptr np;
3912
3913 np = *head;
3914 if (np)
3915 {
3916 int cost = 0;
3917 int i = 0;
3918 int ranges = 0;
3919 register case_node_ptr *npp;
3920 case_node_ptr left;
3921
3922 /* Count the number of entries on branch. Also count the ranges. */
3923
3924 while (np)
3925 {
3926 if (!tree_int_cst_equal (np->low, np->high))
3927 {
3928 ranges++;
3929 if (use_cost_table)
3930 cost += cost_table[TREE_INT_CST_LOW (np->high)];
3931 }
3932
3933 if (use_cost_table)
3934 cost += cost_table[TREE_INT_CST_LOW (np->low)];
3935
3936 i++;
3937 np = np->right;
3938 }
3939
3940 if (i > 2)
3941 {
3942 /* Split this list if it is long enough for that to help. */
3943 npp = head;
3944 left = *npp;
3945 if (use_cost_table)
3946 {
3947 /* Find the place in the list that bisects the list's total cost;
3948 here I is set to half the total cost. */
3949 int n_moved = 0;
3950 i = (cost + 1) / 2;
3951 while (1)
3952 {
3953 /* Skip nodes while their cost does not reach that amount. */
3954 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3955 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
3956 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
3957 if (i <= 0)
3958 break;
3959 npp = &(*npp)->right;
3960 n_moved += 1;
3961 }
3962 if (n_moved == 0)
3963 {
3964 /* Leave this branch lopsided, but optimize left-hand
3965 side and fill in `parent' fields for right-hand side. */
3966 np = *head;
3967 np->parent = parent;
3968 balance_case_nodes (&np->left, np);
3969 for (; np->right; np = np->right)
3970 np->right->parent = np;
3971 return;
3972 }
3973 }
3974 /* If there are just three nodes, split at the middle one. */
3975 else if (i == 3)
3976 npp = &(*npp)->right;
3977 else
3978 {
3979 /* Find the place in the list that bisects the list's total cost,
3980 where ranges count as 2;
3981 here I is set to half the total cost. */
3982 i = (i + ranges + 1) / 2;
3983 while (1)
3984 {
3985 /* Skip nodes while their cost does not reach that amount. */
3986 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3987 i--;
3988 i--;
3989 if (i <= 0)
3990 break;
3991 npp = &(*npp)->right;
3992 }
3993 }
3994 *head = np = *npp;
3995 *npp = 0;
3996 np->parent = parent;
3997 np->left = left;
3998
3999 /* Optimize each of the two split parts. */
4000 balance_case_nodes (&np->left, np);
4001 balance_case_nodes (&np->right, np);
4002 }
4003 else
4004 {
4005 /* Else leave this branch as one level,
4006 but fill in `parent' fields. */
4007 np = *head;
4008 np->parent = parent;
4009 for (; np->right; np = np->right)
4010 np->right->parent = np;
4011 }
4012 }
4013 }
4014 \f
4015 /* Search the parent sections of the case node tree
4016 to see if a test for the lower bound of NODE would be redundant.
4017 INDEX_TYPE is the type of the index expression.
4018
4019 The instructions to generate the case decision tree are
4020 output in the same order as nodes are processed, so it is
4021 known that if a parent node has already checked against the
4022 current node's low value minus one, the current node is bounded
4023 at its lower end. Thus the test would be redundant. */
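
/* Concretely (a hypothetical shape): if this node covers 51..60 and
   some parent already compared the index against 50 as its high
   bound, values below 51 have been dispatched elsewhere, so no
   low-bound test need be emitted here. */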
4024
4025 static int
4026 node_has_low_bound (node, index_type)
4027 case_node_ptr node;
4028 tree index_type;
4029 {
4030 tree low_minus_one;
4031 case_node_ptr pnode;
4032
4033 /* If the lower bound of this node is the lowest value in the index type,
4034 we need not test it. */
4035
4036 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4037 return 1;
4038
4039 /* If this node has a left branch, the value at the left must be less
4040 than that at this node, so it cannot be bounded at the bottom and
4041 we need not bother testing any further. */
4042
4043 if (node->left)
4044 return 0;
4045
4046 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4047 node->low, integer_one_node));
4048
4049 /* If the subtraction above overflowed, we can't verify anything.
4050 Otherwise, look for a parent that tests our value - 1. */
4051
4052 if (! tree_int_cst_lt (low_minus_one, node->low))
4053 return 0;
4054
4055 for (pnode = node->parent; pnode; pnode = pnode->parent)
4056 if (tree_int_cst_equal (low_minus_one, pnode->high))
4057 return 1;
4058
4059 return 0;
4060 }
4061
4062 /* Search the parent sections of the case node tree
4063 to see if a test for the upper bound of NODE would be redundant.
4064 INDEX_TYPE is the type of the index expression.
4065
4066 The instructions to generate the case decision tree are
4067 output in the same order as nodes are processed, so it is
4068 known that if a parent node has already checked against the
4069 current node's high value plus one, the current node is bounded
4070 at its upper end. Thus the test would be redundant. */
4071
4072 static int
4073 node_has_high_bound (node, index_type)
4074 case_node_ptr node;
4075 tree index_type;
4076 {
4077 tree high_plus_one;
4078 case_node_ptr pnode;
4079
4080 /* If the upper bound of this node is the highest value in the type
4081 of the index expression, we need not test against it. */
4082
4083 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4084 return 1;
4085
4086 /* If this node has a right branch, the value at the right must be greater
4087 than that at this node, so it cannot be bounded at the top and
4088 we need not bother testing any further. */
4089
4090 if (node->right)
4091 return 0;
4092
4093 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4094 node->high, integer_one_node));
4095
4096 /* If the addition above overflowed, we can't verify anything.
4097 Otherwise, look for a parent that tests our value + 1. */
4098
4099 if (! tree_int_cst_lt (node->high, high_plus_one))
4100 return 0;
4101
4102 for (pnode = node->parent; pnode; pnode = pnode->parent)
4103 if (tree_int_cst_equal (high_plus_one, pnode->low))
4104 return 1;
4105
4106 return 0;
4107 }
4108
4109 /* Search the parent sections of the
4110 case node tree to see if both tests for the upper and lower
4111 bounds of NODE would be redundant. */
4112
4113 static int
4114 node_is_bounded (node, index_type)
4115 case_node_ptr node;
4116 tree index_type;
4117 {
4118 return (node_has_low_bound (node, index_type)
4119 && node_has_high_bound (node, index_type));
4120 }
4121
4122 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4123
4124 static void
4125 emit_jump_if_reachable (label)
4126 rtx label;
4127 {
4128 if (GET_CODE (get_last_insn ()) != BARRIER)
4129 emit_jump (label);
4130 }
4131 \f
4132 /* Emit step-by-step code to select a case for the value of INDEX.
4133 The thus generated decision tree follows the form of the
4134 case-node binary tree NODE, whose nodes represent test conditions.
4135 INDEX_TYPE is the type of the index of the switch.
4136
4137 Care is taken to prune redundant tests from the decision tree
4138 by detecting any boundary conditions already checked by
4139 emitted rtx. (See node_has_high_bound, node_has_low_bound
4140 and node_is_bounded, above.)
4141
4142 Where the test conditions can be shown to be redundant we emit
4143 an unconditional jump to the target code. As a further
4144 optimization, the subordinates of a tree node are examined to
4145 check for bounded nodes. In this case conditional and/or
4146 unconditional jumps as a result of the boundary check for the
4147 current node are arranged to target the subordinate's associated
4148 code for out-of-bound conditions on the current node.
4149
4150 We can assume that when control reaches the code generated here,
4151 the index value has already been compared with the parents
4152 of this node, and determined to be on the same side of each parent
4153 as this node is. Thus, if this node tests for the value 51,
4154 and a parent tested for 52, we don't need to consider
4155 the possibility of a value greater than 51. If another parent
4156 tests for the value 50, then this node need not test anything. */
4157
4158 static void
4159 emit_case_nodes (index, node, default_label, index_type)
4160 rtx index;
4161 case_node_ptr node;
4162 rtx default_label;
4163 tree index_type;
4164 {
4165 /* If INDEX has an unsigned type, we must make unsigned branches. */
4166 int unsignedp = TREE_UNSIGNED (index_type);
4167 typedef rtx rtx_function ();
4168 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4169 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4170 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4171 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4172 enum machine_mode mode = GET_MODE (index);
4173
4174 /* See if our parents have already tested everything for us.
4175 If they have, emit an unconditional jump for this node. */
4176 if (node_is_bounded (node, index_type))
4177 emit_jump (label_rtx (node->code_label));
4178
4179 else if (tree_int_cst_equal (node->low, node->high))
4180 {
4181 /* Node is single valued. First see if the index expression matches
4182 this node and then check our children, if any. */
4183
4184 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4185 label_rtx (node->code_label), unsignedp);
4186
4187 if (node->right != 0 && node->left != 0)
4188 {
4189 /* This node has children on both sides.
4190 Dispatch to one side or the other
4191 by comparing the index value with this node's value.
4192 If one subtree is bounded, check that one first,
4193 so we can avoid real branches in the tree. */
4194
4195 if (node_is_bounded (node->right, index_type))
4196 {
4197 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4198 GT, 0, mode, unsignedp, 0);
4199
4200 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4201 emit_case_nodes (index, node->left, default_label, index_type);
4202 }
4203
4204 else if (node_is_bounded (node->left, index_type))
4205 {
4206 emit_cmp_insn (index, expand_expr (node->high, 0,
4207 VOIDmode, 0),
4208 LT, 0, mode, unsignedp, 0);
4209 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4210 emit_case_nodes (index, node->right, default_label, index_type);
4211 }
4212
4213 else
4214 {
4215 /* Neither node is bounded. First distinguish the two sides;
4216 then emit the code for one side at a time. */
4217
4218 tree test_label
4219 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4220
4221 /* See if the value is on the right. */
4222 emit_cmp_insn (index, expand_expr (node->high, 0,
4223 VOIDmode, 0),
4224 GT, 0, mode, unsignedp, 0);
4225 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4226
4227 /* Value must be on the left.
4228 Handle the left-hand subtree. */
4229 emit_case_nodes (index, node->left, default_label, index_type);
4230 /* If left-hand subtree does nothing,
4231 go to default. */
4232 emit_jump_if_reachable (default_label);
4233
4234 /* Code branches here for the right-hand subtree. */
4235 expand_label (test_label);
4236 emit_case_nodes (index, node->right, default_label, index_type);
4237 }
4238 }
4239
4240 else if (node->right != 0 && node->left == 0)
4241 {
4242 /* Here we have a right child but no left, so we issue a conditional
4243 branch to the default and process the right child.
4244
4245 Omit the conditional branch to the default if it would avoid only one
4246 right child; it costs too much space to save so little time. */
4247
4248 if (node->right->right
4249 || !tree_int_cst_equal (node->right->low, node->right->high))
4250 {
4251 if (!node_has_low_bound (node, index_type))
4252 {
4253 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4254 LT, 0, mode, unsignedp, 0);
4255 emit_jump_insn ((*gen_blt_pat) (default_label));
4256 }
4257
4258 emit_case_nodes (index, node->right, default_label, index_type);
4259 }
4260 else
4261 /* We cannot process node->right normally
4262 since we haven't ruled out the numbers less than
4263 this node's value. So handle node->right explicitly. */
4264 do_jump_if_equal (index,
4265 expand_expr (node->right->low, 0, VOIDmode, 0),
4266 label_rtx (node->right->code_label), unsignedp);
4267 }
4268
4269 else if (node->right == 0 && node->left != 0)
4270 {
4271 /* Just one subtree, on the left. */
4272
4273 /* If our "most probable entry" is less probable
4274 than the default label, emit a jump to
4275 the default label using condition codes
4276 already lying around. With no right branch,
4277 a branch-greater-than will get us to the default
4278 label correctly. */
4279 if ((use_cost_table
4280 ? cost_table[TREE_INT_CST_LOW (node->high)] < 12
4281 : node->left->left != 0)
4282 || !tree_int_cst_equal (node->left->low, node->left->high))
4283 {
4284 if (!node_has_high_bound (node, index_type))
4285 {
4286 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4287 GT, 0, mode, unsignedp, 0);
4288 emit_jump_insn ((*gen_bgt_pat) (default_label));
4289 }
4290
4291 emit_case_nodes (index, node->left, default_label, index_type);
4292 }
4293 else
4294 /* We cannot process node->left normally
4295 since we haven't ruled out the numbers greater than
4296 this node's value. So handle node->left explicitly. */
4297 do_jump_if_equal (index,
4298 expand_expr (node->left->low, 0, VOIDmode, 0),
4299 label_rtx (node->left->code_label), unsignedp);
4300 }
4301 }
4302 else
4303 {
4304 /* Node is a range. These cases are very similar to those for a single
4305 value, except that we do not start by testing whether this node
4306 is the one to branch to. */
4307
4308 if (node->right != 0 && node->left != 0)
4309 {
4310 /* Node has subtrees on both sides.
4311 If the right-hand subtree is bounded,
4312 test for it first, since we can go straight there.
4313 Otherwise, we need to make a branch in the control structure,
4314 then handle the two subtrees. */
4315 tree test_label = 0;
4316
4317 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4318 GT, 0, mode, unsignedp, 0);
4319
4320 if (node_is_bounded (node->right, index_type))
4321 /* Right hand node is fully bounded so we can eliminate any
4322 testing and branch directly to the target code. */
4323 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4324 else
4325 {
4326 /* Right hand node requires testing.
4327 Branch to a label where we will handle it later. */
4328
4329 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4330 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4331 }
4332
4333 /* Value belongs to this node or to the left-hand subtree. */
4334
4335 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4336 GE, 0, mode, unsignedp, 0);
4337 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4338
4339 /* Handle the left-hand subtree. */
4340 emit_case_nodes (index, node->left, default_label, index_type);
4341
4342 /* If right node had to be handled later, do that now. */
4343
4344 if (test_label)
4345 {
4346 /* If the left-hand subtree fell through,
4347 don't let it fall into the right-hand subtree. */
4348 emit_jump_if_reachable (default_label);
4349
4350 expand_label (test_label);
4351 emit_case_nodes (index, node->right, default_label, index_type);
4352 }
4353 }
4354
4355 else if (node->right != 0 && node->left == 0)
4356 {
4357 /* Deal with values to the left of this node,
4358 if they are possible. */
4359 if (!node_has_low_bound (node, index_type))
4360 {
4361 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4362 LT, 0, mode, unsignedp, 0);
4363 emit_jump_insn ((*gen_blt_pat) (default_label));
4364 }
4365
4366 /* Value belongs to this node or to the right-hand subtree. */
4367
4368 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4369 LE, 0, mode, unsignedp, 0);
4370 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4371
4372 emit_case_nodes (index, node->right, default_label, index_type);
4373 }
4374
4375 else if (node->right == 0 && node->left != 0)
4376 {
4377 /* Deal with values to the right of this node,
4378 if they are possible. */
4379 if (!node_has_high_bound (node, index_type))
4380 {
4381 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4382 GT, 0, mode, unsignedp, 0);
4383 emit_jump_insn ((*gen_bgt_pat) (default_label));
4384 }
4385
4386 /* Value belongs to this node or to the left-hand subtree. */
4387
4388 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4389 GE, 0, mode, unsignedp, 0);
4390 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4391
4392 emit_case_nodes (index, node->left, default_label, index_type);
4393 }
4394
4395 else
4396 {
4397 /* Node has no children so we check low and high bounds to remove
4398 redundant tests. Only one of the bounds can exist,
4399 since otherwise this node is bounded--a case tested already. */
4400
4401 if (!node_has_high_bound (node, index_type))
4402 {
4403 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4404 GT, 0, mode, unsignedp, 0);
4405 emit_jump_insn ((*gen_bgt_pat) (default_label));
4406 }
4407
4408 if (!node_has_low_bound (node, index_type))
4409 {
4410 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4411 LT, 0, mode, unsignedp, 0);
4412 emit_jump_insn ((*gen_blt_pat) (default_label));
4413 }
4414
4415 emit_jump (label_rtx (node->code_label));
4416 }
4417 }
4418 }
4419 \f
4420 /* These routines are used by the loop unrolling code. They copy BLOCK trees
4421 so that the debugging info will be correct for the unrolled loop. */
4422
4423 /* Indexed by loop number, contains pointer to the first block in the loop,
4424 or zero if none. Only valid if doing loop unrolling and outputting debugger
4425 info. */
4426
4427 tree *loop_number_first_block;
4428
4429 /* Indexed by loop number, contains pointer to the last block in the loop,
4430 only valid if loop_number_first_block is nonzero. */
4431
4432 tree *loop_number_last_block;
4433
4434 /* Indexed by loop number, contains nesting level of first block in the
4435 loop, if any. Only valid if doing loop unrolling and outputting debugger
4436 info. */
4437
4438 int *loop_number_block_level;
4439
4440 /* Scan the function looking for loops, and walk the BLOCK tree at the
4441 same time. Record the first and last BLOCK tree corresponding to each
4442 loop. This function is similar to find_and_verify_loops in loop.c. */
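
/* A hypothetical shape of the insn stream being scanned:

     NOTE_INSN_LOOP_BEG
       NOTE_INSN_BLOCK_BEG   <- topmost block of the loop
       ...
       NOTE_INSN_BLOCK_END
     NOTE_INSN_LOOP_END

   Here the scan records that BLOCK as both the first and the last
   block of the loop. */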
4443
4444 void
4445 find_loop_tree_blocks (f)
4446 rtx f;
4447 {
4448 rtx insn;
4449 int current_loop = -1;
4450 int next_loop = -1;
4451 int loop;
4452 int block_level, tree_level;
4453 tree tree_block, parent_tree_block;
4454
4455 tree_block = DECL_INITIAL (current_function_decl);
4456 parent_tree_block = 0;
4457 block_level = 0;
4458 tree_level = -1;
4459
4460 /* Find boundaries of loops, and save the first and last BLOCK tree
4461 corresponding to each loop. */
4462
4463 for (insn = f; insn; insn = NEXT_INSN (insn))
4464 {
4465 if (GET_CODE (insn) == NOTE)
4466 switch (NOTE_LINE_NUMBER (insn))
4467 {
4468 case NOTE_INSN_LOOP_BEG:
4469 loop_number_block_level[++next_loop] = block_level;
4470 loop_number_first_block[next_loop] = 0;
4471 current_loop = next_loop;
4472 break;
4473
4474 case NOTE_INSN_LOOP_END:
4475 if (current_loop == -1)
4476 abort ();
4477
4478 current_loop = loop_outer_loop[current_loop];
4479 break;
4480
4481 case NOTE_INSN_BLOCK_BEG:
4482 if (tree_level < block_level)
4483 {
4484 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4485 we must now visit the subtree of the current block. */
4486 parent_tree_block = tree_block;
4487 tree_block = BLOCK_SUBBLOCKS (tree_block);
4488 tree_level++;
4489 }
4490 else if (tree_level > block_level)
4491 abort ();
4492
4493 /* Save this block tree here for all nested loops for which
4494 this is the topmost block. */
4495 for (loop = current_loop;
4496 loop != -1 && block_level == loop_number_block_level[loop];
4497 loop = loop_outer_loop[loop])
4498 {
4499 if (loop_number_first_block[loop] == 0)
4500 loop_number_first_block[loop] = tree_block;
4501 loop_number_last_block[loop] = tree_block;
4502 }
4503
4504 block_level++;
4505 break;
4506
4507 case NOTE_INSN_BLOCK_END:
4508 block_level--;
4509 if (tree_level > block_level)
4510 {
4511 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4512 we must now visit the parent of the current tree. */
4513 if (tree_block != 0 || parent_tree_block == 0)
4514 abort ();
4515 tree_block = parent_tree_block;
4516 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4517 tree_level--;
4518 }
4519 tree_block = BLOCK_CHAIN (tree_block);
4520 break;
4521 }
4522 }
4523 }
4524
4525 /* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4526 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4527
4528 Note that we only copy the topmost level of tree nodes; they will share
4529 pointers to the same subblocks. */
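
/* As a hypothetical illustration: if the loop's topmost blocks are
   B1 and B2 and COPIES is 3, two extra copies of each are spliced
   in after B2, giving B1, B2, B1', B2', B1'', B2'', where every
   copy shares the subblocks of its original. */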
4530
4531 void
4532 unroll_block_trees (loop_number, copies)
4533 int loop_number;
4534 int copies;
4535 {
4536 int i;
4537
4538 /* First check whether there are any blocks that need to be copied. */
4539 if (loop_number_first_block[loop_number])
4540 {
4541 tree first_block = loop_number_first_block[loop_number];
4542 tree last_block = loop_number_last_block[loop_number];
4543 tree last_block_created = 0;
4544
4545 for (i = 0; i < copies - 1; i++)
4546 {
4547 tree block = first_block;
4548 tree insert_after = last_block;
4549 tree copied_block;
4550
4551 /* Copy every block between first_block and last_block inclusive,
4552 inserting the new blocks after last_block. */
4553 do
4554 {
4555 tree new_block = make_node (BLOCK);
4556 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4557 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4558 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4559 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4560 TREE_USED (new_block) = TREE_USED (block);
4561
4562 /* Insert the new block after the insertion point, and move
4563 the insertion point to the new block. This ensures that
4564 the copies are inserted in the right order. */
4565 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4566 BLOCK_CHAIN (insert_after) = new_block;
4567 insert_after = new_block;
4568
4569 copied_block = block;
4570 block = BLOCK_CHAIN (block);
4571 }
4572 while (copied_block != last_block);
4573
4574 /* Remember the last block created, so that we can update the
4575 info in the tables. */
4576 if (last_block_created == 0)
4577 last_block_created = insert_after;
4578 }
4579
4580 /* For all nested loops for which LAST_BLOCK was originally the last
4581 block, update the tables to indicate that LAST_BLOCK_CREATED is
4582 now the last block in the loop. */
4583 for (i = loop_number; last_block == loop_number_last_block[i];
4584 i = loop_outer_loop[i])
4585 loop_number_last_block[i] = last_block_created;
4586 }
4587 }