/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
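
/* For example, a C front end would expand `if (cond) stmt;' with the
   call sequence below (a minimal sketch; the real call sites are in the
   parser, and the second argument controls `exit_something' visibility):

     expand_start_cond (cond, 0);
     ... expand STMT here ...
     expand_end_cond ();
*/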

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern char *xmalloc ();
extern void free ();

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function,
   if not optimizing.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if variables stored at an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;       /* Left son in binary tree */
  struct case_node *right;      /* Right son in binary tree; also node chain */
  struct case_node *parent;     /* Parent of node in binary tree */
  tree low;                     /* Lowest index value for this label */
  tree high;                    /* Highest index value for this label */
  tree code_label;              /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

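/* For example, once the front end has pushed the labels of

     switch (i) { case 4: ... case 10 ... 12: ... default: ... }

   (via pushcase and pushcase_range, defined later in this file), the
   pending case_list is a two-node chain: one node with low = high = 4
   and one with low = 10, high = 12, linked through RIGHT in ascending
   order; the default label is recorded separately.  (A sketch; the
   exact entry points used are a front-end matter.)  */
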
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)                                 \
do { int initial_depth = nesting_stack->depth;          \
     do { struct nesting *this = STACK;                 \
          STACK = this->next;                           \
          nesting_stack = this->all;                    \
          nesting_depth = this->depth;                  \
          obstack_free (&stmt_obstack, this); }         \
     while (nesting_depth > initial_depth); } while (0)
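
/* For example, expand_end_cond below finishes with
   `POPSTACK (cond_stack);', discarding the record that
   expand_start_cond pushed and resynchronizing `nesting_stack'.  */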
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to the following entry in the chain.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, 0, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
  emit_barrier ();
}
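
/* For example, the GNU C computed-goto statement `goto *ptr;' is
   expanded by calling expand_computed_goto with EXP being the tree
   for `ptr' (a sketch; the actual call site is in the front end).  */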
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      nonlocal_goto_stack_level
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_insn_before (gen_move_insn (nonlocal_goto_stack_level,
                                       stack_pointer_rtx),
                        tail_recursion_reentry);
    }
}
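
/* A sketch of when the above matters: with GNU C nested functions,

     void f ()
     {
       void g () { goto failure; }
       ...
       failure: ...;
     }

   the front end calls declare_nonlocal_label on `failure', since the
   goto in `g' must restore f's stack level before jumping.
   (Illustrative only; the front end decides which labels qualify.)  */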

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx temp;
      p->has_nonlocal_label = 1;
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      p->nonlocal_goto_handler_slot,
                                      p->nonlocal_goto_stack_level,
                                      gen_rtx (LABEL_REF, Pmode,
                                               label_rtx (label))));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));
          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          temp = copy_to_reg (p->nonlocal_goto_handler_slot);
          /* Restore the stack pointer.  Note this uses fp just restored.  */
          emit_move_insn (stack_pointer_rtx, p->nonlocal_goto_stack_level);
          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx,
                          gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
          /* USE of frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), 0);
}
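
/* Taken together with expand_label above, the protocol for an ordinary
   local goto is just (a minimal sketch of the front-end call sequence):

     expand_goto (decl);      ... at `goto L;' ...
     expand_label (decl);     ... at `L:' ...

   in either order; expand_goto_internal and expand_fixup below handle
   the forward case, where the stack level to restore is not yet known.  */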

/* Generate RTL code for a `goto'-style jump whose target label in tree
   structure (if any) is BODY and whose target in rtl is LABEL, which
   must be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, 0);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this would
             clobber the stack pointer.  This one should be deleted as
             dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_move_insn (stack_pointer_rtx, stack_level);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
#if 0
             && block->data.block.outer_cleanups != empty_cleanup_list
#endif
             )
            || block->data.block.cleanups)
           ? tree_cons (0, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! TREE_REGDECL (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              TREE_REGDECL (f->target) = 1;
            }

          /* Execute cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level),
                             f->before_jump);
          f->before_jump = 0;
        }
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.
           Also mark the cleanup_list_list element for F
           that corresponds to this block, so that ultimately
           this block's cleanups will be executed by the code above.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
           it means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            TREE_ADDRESSABLE (lists) = 1;

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
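
/* For example, the GNU C statement `asm ("nop");' arrives here with
   BODY being the STRING_CST for "nop", and becomes a single insn whose
   pattern is an ASM_INPUT rtx holding that string.  */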

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        {
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            {
              error ("output operand constraint contains `+'");
              return;
            }
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
            found_equal = 1;
        }
      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a variable or indirect ref,
         or a part of one,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
             || TREE_CODE (val) == ARRAY_REF)
        val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
          && TREE_CODE (val) != PARM_DECL
          && TREE_CODE (val) != INDIRECT_REF)
        TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)));
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
          {
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;
          }

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0)
        num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)
                continue;

              error ("unknown register name `%s' in `asm'", regname);
              return;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
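
/* As an illustration (a sketch; the front end builds these lists), the
   GNU C statement

     asm volatile ("foo %1,%0" : "=r" (a) : "g" (b));

   reaches here with STRING the STRING_CST "foo %1,%0", OUTPUTS a
   one-element TREE_LIST with TREE_PURPOSE the string "=r" and TREE_VALUE
   the expression for `a', INPUTS likewise holding "g" and `b',
   CLOBBERS empty, and VOL nonzero.  */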
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        /* This case needs to be written.  */
        abort ();
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
        return 1;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
        return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
        return 0;
      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
        return 0;
      /* Sometimes it results in a cast of a cast of a modify.
         Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
           || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
        return 0;
      /* Otherwise, fall through to give the warning.  */

    default:
      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
      return 1;
    }
}

/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate an RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement has void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
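
/* A sketch of how a front end expands a ({...}) statement expression
   with the two functions above (illustrative call sequence):

     tree t = expand_start_stmt_expr ();
     ... call expand_expr_stmt for each statement inside the braces ...
     t = expand_end_stmt_expr (t);

   The RTL_EXPR T then carries both the insns emitted in between
   (RTL_EXPR_SEQUENCE) and the value of the last statement (RTL_EXPR_RTL),
   so the caller can treat T as an ordinary expression.  */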
\f
/* The exception handling nesting looks like this:

                <-- Level N-1
   {            <-- exception handler block
                <-- Level N
                <-- in an exception handler
     {          <-- try block
     :          <-- in a TRY block
     :          <-- in an exception handler
     :
     }

     {          <-- except block
     :          <-- in an except block
     :          <-- in an exception handler
     :
     }

   }
*/

/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return n != 0;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

int
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}

/* End of a TRY block.  Generate the label for the end of the whole
   construct and jump to it, past the `except' clauses that follow;
   the label itself is emitted by expand_end_except.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}

/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;
      /* An `exit' from catch clauses goes out to next exit level,
         if there is one.  Otherwise, it just goes to the end
         of the construct.  */
      for (n = except_stack->next; n; n = n->next)
        if (n->exit_label != 0)
          {
            except_stack->exit_label = n->exit_label;
            break;
          }
      if (n == 0)
        except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }
  if (escapeflag)
    {
      struct nesting *n;
      /* An `escape' from catch clauses goes out to next escape level,
         if there is one.  Otherwise, it just goes to the end
         of the construct.  */
      for (n = except_stack->next; n; n = n->next)
        if (n->data.except_stmt.escape_label != 0)
          {
            except_stack->data.except_stmt.escape_label
              = n->data.except_stmt.escape_label;
            break;
          }
      if (n == 0)
        except_stack->data.except_stmt.escape_label
          = except_stack->data.except_stmt.after_label;
    }
  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}

/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */

int
expand_escape_except ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
        expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
        return 1;
      }

  return 0;
}

/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
         highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
        {
          tree prev = NULL_TREE;
          raised = except_stack->data.except_stmt.raised;
          while (handled)
            {
              tree this_raise;
              for (this_raise = raised, prev = 0; this_raise;
                   this_raise = TREE_CHAIN (this_raise))
                {
                  if (value_member (TREE_VALUE (this_raise), handled))
                    {
                      if (prev)
                        TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
                      else
                        {
                          raised = TREE_CHAIN (raised);
                          if (raised == NULL_TREE)
                            goto nada;
                        }
                    }
                  else
                    prev = this_raise;
                }
              handled = TREE_CHAIN (handled);
            }
          if (prev == NULL_TREE)
            prev = raised;
          if (prev)
            TREE_CHAIN (prev) = n->data.except_stmt.raised;
        nada:
          n->data.except_stmt.raised = raised;
        }
    }

  POPSTACK (except_stack);
  last_expr_type = 0;
  return raised;
}

/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}

/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
  return 1;
}
1670 \f
1671 /* Generate RTL for the start of an if-then. COND is the expression
1672 whose truth should be tested.
1673
1674 If EXITFLAG is nonzero, this conditional is visible to
1675 `exit_something'. */
1676
1677 void
1678 expand_start_cond (cond, exitflag)
1679 tree cond;
1680 int exitflag;
1681 {
1682 struct nesting *thiscond = ALLOC_NESTING ();
1683
1684 /* Make an entry on cond_stack for the cond we are entering. */
1685
1686 thiscond->next = cond_stack;
1687 thiscond->all = nesting_stack;
1688 thiscond->depth = ++nesting_depth;
1689 thiscond->data.cond.next_label = gen_label_rtx ();
1690 /* Before we encounter an `else', we don't need a separate exit label
1691 unless there are supposed to be exit statements
1692 to exit this conditional. */
1693 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1694 thiscond->data.cond.endif_label = thiscond->exit_label;
1695 cond_stack = thiscond;
1696 nesting_stack = thiscond;
1697
1698 do_jump (cond, thiscond->data.cond.next_label, NULL);
1699 }
1700
1701 /* Generate RTL between then-clause and the elseif-clause
1702 of an if-then-elseif-.... */
1703
1704 void
1705 expand_start_elseif (cond)
1706 tree cond;
1707 {
1708 if (cond_stack->data.cond.endif_label == 0)
1709 cond_stack->data.cond.endif_label = gen_label_rtx ();
1710 emit_jump (cond_stack->data.cond.endif_label);
1711 emit_label (cond_stack->data.cond.next_label);
1712 cond_stack->data.cond.next_label = gen_label_rtx ();
1713 do_jump (cond, cond_stack->data.cond.next_label, NULL);
1714 }
1715
1716 /* Generate RTL between the then-clause and the else-clause
1717 of an if-then-else. */
1718
1719 void
1720 expand_start_else ()
1721 {
1722 if (cond_stack->data.cond.endif_label == 0)
1723 cond_stack->data.cond.endif_label = gen_label_rtx ();
1724 emit_jump (cond_stack->data.cond.endif_label);
1725 emit_label (cond_stack->data.cond.next_label);
1726 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1727 }
1728
1729 /* Generate RTL for the end of an if-then.
1730 Pop the record for it off of cond_stack. */
1731
1732 void
1733 expand_end_cond ()
1734 {
1735 struct nesting *thiscond = cond_stack;
1736
1737 do_pending_stack_adjust ();
1738 if (thiscond->data.cond.next_label)
1739 emit_label (thiscond->data.cond.next_label);
1740 if (thiscond->data.cond.endif_label)
1741 emit_label (thiscond->data.cond.endif_label);
1742
1743 POPSTACK (cond_stack);
1744 last_expr_type = 0;
1745 }
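
/* Illustrative sketch only, not part of this file's interface: how a
   front end might drive the calls above to expand `if (cond) A else B'.
   The wrapper name `example_expand_if' is hypothetical; the expand_*
   entry points are the real ones defined above, and expand_expr_stmt
   is defined elsewhere in this file.  */
#if 0
static void
example_expand_if (cond, then_clause, else_clause)
     tree cond, then_clause, else_clause;
{
  expand_start_cond (cond, 0);    /* Jumps to next_label if COND is false.  */
  expand_expr_stmt (then_clause);
  if (else_clause != NULL_TREE)
    {
      expand_start_else ();       /* Jump to endif; lay down next_label.  */
      expand_expr_stmt (else_clause);
    }
  expand_end_cond ();             /* Emit next_label and/or endif_label.  */
}
#endif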
1746 \f
1747 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1748 loop should be exited by `exit_something'. This is a loop for which
1749 `expand_continue_loop' will jump to the top of the loop.
1750
1751 Make an entry on loop_stack to record the labels associated with
1752 this loop. */
1753
1754 struct nesting *
1755 expand_start_loop (exit_flag)
1756 int exit_flag;
1757 {
1758 register struct nesting *thisloop = ALLOC_NESTING ();
1759
1760 /* Make an entry on loop_stack for the loop we are entering. */
1761
1762 thisloop->next = loop_stack;
1763 thisloop->all = nesting_stack;
1764 thisloop->depth = ++nesting_depth;
1765 thisloop->data.loop.start_label = gen_label_rtx ();
1766 thisloop->data.loop.end_label = gen_label_rtx ();
1767 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1768 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1769 loop_stack = thisloop;
1770 nesting_stack = thisloop;
1771
1772 do_pending_stack_adjust ();
1773 emit_queue ();
1774 emit_note (0, NOTE_INSN_LOOP_BEG);
1775 emit_label (thisloop->data.loop.start_label);
1776
1777 return thisloop;
1778 }
1779
1780 /* Like expand_start_loop but for a loop where the continuation point
1781 (for expand_continue_loop) will be specified explicitly. */
1782
1783 struct nesting *
1784 expand_start_loop_continue_elsewhere (exit_flag)
1785 int exit_flag;
1786 {
1787 struct nesting *thisloop = expand_start_loop (exit_flag);
1788 loop_stack->data.loop.continue_label = gen_label_rtx ();
1789 return thisloop;
1790 }
1791
1792 /* Specify the continuation point for a loop started with
1793 expand_start_loop_continue_elsewhere.
1794 Use this at the point in the code to which a continue statement
1795 should jump. */
1796
1797 void
1798 expand_loop_continue_here ()
1799 {
1800 do_pending_stack_adjust ();
1801 emit_note (0, NOTE_INSN_LOOP_CONT);
1802 emit_label (loop_stack->data.loop.continue_label);
1803 }
1804
1805 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1806 Pop the block off of loop_stack. */
1807
1808 void
1809 expand_end_loop ()
1810 {
1811 register rtx insn = get_last_insn ();
1812 register rtx start_label = loop_stack->data.loop.start_label;
1813 rtx last_test_insn = 0;
1814 int num_insns = 0;
1815
1816 /* Mark the continue-point at the top of the loop if none elsewhere. */
1817 if (start_label == loop_stack->data.loop.continue_label)
1818 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1819
1820 do_pending_stack_adjust ();
1821
1822 /* If optimizing, perhaps reorder the loop. If the loop
1823 starts with a conditional exit, roll that to the end
1824 where it will optimize together with the jump back.
1825
1826 We look for the last conditional branch to the exit that we encounter
1827 before hitting 30 insns, a CALL_INSN or a CODE_LABEL. If we see an
1828 unconditional branch to the exit first, use it.
1829
1830 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1831 because moving them is not valid. */
1832
1833 if (optimize
1834 &&
1835 ! (GET_CODE (insn) == JUMP_INSN
1836 && GET_CODE (PATTERN (insn)) == SET
1837 && SET_DEST (PATTERN (insn)) == pc_rtx
1838 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1839 {
1840 /* Scan insns from the top of the loop looking for a qualified
1841 conditional exit. */
1842 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1843 insn = NEXT_INSN (insn))
1844 {
1845 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1846 break;
1847
1848 if (GET_CODE (insn) == NOTE
1849 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1850 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1851 break;
1852
1853 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1854 num_insns++;
1855
1856 if (last_test_insn && num_insns > 30)
1857 break;
1858
1859 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1860 && SET_DEST (PATTERN (insn)) == pc_rtx
1861 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1862 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1863 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1864 == loop_stack->data.loop.end_label))
1865 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1866 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1867 == loop_stack->data.loop.end_label))))
1868 last_test_insn = insn;
1869
1870 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1871 && GET_CODE (PATTERN (insn)) == SET
1872 && SET_DEST (PATTERN (insn)) == pc_rtx
1873 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1874 && (XEXP (SET_SRC (PATTERN (insn)), 0)
1875 == loop_stack->data.loop.end_label))
1876 /* Include BARRIER. */
1877 last_test_insn = NEXT_INSN (insn);
1878 }
1879
1880 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1881 {
1882 /* We found one. Move everything from the top of the loop
1883 up through the exit test to the end of the loop, and insert
1884 a jump at the loop entry that jumps to the moved test. */
1885 register rtx newstart_label = gen_label_rtx ();
1886 register rtx start_move = start_label;
1887
1888 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1889 then we want to move this note also. */
1890 if (GET_CODE (PREV_INSN (start_move)) == NOTE
1891 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
1892 == NOTE_INSN_LOOP_CONT))
1893 start_move = PREV_INSN (start_move);
1894
1895 emit_label_after (newstart_label, PREV_INSN (start_move));
1896 reorder_insns (start_move, last_test_insn, get_last_insn ());
1897 emit_jump_insn_after (gen_jump (start_label),
1898 PREV_INSN (newstart_label));
1899 emit_barrier_after (PREV_INSN (newstart_label));
1900 start_label = newstart_label;
1901 }
1902 }
1903
1904 emit_jump (start_label);
1905 emit_note (0, NOTE_INSN_LOOP_END);
1906 emit_label (loop_stack->data.loop.end_label);
1907
1908 POPSTACK (loop_stack);
1909
1910 last_expr_type = 0;
1911 }
1912
1913 /* Generate a jump to the current loop's continue-point.
1914 This is usually the top of the loop, but may be specified
1915 explicitly elsewhere. If not currently inside a loop,
1916 return 0 and do nothing; caller will print an error message. */
1917
1918 int
1919 expand_continue_loop (whichloop)
1920 struct nesting *whichloop;
1921 {
1922 last_expr_type = 0;
1923 if (whichloop == 0)
1924 whichloop = loop_stack;
1925 if (whichloop == 0)
1926 return 0;
1927 expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
1928 return 1;
1929 }
1930
1931 /* Generate a jump to exit the current loop. If not currently inside a loop,
1932 return 0 and do nothing; caller will print an error message. */
1933
1934 int
1935 expand_exit_loop (whichloop)
1936 struct nesting *whichloop;
1937 {
1938 last_expr_type = 0;
1939 if (whichloop == 0)
1940 whichloop = loop_stack;
1941 if (whichloop == 0)
1942 return 0;
1943 expand_goto_internal (0, whichloop->data.loop.end_label, 0);
1944 return 1;
1945 }
1946
1947 /* Generate a conditional jump to exit the current loop if COND
1948 evaluates to zero. If not currently inside a loop,
1949 return 0 and do nothing; caller will print an error message. */
1950
1951 int
1952 expand_exit_loop_if_false (whichloop, cond)
1953 struct nesting *whichloop;
1954 tree cond;
1955 {
1956 last_expr_type = 0;
1957 if (whichloop == 0)
1958 whichloop = loop_stack;
1959 if (whichloop == 0)
1960 return 0;
1961 do_jump (cond, whichloop->data.loop.end_label, NULL);
1962 return 1;
1963 }
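
/* Illustrative sketch only: expanding `while (cond) body' and
   `for (; cond; incr) body' with the loop entry points above.
   The wrapper names are hypothetical; expand_expr_stmt is defined
   elsewhere in this file.  */
#if 0
static void
example_expand_while (cond, body)
     tree cond, body;
{
  /* Pass 1 so `exit_something' (e.g. break) can exit this loop.  */
  struct nesting *loop = expand_start_loop (1);

  expand_exit_loop_if_false (loop, cond); /* Exit when COND is false.  */
  expand_expr_stmt (body);
  expand_end_loop ();                     /* Jump back; emit end label.  */
}

static void
example_expand_for (cond, incr, body)
     tree cond, incr, body;
{
  struct nesting *loop = expand_start_loop_continue_elsewhere (1);

  expand_exit_loop_if_false (loop, cond);
  expand_expr_stmt (body);
  expand_loop_continue_here ();   /* `continue' jumps here, to INCR.  */
  expand_expr_stmt (incr);
  expand_end_loop ();
}
#endif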
1964
1965 /* Return non-zero if we should preserve sub-expressions as separate
1966 pseudos. We never do so if we aren't optimizing. We always do so
1967 if -fexpensive-optimizations.
1968
1969 Otherwise, we only do so if we are in the "early" part of a loop,
1970 i.e., while the loop being emitted may still turn out to be a small one. */
1971
1972 int
1973 preserve_subexpressions_p ()
1974 {
1975 rtx insn;
1976
1977 if (flag_expensive_optimizations)
1978 return 1;
1979
1980 if (optimize == 0 || loop_stack == 0)
1981 return 0;
1982
1983 insn = get_last_insn_anywhere ();
1984
1985 return (insn
1986 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
1987 < n_non_fixed_regs * 3));
1988
1989 }
1990
1991 /* Generate a jump to exit the current loop, conditional, binding contour
1992 or case statement. Not all such constructs are visible to this function,
1993 only those started with EXIT_FLAG nonzero. Individual languages use
1994 the EXIT_FLAG parameter to control which kinds of constructs you can
1995 exit this way.
1996
1997 If not currently inside anything that can be exited,
1998 return 0 and do nothing; caller will print an error message. */
1999
2000 int
2001 expand_exit_something ()
2002 {
2003 struct nesting *n;
2004 last_expr_type = 0;
2005 for (n = nesting_stack; n; n = n->all)
2006 if (n->exit_label != 0)
2007 {
2008 expand_goto_internal (0, n->exit_label, 0);
2009 return 1;
2010 }
2011
2012 return 0;
2013 }
2014 \f
2015 /* Generate RTL to return from the current function, with no value.
2016 (That is, we do not do anything about returning any value.) */
2017
2018 void
2019 expand_null_return ()
2020 {
2021 struct nesting *block = block_stack;
2022 rtx last_insn = 0;
2023
2024 /* Does any pending block have cleanups? */
2025
2026 while (block && block->data.block.cleanups == 0)
2027 block = block->next;
2028
2029 /* If yes, use a goto to return, since that runs cleanups. */
2030
2031 expand_null_return_1 (last_insn, block != 0);
2032 }
2033
2034 /* Generate RTL to return from the current function, with value VAL. */
2035
2036 void
2037 expand_value_return (val)
2038 rtx val;
2039 {
2040 struct nesting *block = block_stack;
2041 rtx last_insn = get_last_insn ();
2042 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2043
2044 /* Copy the value to the return location
2045 unless it's already there. */
2046
2047 if (return_reg != val)
2048 emit_move_insn (return_reg, val);
2049 if (GET_CODE (return_reg) == REG
2050 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2051 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2052
2053 /* Does any pending block have cleanups? */
2054
2055 while (block && block->data.block.cleanups == 0)
2056 block = block->next;
2057
2058 /* If yes, use a goto to return, since that runs cleanups.
2059 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2060
2061 expand_null_return_1 (last_insn, block != 0);
2062 }
2063
2064 /* Output a return with no value. If LAST_INSN is nonzero,
2065 pretend that the return takes place after LAST_INSN.
2066 If USE_GOTO is nonzero then don't use a return instruction;
2067 go to the return label instead. This causes any cleanups
2068 of pending blocks to be executed normally. */
2069
2070 static void
2071 expand_null_return_1 (last_insn, use_goto)
2072 rtx last_insn;
2073 int use_goto;
2074 {
2075 rtx end_label = cleanup_label ? cleanup_label : return_label;
2076
2077 clear_pending_stack_adjust ();
2078 do_pending_stack_adjust ();
2079 last_expr_type = 0;
2080
2081 /* PCC-struct return always uses an epilogue. */
2082 if (current_function_returns_pcc_struct || use_goto)
2083 {
2084 if (end_label == 0)
2085 end_label = return_label = gen_label_rtx ();
2086 expand_goto_internal (0, end_label, last_insn);
2087 return;
2088 }
2089
2090 /* Otherwise output a simple return-insn if one is available,
2091 unless it won't do the job. */
2092 #ifdef HAVE_return
2093 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2094 {
2095 emit_jump_insn (gen_return ());
2096 emit_barrier ();
2097 return;
2098 }
2099 #endif
2100
2101 /* Otherwise jump to the epilogue. */
2102 expand_goto_internal (0, end_label, last_insn);
2103 }
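
/* Illustrative sketch only: what a C-like front end might do with the
   return entry points.  `return;' maps to expand_null_return, and
   `return E;' is handed to expand_return as an assignment to the
   function's RESULT_DECL.  The wrapper name is hypothetical.  */
#if 0
static void
example_expand_return_stmt (retval)
     tree retval;
{
  if (retval == NULL_TREE)
    expand_null_return ();
  else
    expand_return (build (MODIFY_EXPR,
			  TREE_TYPE (DECL_RESULT (current_function_decl)),
			  DECL_RESULT (current_function_decl),
			  retval));
}
#endif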
2104 \f
2105 /* Generate RTL to evaluate the expression RETVAL and return it
2106 from the current function. */
2107
2108 void
2109 expand_return (retval)
2110 tree retval;
2111 {
2112 /* If there are any cleanups to be performed, then they will
2113 be inserted following LAST_INSN. It is desirable
2114 that the last_insn, for such purposes, should be the
2115 last insn before computing the return value. Otherwise, cleanups
2116 which call functions can clobber the return value. */
2117 /* ??? rms: I think that is erroneous, because in C++ it would
2118 run destructors on variables that might be used in the subsequent
2119 computation of the return value. */
2120 rtx last_insn = 0;
2121 register rtx val = 0;
2122 register rtx op0;
2123 tree retval_rhs;
2124 int cleanups;
2125 struct nesting *block;
2126
2127 /* If function wants no value, give it none. */
2128 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2129 {
2130 expand_expr (retval, 0, VOIDmode, 0);
2131 expand_null_return ();
2132 return;
2133 }
2134
2135 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2136 cleanups = any_pending_cleanups (1);
2137
2138 if (TREE_CODE (retval) == RESULT_DECL)
2139 retval_rhs = retval;
2140 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2141 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2142 retval_rhs = TREE_OPERAND (retval, 1);
2143 else if (TREE_TYPE (retval) == void_type_node)
2144 /* Recognize tail-recursive call to void function. */
2145 retval_rhs = retval;
2146 else
2147 retval_rhs = NULL_TREE;
2148
2149 /* Only use `last_insn' if there are cleanups which must be run. */
2150 if (cleanups || cleanup_label != 0)
2151 last_insn = get_last_insn ();
2152
2153 /* Distribute return down conditional expr if either of the sides
2154 may involve tail recursion (see test below). This enhances the number
2155 of tail recursions we see. Don't always do this, since it can produce
2156 sub-optimal code in some cases, and we already distribute assignments
2157 into conditional expressions when that would help.
2158
2159 if (optimize && retval_rhs != 0
2160 && frame_offset == 0
2161 && TREE_CODE (retval_rhs) == COND_EXPR
2162 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2163 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2164 {
2165 rtx label = gen_label_rtx ();
2166 do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
2167 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2168 DECL_RESULT (current_function_decl),
2169 TREE_OPERAND (retval_rhs, 1)));
2170 emit_label (label);
2171 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2172 DECL_RESULT (current_function_decl),
2173 TREE_OPERAND (retval_rhs, 2)));
2174 return;
2175 }
2176
2177 /* For tail-recursive call to current function,
2178 just jump back to the beginning.
2179 It's unsafe if any auto variable in this function
2180 has its address taken; for simplicity,
2181 require stack frame to be empty. */
2182 if (optimize && retval_rhs != 0
2183 && frame_offset == 0
2184 && TREE_CODE (retval_rhs) == CALL_EXPR
2185 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2186 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2187 /* Finish checking validity, and if valid emit code
2188 to set the argument variables for the new call. */
2189 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2190 DECL_ARGUMENTS (current_function_decl)))
2191 {
2192 if (tail_recursion_label == 0)
2193 {
2194 tail_recursion_label = gen_label_rtx ();
2195 emit_label_after (tail_recursion_label,
2196 tail_recursion_reentry);
2197 }
2198 expand_goto_internal (0, tail_recursion_label, last_insn);
2199 emit_barrier ();
2200 return;
2201 }
2202 #ifdef HAVE_return
2203 /* This optimization is safe even if there are local cleanups,
2204 because expand_null_return takes care of them.
2205 ??? I think it should also be safe when there is a cleanup label,
2206 because expand_null_return takes care of them, too.
2207 Any reason why not? */
2208 if (HAVE_return && cleanup_label == 0
2209 && ! current_function_returns_pcc_struct)
2210 {
2211 /* If this is return x == y; then generate
2212 if (x == y) return 1; else return 0;
2213 if we can do it with explicit return insns. */
2214 if (retval_rhs)
2215 switch (TREE_CODE (retval_rhs))
2216 {
2217 case EQ_EXPR:
2218 case NE_EXPR:
2219 case GT_EXPR:
2220 case GE_EXPR:
2221 case LT_EXPR:
2222 case LE_EXPR:
2223 case TRUTH_ANDIF_EXPR:
2224 case TRUTH_ORIF_EXPR:
2225 case TRUTH_AND_EXPR:
2226 case TRUTH_OR_EXPR:
2227 case TRUTH_NOT_EXPR:
2228 op0 = gen_label_rtx ();
2229 jumpifnot (retval_rhs, op0);
2230 expand_value_return (const1_rtx);
2231 emit_label (op0);
2232 expand_value_return (const0_rtx);
2233 return;
2234 }
2235 }
2236 #endif /* HAVE_return */
2237
2238 if (cleanups
2239 && retval_rhs != 0
2240 && TREE_TYPE (retval_rhs) != void_type_node
2241 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2242 {
2243 /* Calculate the return value into a pseudo reg. */
2244 val = expand_expr (retval_rhs, 0, VOIDmode, 0);
2245 emit_queue ();
2246 /* All temporaries have now been used. */
2247 free_temp_slots ();
2248 /* Return the calculated value, doing cleanups first. */
2249 expand_value_return (val);
2250 }
2251 else
2252 {
2253 /* No cleanups or no hard reg used;
2254 calculate value into hard return reg. */
2255 expand_expr (retval, 0, VOIDmode, 0);
2256 emit_queue ();
2257 free_temp_slots ();
2258 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2259 }
2260 }
2261
2262 /* Return 1 if the end of the generated RTL is not a barrier.
2263 This means code already compiled can drop through. */
2264
2265 int
2266 drop_through_at_end_p ()
2267 {
2268 rtx insn = get_last_insn ();
2269 while (insn && GET_CODE (insn) == NOTE)
2270 insn = PREV_INSN (insn);
2271 return insn && GET_CODE (insn) != BARRIER;
2272 }
2273 \f
2274 /* Emit code to alter this function's formal parms for a tail-recursive call.
2275 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2276 FORMALS is the chain of decls of formals.
2277 Return 1 if this can be done;
2278 otherwise return 0 and do not emit any code. */
2279
2280 static int
2281 tail_recursion_args (actuals, formals)
2282 tree actuals, formals;
2283 {
2284 register tree a = actuals, f = formals;
2285 register int i;
2286 register rtx *argvec;
2287
2288 /* Check that number and types of actuals are compatible
2289 with the formals. This is not always true in valid C code.
2290 Also check that no formal needs to be addressable
2291 and that all formals are scalars. */
2292
2293 /* Also count the args. */
2294
2295 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2296 {
2297 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2298 return 0;
2299 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2300 return 0;
2301 }
2302 if (a != 0 || f != 0)
2303 return 0;
2304
2305 /* Compute all the actuals. */
2306
2307 argvec = (rtx *) alloca (i * sizeof (rtx));
2308
2309 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2310 argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
2311
2312 /* Find which actual values refer to current values of previous formals.
2313 Copy each of them now, before any formal is changed. */
2314
2315 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2316 {
2317 int copy = 0;
2318 register int j;
2319 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2320 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2321 { copy = 1; break; }
2322 if (copy)
2323 argvec[i] = copy_to_reg (argvec[i]);
2324 }
2325
2326 /* Store the values of the actuals into the formals. */
2327
2328 for (f = formals, a = actuals, i = 0; f;
2329 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2330 {
2331 if (DECL_MODE (f) == GET_MODE (argvec[i]))
2332 emit_move_insn (DECL_RTL (f), argvec[i]);
2333 else
2334 convert_move (DECL_RTL (f), argvec[i],
2335 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2336 }
2337
2338 free_temp_slots ();
2339 return 1;
2340 }
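
/* Worked example (illustrative): for the tail call `f (b, a)' inside
   `f (a, b)', the loop above computes argvec = {B, A}.  When storing
   argvec[0] into formal A, the old value of A is still needed by
   argvec[1]; the copying loop notices that argvec[1] mentions the
   DECL_RTL of A and copies it to a fresh pseudo first, so the
   parallel assignment a,b = b,a is performed correctly before the
   jump back to the top of the function.  */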
2341 \f
2342 /* Generate the RTL code for entering a binding contour.
2343 The variables are declared one by one, by calls to `expand_decl'.
2344
2345 EXIT_FLAG is nonzero if this construct should be visible to
2346 `exit_something'. */
2347
2348 void
2349 expand_start_bindings (exit_flag)
2350 int exit_flag;
2351 {
2352 struct nesting *thisblock = ALLOC_NESTING ();
2353
2354 rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
2355
2356 /* Make an entry on block_stack for the block we are entering. */
2357
2358 thisblock->next = block_stack;
2359 thisblock->all = nesting_stack;
2360 thisblock->depth = ++nesting_depth;
2361 thisblock->data.block.stack_level = 0;
2362 thisblock->data.block.cleanups = 0;
2363 thisblock->data.block.function_call_count = 0;
2364 #if 0
2365 if (block_stack)
2366 {
2367 if (block_stack->data.block.cleanups == NULL_TREE
2368 && (block_stack->data.block.outer_cleanups == NULL_TREE
2369 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2370 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2371 else
2372 thisblock->data.block.outer_cleanups
2373 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2374 block_stack->data.block.outer_cleanups);
2375 }
2376 else
2377 thisblock->data.block.outer_cleanups = 0;
2378 #endif
2379 #if 1
2380 if (block_stack
2381 && !(block_stack->data.block.cleanups == NULL_TREE
2382 && block_stack->data.block.outer_cleanups == NULL_TREE))
2383 thisblock->data.block.outer_cleanups
2384 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2385 block_stack->data.block.outer_cleanups);
2386 else
2387 thisblock->data.block.outer_cleanups = 0;
2388 #endif
2389 thisblock->data.block.label_chain = 0;
2390 thisblock->data.block.innermost_stack_block = stack_block_stack;
2391 thisblock->data.block.first_insn = note;
2392 thisblock->data.block.block_start_count = ++block_start_count;
2393 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2394 block_stack = thisblock;
2395 nesting_stack = thisblock;
2396
2397 /* Make a new level for allocating stack slots. */
2398 push_temp_slots ();
2399 }
2400
2401 /* Generate RTL code to terminate a binding contour.
2402 VARS is the chain of VAR_DECL nodes
2403 for the variables bound in this contour.
2404 MARK_ENDS is nonzero if we should put a note at the beginning
2405 and end of this binding contour.
2406
2407 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2408 (That is true automatically if the contour has a saved stack level.) */
2409
2410 void
2411 expand_end_bindings (vars, mark_ends, dont_jump_in)
2412 tree vars;
2413 int mark_ends;
2414 int dont_jump_in;
2415 {
2416 register struct nesting *thisblock = block_stack;
2417 register tree decl;
2418
2419 if (warn_unused)
2420 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2421 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
2422 warning_with_decl (decl, "unused variable `%s'");
2423
2424 /* Mark the beginning and end of the scope if requested. */
2425
2426 if (mark_ends)
2427 emit_note (0, NOTE_INSN_BLOCK_END);
2428 else
2429 /* Get rid of the beginning-mark if we don't make an end-mark. */
2430 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2431
2432 if (thisblock->exit_label)
2433 {
2434 do_pending_stack_adjust ();
2435 emit_label (thisblock->exit_label);
2436 }
2437
2438 /* If necessary, make a handler for nonlocal gotos taking
2439 place in the function calls in this block. */
2440 if (function_call_count != thisblock->data.block.function_call_count
2441 && nonlocal_labels
2442 /* Make handler for outermost block
2443 if there were any nonlocal gotos to this function. */
2444 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2445 /* Make handler for inner block if it has something
2446 special to do when you jump out of it. */
2447 : (thisblock->data.block.cleanups != 0
2448 || thisblock->data.block.stack_level != 0)))
2449 {
2450 tree link;
2451 rtx afterward = gen_label_rtx ();
2452 rtx handler_label = gen_label_rtx ();
2453 rtx save_receiver = gen_reg_rtx (Pmode);
2454
2455 /* Don't let jump_optimize delete the handler. */
2456 LABEL_PRESERVE_P (handler_label) = 1;
2457
2458 /* Record the handler address in the stack slot for that purpose,
2459 during this block, saving and restoring the outer value. */
2460 if (thisblock->next != 0)
2461 {
2462 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2463 emit_insn_before (gen_move_insn (save_receiver,
2464 nonlocal_goto_handler_slot),
2465 thisblock->data.block.first_insn);
2466 }
2467 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2468 gen_rtx (LABEL_REF, Pmode,
2469 handler_label)),
2470 thisblock->data.block.first_insn);
2471
2472 /* Jump around the handler; it runs only when specially invoked. */
2473 emit_jump (afterward);
2474 emit_label (handler_label);
2475
2476 #ifdef HAVE_nonlocal_goto
2477 if (! HAVE_nonlocal_goto)
2478 #endif
2479 /* First adjust our frame pointer to its actual value. It was
2480 previously set to the start of the virtual area corresponding to
2481 the stacked variables when we branched here, and it now needs to be
2482 adjusted to the actual hardware fp value.
2483
2484 Assignments to virtual registers are converted by
2485 instantiate_virtual_regs into the corresponding assignment
2486 to the underlying register (fp in this case) that makes
2487 the original assignment true.
2488 So the following insn will actually be
2489 decrementing fp by STARTING_FRAME_OFFSET. */
2490 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2491
2492 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2493 if (fixed_regs[ARG_POINTER_REGNUM])
2494 {
2495 /* Now restore our arg pointer from the address at which it was saved
2496 in our stack frame.
2497 If there hasn't been space allocated for it yet, make some now. */
2498 if (arg_pointer_save_area == 0)
2499 arg_pointer_save_area
2500 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2501 emit_move_insn (virtual_incoming_args_rtx,
2502 /* We need a pseudo here,
2503 or else instantiate_virtual_regs_1 complains. */
2504 copy_to_reg (arg_pointer_save_area));
2505 }
2506 #endif
2507
2508 /* The handler expects the desired label address in the static chain
2509 register. It tests the address and does an appropriate jump
2510 to whatever label is desired. */
2511 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2512 /* Skip any labels we shouldn't be able to jump to from here. */
2513 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2514 {
2515 rtx not_this = gen_label_rtx ();
2516 rtx this = gen_label_rtx ();
2517 do_jump_if_equal (static_chain_rtx,
2518 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2519 this, 0);
2520 emit_jump (not_this);
2521 emit_label (this);
2522 expand_goto (TREE_VALUE (link));
2523 emit_label (not_this);
2524 }
2525 /* If label is not recognized, abort. */
2526 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2527 VOIDmode, 0);
2528 emit_label (afterward);
2529 }
2530
2531 /* Don't allow jumping into a block that has cleanups or a stack level. */
2532 if (dont_jump_in
2533 || thisblock->data.block.stack_level != 0
2534 || thisblock->data.block.cleanups != 0)
2535 {
2536 struct label_chain *chain;
2537
2538 /* Any labels in this block are no longer valid to go to.
2539 Mark them to cause an error message. */
2540 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2541 {
2542 DECL_TOO_LATE (chain->label) = 1;
2543 /* If any goto without a fixup came to this label,
2544 that must be an error, because gotos without fixups
2545 come from outside all saved stack-levels and all cleanups. */
2546 if (TREE_ADDRESSABLE (chain->label))
2547 error_with_decl (chain->label,
2548 "label `%s' used before containing binding contour");
2549 }
2550 }
2551
2552 /* Restore stack level in effect before the block
2553 (only if variable-size objects allocated). */
2554 /* Perform any cleanups associated with the block. */
2555
2556 if (thisblock->data.block.stack_level != 0
2557 || thisblock->data.block.cleanups != 0)
2558 {
2559 /* Don't let cleanups affect ({...}) constructs. */
2560 int old_expr_stmts_for_value = expr_stmts_for_value;
2561 rtx old_last_expr_value = last_expr_value;
2562 tree old_last_expr_type = last_expr_type;
2563 expr_stmts_for_value = 0;
2564
2565 /* Do the cleanups. */
2566 expand_cleanups (thisblock->data.block.cleanups, 0);
2567 do_pending_stack_adjust ();
2568
2569 expr_stmts_for_value = old_expr_stmts_for_value;
2570 last_expr_value = old_last_expr_value;
2571 last_expr_type = old_last_expr_type;
2572
2573 /* Restore the stack level. */
2574
2575 if (thisblock->data.block.stack_level != 0)
2576 {
2577 emit_move_insn (stack_pointer_rtx,
2578 thisblock->data.block.stack_level);
2579 if (nonlocal_goto_stack_level != 0)
2580 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2581 }
2582
2583 /* Any gotos out of this block must also do these things.
2584 Also report any gotos with fixups that came to labels in this level. */
2585 fixup_gotos (thisblock,
2586 thisblock->data.block.stack_level,
2587 thisblock->data.block.cleanups,
2588 thisblock->data.block.first_insn,
2589 dont_jump_in);
2590 }
2591
2592 /* If doing stupid register allocation, make sure lives of all
2593 register variables declared here extend thru end of scope. */
2594
2595 if (obey_regdecls)
2596 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2597 {
2598 rtx rtl = DECL_RTL (decl);
2599 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2600 use_variable (rtl);
2601 }
2602
2603 /* Restore block_stack level for containing block. */
2604
2605 stack_block_stack = thisblock->data.block.innermost_stack_block;
2606 POPSTACK (block_stack);
2607
2608 /* Pop the stack slot nesting and free any slots at this level. */
2609 pop_temp_slots ();
2610 }
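
/* Illustrative sketch only: expanding a brace-level `{ int x = init;
   stmts... }' with the binding-contour entry points.  The wrapper name
   is hypothetical; DECL is the VAR_DECL for `x', and expand_decl,
   expand_decl_init and expand_expr_stmt are real entry points in this
   file.  */
#if 0
static void
example_expand_block (decl, stmts)
     tree decl, stmts;
{
  expand_start_bindings (0);    /* Not visible to `exit_something'.  */
  expand_decl (decl);           /* Give the decl a reg or stack slot.  */
  expand_decl_init (decl);      /* Store DECL_INITIAL, if any.  */
  expand_expr_stmt (stmts);
  /* 1: emit BLOCK_BEG/BLOCK_END notes; 0: jumping in is permitted.  */
  expand_end_bindings (decl, 1, 0);
}
#endif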
2611 \f
2612 /* Generate RTL for the automatic variable declaration DECL.
2613 (Other kinds of declarations are simply ignored if seen here.)
2614 CLEANUP is an expression to be executed at exit from this binding contour;
2615 for example, in C++, it might call the destructor for this variable.
2616
2617 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2618 either before or after calling `expand_decl' but before compiling
2619 any subsequent expressions. This is because CLEANUP may be expanded
2620 more than once, on different branches of execution.
2621 For the same reason, CLEANUP may not contain a CALL_EXPR
2622 except as its topmost node--else `preexpand_calls' would get confused.
2623
2624 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2625 that is not associated with any particular variable.
2626
2627 There is no special support here for C++ constructors.
2628 They should be handled by the proper code in DECL_INITIAL. */
2629
2630 void
2631 expand_decl (decl)
2632 register tree decl;
2633 {
2634 struct nesting *thisblock = block_stack;
2635 tree type = TREE_TYPE (decl);
2636
2637 /* Only automatic variables need any expansion done.
2638 Static and external variables, and external functions,
2639 will be handled by `assemble_variable' (called from finish_decl).
2640 TYPE_DECL and CONST_DECL require nothing.
2641 PARM_DECLs are handled in `assign_parms'. */
2642
2643 if (TREE_CODE (decl) != VAR_DECL)
2644 return;
2645 if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
2646 return;
2647
2648 /* Create the RTL representation for the variable. */
2649
2650 if (type == error_mark_node)
2651 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2652 else if (DECL_SIZE (decl) == 0)
2653 /* Variable with incomplete type. */
2654 {
2655 if (DECL_INITIAL (decl) == 0)
2656 /* Error message was already done; now avoid a crash. */
2657 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2658 else
2659 /* An initializer is going to decide the size of this array.
2660 Until we know the size, represent its address with a reg. */
2661 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2662 }
2663 else if (DECL_MODE (decl) != BLKmode
2664 /* If -ffloat-store, don't put explicit float vars
2665 into regs. */
2666 && !(flag_float_store
2667 && TREE_CODE (type) == REAL_TYPE)
2668 && ! TREE_THIS_VOLATILE (decl)
2669 && ! TREE_ADDRESSABLE (decl)
2670 && (TREE_REGDECL (decl) || ! obey_regdecls))
2671 {
2672 /* Automatic variable that can go in a register. */
2673 DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
2674 if (TREE_CODE (type) == POINTER_TYPE)
2675 mark_reg_pointer (DECL_RTL (decl));
2676 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2677 }
2678 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2679 {
2680 /* Variable of fixed size that goes on the stack. */
2681 rtx oldaddr = 0;
2682 rtx addr;
2683
2684 /* If we previously made RTL for this decl, it must be an array
2685 whose size was determined by the initializer.
2686 The old address was a register; set that register now
2687 to the proper address. */
2688 if (DECL_RTL (decl) != 0)
2689 {
2690 if (GET_CODE (DECL_RTL (decl)) != MEM
2691 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2692 abort ();
2693 oldaddr = XEXP (DECL_RTL (decl), 0);
2694 }
2695
2696 DECL_RTL (decl)
2697 = assign_stack_temp (DECL_MODE (decl),
2698 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2699 + BITS_PER_UNIT - 1)
2700 / BITS_PER_UNIT),
2701 1);
2702
2703 /* Set alignment we actually gave this decl. */
2704 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2705 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2706
2707 if (oldaddr)
2708 {
2709 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2710 if (addr != oldaddr)
2711 emit_move_insn (oldaddr, addr);
2712 }
2713
2714 /* If this is a memory ref that contains aggregate components,
2715 mark it as such for cse and loop optimize. */
2716 MEM_IN_STRUCT_P (DECL_RTL (decl))
2717 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2718 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2719 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2720 #if 0
2721 /* If this is in memory because of -ffloat-store,
2722 set the volatile bit, to prevent optimizations from
2723 undoing the effects. */
2724 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2725 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2726 #endif
2727 }
2728 else
2729 /* Dynamic-size object: must push space on the stack. */
2730 {
2731 rtx address, size;
2732
2733 /* Record the stack pointer on entry to the block, if we have
2734 not already done so. */
2735 if (thisblock->data.block.stack_level == 0)
2736 {
2737 do_pending_stack_adjust ();
2738 thisblock->data.block.stack_level
2739 = copy_to_reg (stack_pointer_rtx);
2740 stack_block_stack = thisblock;
2741 }
2742
2743 /* Compute the variable's size, in bytes. */
2744 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2745 DECL_SIZE (decl),
2746 size_int (BITS_PER_UNIT)),
2747 0, VOIDmode, 0);
2748 free_temp_slots ();
2749
2750 /* Allocate space on the stack for the variable. */
2751 address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
2752
2753 if (nonlocal_goto_stack_level != 0)
2754 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
2755
2756 /* Reference the variable indirect through that rtx. */
2757 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2758
2759 /* Indicate the alignment we actually gave this variable. */
2760 #ifdef STACK_BOUNDARY
2761 DECL_ALIGN (decl) = STACK_BOUNDARY;
2762 #else
2763 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2764 #endif
2765 }
2766
2767 if (TREE_THIS_VOLATILE (decl))
2768 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2769 if (TREE_READONLY (decl))
2770 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2771
2772 /* If doing stupid register allocation, make sure life of any
2773 register variable starts here, at the start of its scope. */
2774
2775 if (obey_regdecls)
2776 use_variable (DECL_RTL (decl));
2777 }
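
/* Summary of the cases above (illustrative): for `int i;', DECL_MODE is
   an integer mode and I normally gets a pseudo register; for
   `volatile int v;' or `int a[10];', the fixed-size branch assigns a
   stack temporary; for `int a[n];' with non-constant N, the dynamic
   branch saves the entry stack level and calls
   allocate_dynamic_stack_space, so that expand_end_bindings can
   restore the stack pointer when the block is left.  */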
2778 \f
2779 /* Emit code to perform the initialization of a declaration DECL. */
2780
2781 void
2782 expand_decl_init (decl)
2783 tree decl;
2784 {
2785 int was_used = TREE_USED (decl);
2786
2787 if (TREE_STATIC (decl))
2788 return;
2789
2790 /* Compute and store the initial value now. */
2791
2792 if (DECL_INITIAL (decl) == error_mark_node)
2793 {
2794 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2795 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2796 || code == POINTER_TYPE)
2797 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2798 0, 0);
2799 emit_queue ();
2800 }
2801 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2802 {
2803 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
2804 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
2805 emit_queue ();
2806 }
2807
2808 /* Don't let the initialization count as "using" the variable. */
2809 TREE_USED (decl) = was_used;
2810
2811 /* Free any temporaries we made while initializing the decl. */
2812 free_temp_slots ();
2813 }
2814
2815 /* CLEANUP is an expression to be executed at exit from this binding contour;
2816 for example, in C++, it might call the destructor for this variable.
2817
2818 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2819 either before or after calling `expand_decl' but before compiling
2820 any subsequent expressions. This is because CLEANUP may be expanded
2821 more than once, on different branches of execution.
2822 For the same reason, CLEANUP may not contain a CALL_EXPR
2823 except as its topmost node--else `preexpand_calls' would get confused.
2824
2825 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2826 that is not associated with any particular variable. */
2827
2828 int
2829 expand_decl_cleanup (decl, cleanup)
2830 tree decl, cleanup;
2831 {
2832 struct nesting *thisblock = block_stack;
2833
2834 /* Error if we are not in any block. */
2835 if (thisblock == 0)
2836 return 0;
2837
2838 /* Record the cleanup if there is one. */
2839
2840 if (cleanup != 0)
2841 {
2842 thisblock->data.block.cleanups
2843 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
2844 /* If this block has a cleanup, it belongs in stack_block_stack. */
2845 stack_block_stack = thisblock;
2846 }
2847 return 1;
2848 }
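
/* Illustrative sketch only: how a C++-like front end might register a
   destructor call as a cleanup when declaring an object.  The wrapper
   name and `build_destructor_call' are hypothetical helpers, not
   functions that exist in this compiler.  */
#if 0
static void
example_expand_object_decl (decl)
     tree decl;
{
  tree cleanup = build_destructor_call (decl);  /* hypothetical */

  expand_decl (decl);
  if (! expand_decl_cleanup (decl, cleanup))
    error ("object with a destructor declared outside any binding contour");
  expand_decl_init (decl);
}
#endif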
2849 \f
2850 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2851 DECL_ELTS is the list of elements that belong to DECL's type.
2852 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2853
2854 void
2855 expand_anon_union_decl (decl, cleanup, decl_elts)
2856 tree decl, cleanup, decl_elts;
2857 {
2858 struct nesting *thisblock = block_stack;
2859 rtx x;
2860
2861 expand_decl (decl);	/* expand_decl takes only the decl itself.  */
2862 x = DECL_RTL (decl);
2863
2864 while (decl_elts)
2865 {
2866 tree decl_elt = TREE_VALUE (decl_elts);
2867 tree cleanup_elt = TREE_PURPOSE (decl_elts);
2868 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2869
2870 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2871 instead create a new MEM rtx with the proper mode. */
2872 if (GET_CODE (x) == MEM)
2873 {
2874 if (mode == GET_MODE (x))
2875 DECL_RTL (decl_elt) = x;
2876 else
2877 {
2878 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
2879 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
2880 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
2881 }
2882 }
2883 else if (GET_CODE (x) == REG)
2884 {
2885 if (mode == GET_MODE (x))
2886 DECL_RTL (decl_elt) = x;
2887 else
2888 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
2889 }
2890 else
2891 abort ();
2892
2893 /* Record the cleanup if there is one. */
2894
2895 if (cleanup_elt != 0)
2896 thisblock->data.block.cleanups
2897 = temp_tree_cons (decl_elt, cleanup_elt,
2898 thisblock->data.block.cleanups);
2899
2900 decl_elts = TREE_CHAIN (decl_elts);
2901 }
2902 }
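
/* Worked example (illustrative): for `union { int i; float f; } u;'
   placed in memory, the MEM for U is reused directly for the element
   whose mode matches U's, and a fresh MEM in SFmode at the same
   address is made for `f'.  Had U landed in a register, the
   odd-mode element would instead become a SUBREG of that register.  */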
2903 \f
2904 /* Expand a list of cleanups LIST.
2905 Elements may be expressions or may be nested lists.
2906
2907 If DONT_DO is nonnull, then any list-element
2908 whose TREE_PURPOSE matches DONT_DO is omitted.
2909 This is sometimes used to avoid a cleanup associated with
2910 a value that is being returned out of the scope. */
2911
2912 static void
2913 expand_cleanups (list, dont_do)
2914 tree list;
2915 tree dont_do;
2916 {
2917 tree tail;
2918 for (tail = list; tail; tail = TREE_CHAIN (tail))
2919 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
2920 {
2921 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2922 expand_cleanups (TREE_VALUE (tail), dont_do);
2923 else
2924 {
2925 /* Cleanups may be run multiple times. For example,
2926 when exiting a binding contour, we expand the
2927 cleanups associated with that contour. When a goto
2928 within that binding contour has a target outside that
2929 contour, it will expand all cleanups from its scope to
2930 the target. Though the cleanups are expanded multiple
2931 times, the control paths are non-overlapping so the
2932 cleanups will not be executed twice. */
2933 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
2934 free_temp_slots ();
2935 }
2936 }
2937 }
2938
2939 /* Expand a list of cleanups for a goto fixup.
2940 The expansion is put into the insn chain after the insn *BEFORE_JUMP
2941 and *BEFORE_JUMP is set to the insn that now comes before the jump. */
2942
2943 static void
2944 fixup_cleanups (list, before_jump)
2945 tree list;
2946 rtx *before_jump;
2947 {
2948 rtx beyond_jump = get_last_insn ();
2949 rtx new_before_jump;
2950
2951 expand_cleanups (list, 0);
2952 /* Pop any pushes done in the cleanups,
2953 in case function is about to return. */
2954 do_pending_stack_adjust ();
2955
2956 new_before_jump = get_last_insn ();
2957
2958 if (beyond_jump != new_before_jump)
2959 {
2960 /* If cleanups expand to nothing, don't reorder. */
2961 reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
2962 *before_jump = new_before_jump;
2963 }
2964 }
2965
2966 /* Move all cleanups from the current block_stack
2967 to the containing block_stack, where they are assumed to
2968 have been created. If anything can cause a temporary to
2969 be created, but not expanded for more than one level of
2970 block_stacks, then this code will have to change. */
2971
2972 void
2973 move_cleanups_up ()
2974 {
2975 struct nesting *block = block_stack;
2976 struct nesting *outer = block->next;
2977
2978 outer->data.block.cleanups
2979 = chainon (block->data.block.cleanups,
2980 outer->data.block.cleanups);
2981 block->data.block.cleanups = 0;
2982 }
2983
2984 tree
2985 last_cleanup_this_contour ()
2986 {
2987 if (block_stack == 0)
2988 return 0;
2989
2990 return block_stack->data.block.cleanups;
2991 }
2992
2993 /* Return 1 if there are any pending cleanups at this point.
2994 If THIS_CONTOUR is nonzero, check the current contour as well.
2995 Otherwise, look only at the contours that enclose this one. */
2996
2997 int
2998 any_pending_cleanups (this_contour)
2999 int this_contour;
3000 {
3001 struct nesting *block;
3002
3003 if (block_stack == 0)
3004 return 0;
3005
3006 if (this_contour && block_stack->data.block.cleanups != NULL)
3007 return 1;
3008 if (block_stack->data.block.cleanups == 0
3009 && (block_stack->data.block.outer_cleanups == 0
3010 #if 0
3011 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3012 #endif
3013 ))
3014 return 0;
3015
3016 for (block = block_stack->next; block; block = block->next)
3017 if (block->data.block.cleanups != 0)
3018 return 1;
3019
3020 return 0;
3021 }
3022 \f
3023 /* Enter a case (Pascal) or switch (C) statement.
3024 Push a block onto case_stack and nesting_stack
3025 to accumulate the case-labels that are seen
3026 and to record the labels generated for the statement.
3027
3028 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3029 Otherwise, this construct is transparent for `exit_something'.
3030
3031 EXPR is the index-expression to be dispatched on.
3032 TYPE is its nominal type. We could simply convert EXPR to this type,
3033 but instead we take short cuts. */
3034
3035 void
3036 expand_start_case (exit_flag, expr, type, printname)
3037 int exit_flag;
3038 tree expr;
3039 tree type;
3040 char *printname;
3041 {
3042 register struct nesting *thiscase = ALLOC_NESTING ();
3043
3044 /* Make an entry on case_stack for the case we are entering. */
3045
3046 thiscase->next = case_stack;
3047 thiscase->all = nesting_stack;
3048 thiscase->depth = ++nesting_depth;
3049 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3050 thiscase->data.case_stmt.case_list = 0;
3051 thiscase->data.case_stmt.index_expr = expr;
3052 thiscase->data.case_stmt.nominal_type = type;
3053 thiscase->data.case_stmt.default_label = 0;
3054 thiscase->data.case_stmt.num_ranges = 0;
3055 thiscase->data.case_stmt.printname = printname;
3056 thiscase->data.case_stmt.seenlabel = 0;
3057 case_stack = thiscase;
3058 nesting_stack = thiscase;
3059
3060 do_pending_stack_adjust ();
3061
3062 /* Make sure case_stmt.start points to something that won't
3063 need any transformation before expand_end_case. */
3064 if (GET_CODE (get_last_insn ()) != NOTE)
3065 emit_note (0, NOTE_INSN_DELETED);
3066
3067 thiscase->data.case_stmt.start = get_last_insn ();
3068 }
3069
3070 /* Start a "dummy case statement" within which case labels are invalid
3071 and are not connected to any larger real case statement.
3072 This can be used if you don't want to let a case statement jump
3073 into the middle of certain kinds of constructs. */
3074
3075 void
3076 expand_start_case_dummy ()
3077 {
3078 register struct nesting *thiscase = ALLOC_NESTING ();
3079
3080 /* Make an entry on case_stack for the dummy. */
3081
3082 thiscase->next = case_stack;
3083 thiscase->all = nesting_stack;
3084 thiscase->depth = ++nesting_depth;
3085 thiscase->exit_label = 0;
3086 thiscase->data.case_stmt.case_list = 0;
3087 thiscase->data.case_stmt.start = 0;
3088 thiscase->data.case_stmt.nominal_type = 0;
3089 thiscase->data.case_stmt.default_label = 0;
3090 thiscase->data.case_stmt.num_ranges = 0;
3091 case_stack = thiscase;
3092 nesting_stack = thiscase;
3093 }
3094
3095 /* End a dummy case statement. */
3096
3097 void
3098 expand_end_case_dummy ()
3099 {
3100 POPSTACK (case_stack);
3101 }
3102
3103 /* Return the data type of the index-expression
3104 of the innermost case statement, or null if none. */
3105
3106 tree
3107 case_index_expr_type ()
3108 {
3109 if (case_stack)
3110 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3111 return 0;
3112 }
3113 \f
3114 /* Accumulate one case or default label inside a case or switch statement.
3115 VALUE is the value of the case (a null pointer, for a default label).
3116
3117 If not currently inside a case or switch statement, return 1 and do
3118 nothing. The caller will print a language-specific error message.
3119 If VALUE is a duplicate or overlaps, return 2 and do nothing
3120 except store the (first) duplicate node in *DUPLICATE.
3121 If VALUE is out of range, return 3 and do nothing.
3122 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3123 Return 0 on success.
3124
3125 Extended to handle range statements. */
3126
3127 int
3128 pushcase (value, label, duplicate)
3129 register tree value;
3130 register tree label;
3131 tree *duplicate;
3132 {
3133 register struct case_node **l;
3134 register struct case_node *n;
3135 tree index_type;
3136 tree nominal_type;
3137
3138 /* Fail if not inside a real case statement. */
3139 if (! (case_stack && case_stack->data.case_stmt.start))
3140 return 1;
3141
3142 if (stack_block_stack
3143 && stack_block_stack->depth > case_stack->depth)
3144 return 5;
3145
3146 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3147 nominal_type = case_stack->data.case_stmt.nominal_type;
3148
3149 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3150 if (index_type == error_mark_node)
3151 return 0;
3152
3153 /* Convert VALUE to the type in which the comparisons are nominally done. */
3154 if (value != 0)
3155 value = convert (nominal_type, value);
3156
3157 /* If this is the first label, warn if any insns have been emitted. */
3158 if (case_stack->data.case_stmt.seenlabel == 0)
3159 {
3160 rtx insn;
3161 for (insn = case_stack->data.case_stmt.start;
3162 insn;
3163 insn = NEXT_INSN (insn))
3164 {
3165 if (GET_CODE (insn) == CODE_LABEL)
3166 break;
3167 if (GET_CODE (insn) != NOTE
3168 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3169 {
3170 warning ("unreachable code at beginning of %s",
3171 case_stack->data.case_stmt.printname);
3172 break;
3173 }
3174 }
3175 }
3176 case_stack->data.case_stmt.seenlabel = 1;
3177
3178 /* Fail if this value is out of range for the actual type of the index
3179 (which may be narrower than NOMINAL_TYPE). */
3180 if (value != 0 && ! int_fits_type_p (value, index_type))
3181 return 3;
3182
3183 /* Fail if this is a duplicate or overlaps another entry. */
3184 if (value == 0)
3185 {
3186 if (case_stack->data.case_stmt.default_label != 0)
3187 {
3188 *duplicate = case_stack->data.case_stmt.default_label;
3189 return 2;
3190 }
3191 case_stack->data.case_stmt.default_label = label;
3192 }
3193 else
3194 {
3195 /* Find the elt in the chain before which to insert the new value,
3196 to keep the chain sorted in increasing order.
3197 But report an error if this element is a duplicate. */
3198 for (l = &case_stack->data.case_stmt.case_list;
3199 /* Keep going past elements distinctly less than VALUE. */
3200 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3201 l = &(*l)->right)
3202 ;
3203 if (*l)
3204 {
3205 /* Element we will insert before must be distinctly greater;
3206 overlap means error. */
3207 if (! tree_int_cst_lt (value, (*l)->low))
3208 {
3209 *duplicate = (*l)->code_label;
3210 return 2;
3211 }
3212 }
3213
3214 /* Add this label to the chain, and succeed.
3215 Copy VALUE so it is on temporary rather than momentary
3216 obstack and will thus survive till the end of the case statement. */
3217 n = (struct case_node *) oballoc (sizeof (struct case_node));
3218 n->left = 0;
3219 n->right = *l;
3220 n->high = n->low = copy_node (value);
3221 n->code_label = label;
3222 *l = n;
3223 }
3224
3225 expand_label (label);
3226 return 0;
3227 }
3228
3229 /* Like pushcase but this case applies to all values
3230 between VALUE1 and VALUE2 (inclusive).
3231 The return value is the same as that of pushcase
3232 but there is one additional error code:
3233 4 means the specified range was empty. */
3234
3235 int
3236 pushcase_range (value1, value2, label, duplicate)
3237 register tree value1, value2;
3238 register tree label;
3239 tree *duplicate;
3240 {
3241 register struct case_node **l;
3242 register struct case_node *n;
3243 tree index_type;
3244 tree nominal_type;
3245
3246 /* Fail if not inside a real case statement. */
3247 if (! (case_stack && case_stack->data.case_stmt.start))
3248 return 1;
3249
3250 if (stack_block_stack
3251 && stack_block_stack->depth > case_stack->depth)
3252 return 5;
3253
3254 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3255 nominal_type = case_stack->data.case_stmt.nominal_type;
3256
3257 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3258 if (index_type == error_mark_node)
3259 return 0;
3260
3261 /* If this is the first label, warn if any insns have been emitted. */
3262 if (case_stack->data.case_stmt.seenlabel == 0)
3263 {
3264 rtx insn;
3265 for (insn = case_stack->data.case_stmt.start;
3266 insn;
3267 insn = NEXT_INSN (insn))
3268 {
3269 if (GET_CODE (insn) == CODE_LABEL)
3270 break;
3271 if (GET_CODE (insn) != NOTE
3272 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3273 {
3274 warning ("unreachable code at beginning of %s",
3275 case_stack->data.case_stmt.printname);
3276 break;
3277 }
3278 }
3279 }
3280 case_stack->data.case_stmt.seenlabel = 1;
3281
3282 /* Convert VALUEs to the type in which the comparisons are nominally done. */
3283 if (value1 == 0) /* Negative infinity. */
3284 value1 = TYPE_MIN_VALUE (index_type);
3285 value1 = convert (nominal_type, value1);
3286
3287 if (value2 == 0) /* Positive infinity. */
3288 value2 = TYPE_MAX_VALUE (index_type);
3289 value2 = convert (nominal_type, value2);
3290
3291 /* Fail if these values are out of range. */
3292 if (! int_fits_type_p (value1, index_type))
3293 return 3;
3294
3295 if (! int_fits_type_p (value2, index_type))
3296 return 3;
3297
3298 /* Fail if the range is empty. */
3299 if (tree_int_cst_lt (value2, value1))
3300 return 4;
3301
3302 /* If the bounds are equal, turn this into the one-value case. */
3303 if (tree_int_cst_equal (value1, value2))
3304 return pushcase (value1, label, duplicate);
3305
3306 /* Find the elt in the chain before which to insert the new value,
3307 to keep the chain sorted in increasing order.
3308 But report an error if this element is a duplicate. */
3309 for (l = &case_stack->data.case_stmt.case_list;
3310 /* Keep going past elements distinctly less than this range. */
3311 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3312 l = &(*l)->right)
3313 ;
3314 if (*l)
3315 {
3316 /* Element we will insert before must be distinctly greater;
3317 overlap means error. */
3318 if (! tree_int_cst_lt (value2, (*l)->low))
3319 {
3320 *duplicate = (*l)->code_label;
3321 return 2;
3322 }
3323 }
3324
3325 /* Add this label to the chain, and succeed.
3326 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3327 obstack and will thus survive till the end of the case statement. */
3328
3329 n = (struct case_node *) oballoc (sizeof (struct case_node));
3330 n->left = 0;
3331 n->right = *l;
3332 n->low = copy_node (value1);
3333 n->high = copy_node (value2);
3334 n->code_label = label;
3335 *l = n;
3336
3337 expand_label (label);
3338
3339 case_stack->data.case_stmt.num_ranges++;
3340
3341 return 0;
3342 }
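
/* Illustrative sketch only: expanding `switch (e)' with the entry
   points above.  Checking of the pushcase return codes is elided, and
   the wrapper name is hypothetical; build_decl and build_int_2 are the
   usual tree-building entry points.  */
#if 0
static void
example_expand_switch (expr)
     tree expr;
{
  tree duplicate;
  tree label1 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  tree label2 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  tree deflab = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

  /* 1: `exit_something' (i.e. break) may exit this statement.  */
  expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");

  pushcase (build_int_2 (1, 0), label1, &duplicate);
  /* ...statements after `case 1:' are expanded here...  */

  pushcase_range (build_int_2 (4, 0), build_int_2 (6, 0),
		  label2, &duplicate);
  /* ...statements for the range `case 4 ... 6:'...  */

  pushcase (NULL_TREE, deflab, &duplicate);     /* The default label.  */
  /* ...default statements...  */

  expand_end_case (expr);
}
#endif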
3343 \f
3344 /* Called when the index of a switch statement is an enumerated type
3345 and there is no default label.
3346
3347 Checks that all enumeration literals are covered by the case
3348 expressions of a switch. Also, warn if there are any extra
3349 switch cases that are *not* elements of the enumerated type.
3350
3351 If all enumeration literals were covered by the case expressions,
3352 turn one of the expressions into the default expression since it should
3353 not be possible to fall through such a switch. */
3354
3355 void
3356 check_for_full_enumeration_handling (type)
3357 tree type;
3358 {
3359 register struct case_node *n;
3360 register struct case_node **l;
3361 register tree chain;
3362 int all_values = 1;
3363
3364 /* The time complexity of this loop is currently O(N * M), with
3365 N being the number of enumerals in the enumerated type, and
3366 M being the number of case expressions in the switch. */
3367
3368 for (chain = TYPE_VALUES (type);
3369 chain;
3370 chain = TREE_CHAIN (chain))
3371 {
3372 /* Find a match between enumeral and case expression, if possible.
3373 Quit looking when we've gone too far (since case expressions
3374 are kept sorted in ascending order). Warn about enumerals not
3375 handled in the switch statement case expression list. */
3376
3377 for (n = case_stack->data.case_stmt.case_list;
3378 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3379 n = n->right)
3380 ;
3381
3382 if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
3383 {
3384 if (warn_switch)
3385 warning ("enumerated value `%s' not handled in switch",
3386 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3387 all_values = 0;
3388 }
3389 }
3390
3391 /* Now we go the other way around; we warn if there are case
3392 expressions that don't correspond to enumerals. This can
3393 occur since C and C++ don't enforce type-checking of
3394 assignments to enumeration variables. */
3395
3396 if (warn_switch)
3397 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3398 {
3399 for (chain = TYPE_VALUES (type);
3400 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3401 chain = TREE_CHAIN (chain))
3402 ;
3403
3404 if (!chain)
3405 warning ("case value `%d' not in enumerated type `%s'",
3406 TREE_INT_CST_LOW (n->low),
3407 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3408 == IDENTIFIER_NODE)
3409 ? TYPE_NAME (type)
3410 : DECL_NAME (TYPE_NAME (type))));
3411 }
3412
3413 /* If all values were found as case labels, make one of them the default
3414 label. Thus, this switch will never fall through. We arbitrarily pick
3415 the last one to make the default since this is likely the most
3416 efficient choice. */
3417
3418 if (all_values)
3419 {
3420 for (l = &case_stack->data.case_stmt.case_list;
3421 (*l)->right != 0;
3422 l = &(*l)->right)
3423 ;
3424
3425 case_stack->data.case_stmt.default_label = (*l)->code_label;
3426 *l = 0;
3427 }
3428 }
3429 \f
3430 /* Terminate a case (Pascal) or switch (C) statement
3431 in which CASE_INDEX is the expression to be tested.
3432 Generate the code to test it and jump to the right place. */
3433
3434 void
3435 expand_end_case (orig_index)
3436 tree orig_index;
3437 {
3438 tree minval, maxval, range;
3439 rtx default_label = 0;
3440 register struct case_node *n;
3441 int count;
3442 rtx index;
3443 rtx table_label = gen_label_rtx ();
3444 int ncases;
3445 rtx *labelvec;
3446 register int i;
3447 rtx before_case;
3448 register struct nesting *thiscase = case_stack;
3449 tree index_expr = thiscase->data.case_stmt.index_expr;
3450 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3451
3452 do_pending_stack_adjust ();
3453
3454 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3455 if (TREE_TYPE (index_expr) != error_mark_node)
3456 {
3457 /* If switch expression was an enumerated type, check that all
3458 enumeration literals are covered by the cases.
3459 No sense trying this if there's a default case, however. */
3460
3461 if (!thiscase->data.case_stmt.default_label
3462 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3463 && TREE_CODE (index_expr) != INTEGER_CST)
3464 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3465
3466 /* If this is the first label, warn if any insns have been emitted. */
3467 if (thiscase->data.case_stmt.seenlabel == 0)
3468 {
3469 rtx insn;
3470 for (insn = get_last_insn ();
3471 insn != case_stack->data.case_stmt.start;
3472 insn = PREV_INSN (insn))
3473 if (GET_CODE (insn) != NOTE
3474 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3475 {
3476 warning ("unreachable code at beginning of %s",
3477 case_stack->data.case_stmt.printname);
3478 break;
3479 }
3480 }
3481
3482 /* If we don't have a default-label, create one here,
3483 after the body of the switch. */
3484 if (thiscase->data.case_stmt.default_label == 0)
3485 {
3486 thiscase->data.case_stmt.default_label
3487 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3488 expand_label (thiscase->data.case_stmt.default_label);
3489 }
3490 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3491
3492 before_case = get_last_insn ();
3493
3494 /* Simplify the case-list before we count it. */
3495 group_case_nodes (thiscase->data.case_stmt.case_list);
3496
3497 /* Get upper and lower bounds of case values.
3498 Also convert all the case values to the index expr's data type. */
3499
3500 count = 0;
3501 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3502 {
3503 /* Check low and high label values are integers. */
3504 if (TREE_CODE (n->low) != INTEGER_CST)
3505 abort ();
3506 if (TREE_CODE (n->high) != INTEGER_CST)
3507 abort ();
3508
3509 n->low = convert (TREE_TYPE (index_expr), n->low);
3510 n->high = convert (TREE_TYPE (index_expr), n->high);
3511
3512 /* Count the elements and track the largest and smallest
3513 of them (treating them as signed even if they are not). */
3514 if (count++ == 0)
3515 {
3516 minval = n->low;
3517 maxval = n->high;
3518 }
3519 else
3520 {
3521 if (INT_CST_LT (n->low, minval))
3522 minval = n->low;
3523 if (INT_CST_LT (maxval, n->high))
3524 maxval = n->high;
3525 }
3526 /* A range counts double, since it requires two compares. */
3527 if (! tree_int_cst_equal (n->low, n->high))
3528 count++;
3529 }
3530
3531 /* Compute span of values. */
3532 if (count != 0)
3533 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3534 maxval, minval));
3535
3536 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3537 {
3538 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3539 emit_queue ();
3540 emit_jump (default_label);
3541 }
3542 /* If range of values is much bigger than number of values,
3543 make a sequence of conditional branches instead of a dispatch.
3544 If the switch-index is a constant, do it this way
3545 because we can optimize it. */
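/* E.g., `switch (x) { case 1: ...; case 1000: ...; }' has a range of
   999 but only two case values, so range > 10 * count holds and two
   compare-and-branch sequences beat a 1000-entry dispatch table
   (illustrative figures; the thresholds are the ones tested below). */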
3546 else if (TREE_INT_CST_HIGH (range) != 0
3547 #ifdef HAVE_casesi
3548 || (HAVE_casesi ? count < 4 : count < 5)
3549 #else
3550 /* If the machine does not have a case insn that compares the
3551 bounds, this means extra overhead for dispatch tables,
3552 which raises the threshold for using them. */
3553 || count < 5
3554 #endif
3555 || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
3556 || TREE_CODE (index_expr) == INTEGER_CST
3557 /* These will reduce to a constant. */
3558 || (TREE_CODE (index_expr) == CALL_EXPR
3559 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3560 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3561 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3562 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3563 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3564 {
3565 index = expand_expr (index_expr, 0, VOIDmode, 0);
3566
3567 /* If the index is a short or char for which we do not have
3568 an insn to handle comparisons directly, convert it to
3569 a full integer now, rather than letting each comparison
3570 generate the conversion. */
3571
3572 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3573 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
3574 == CODE_FOR_nothing))
3575 {
3576 enum machine_mode wider_mode;
3577 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3578 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3579 if (cmp_optab->handlers[(int) wider_mode].insn_code
3580 != CODE_FOR_nothing)
3581 {
3582 index = convert_to_mode (wider_mode, index, unsignedp);
3583 break;
3584 }
3585 }
3586
3587 emit_queue ();
3588 do_pending_stack_adjust ();
3589
3590 index = protect_from_queue (index, 0);
3591 if (GET_CODE (index) == MEM)
3592 index = copy_to_reg (index);
3593 if (GET_CODE (index) == CONST_INT
3594 || TREE_CODE (index_expr) == INTEGER_CST)
3595 {
3596 /* Make a tree node with the proper constant value
3597 if we don't already have one. */
3598 if (TREE_CODE (index_expr) != INTEGER_CST)
3599 {
3600 index_expr
3601 = build_int_2 (INTVAL (index),
3602 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
3603 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3604 }
3605
3606 /* For constant index expressions we need only
3607 issue an unconditional branch to the appropriate
3608 target code. The job of removing any unreachable
3609 code is left to the optimization phase if the
3610 "-O" option is specified. */
3611 for (n = thiscase->data.case_stmt.case_list;
3612 n;
3613 n = n->right)
3614 {
3615 if (! tree_int_cst_lt (index_expr, n->low)
3616 && ! tree_int_cst_lt (n->high, index_expr))
3617 break;
3618 }
3619 if (n)
3620 emit_jump (label_rtx (n->code_label));
3621 else
3622 emit_jump (default_label);
3623 }
3624 else
3625 {
3626 /* If the index expression is not constant we generate
3627 a binary decision tree to select the appropriate
3628 target code. This is done as follows:
3629
3630 The list of cases is rearranged into a binary tree,
3631 nearly optimal assuming equal probability for each case.
3632
3633 The tree is transformed into RTL, eliminating
3634 redundant test conditions at the same time.
3635
3636 If program flow could reach the end of the
3637 decision tree an unconditional jump to the
3638 default code is emitted. */
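/* E.g., case values 1, 3, 5, 7, 9 with equal weights balance into
   a tree rooted at 5, with {1, 3} on the left and {7, 9} on the
   right, so the number of tests grows with the logarithm of the
   number of cases (illustrative; see balance_case_nodes below). */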
3639
3640 use_cost_table
3641 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3642 && default_label != 0
3643 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3644 balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
3645 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3646 default_label, TREE_TYPE (index_expr));
3647 emit_jump_if_reachable (default_label);
3648 }
3649 }
3650 else
3651 {
3652 int win = 0;
3653 #ifdef HAVE_casesi
3654 if (HAVE_casesi)
3655 {
3656 enum machine_mode index_mode = SImode;
3657 int index_bits = GET_MODE_BITSIZE (index_mode);
3658
3659 /* Convert the index to SImode. */
3660 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3661 > GET_MODE_BITSIZE (index_mode))
3662 {
3663 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3664 index_expr, minval);
3665 minval = integer_zero_node;
3666 }
3667 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3668 index_expr = convert (type_for_size (index_bits, 0),
3669 index_expr);
3670 index = expand_expr (index_expr, 0, VOIDmode, 0);
3671 emit_queue ();
3672 index = protect_from_queue (index, 0);
3673 do_pending_stack_adjust ();
3674
3675 emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
3676 expand_expr (range, 0, VOIDmode, 0),
3677 table_label, default_label));
3678 win = 1;
3679 }
3680 #endif
3681 #ifdef HAVE_tablejump
3682 if (! win && HAVE_tablejump)
3683 {
3684 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3685 fold (build (MINUS_EXPR,
3686 TREE_TYPE (index_expr),
3687 index_expr, minval)));
3688 index = expand_expr (index_expr, 0, VOIDmode, 0);
3689 emit_queue ();
3690 /* convert_to_mode calls protect_from_queue. */
3691 index = convert_to_mode (Pmode, index, 1);
3692 do_pending_stack_adjust ();
3693
3694 do_tablejump (index, Pmode,
3695 gen_rtx (CONST_INT, VOIDmode,
3696 TREE_INT_CST_LOW (range)),
3697 table_label, default_label);
3698 win = 1;
3699 }
3700 #endif
3701 if (! win)
3702 abort ();
3703
3704 /* Get table of labels to jump to, in order of case index. */
3705
3706 ncases = TREE_INT_CST_LOW (range) + 1;
3707 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
3708 bzero (labelvec, ncases * sizeof (rtx));
3709
3710 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3711 {
3712 register int i
3713 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
3714
3715 while (1)
3716 {
3717 labelvec[i]
3718 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
3719 if (i + TREE_INT_CST_LOW (minval)
3720 == TREE_INT_CST_LOW (n->high))
3721 break;
3722 i++;
3723 }
3724 }
3725
3726 /* Fill in the gaps with the default. */
3727 for (i = 0; i < ncases; i++)
3728 if (labelvec[i] == 0)
3729 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
3730
3731 /* Output the table. */
3732 emit_label (table_label);
3733
3734 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
3735 were an expression, instead of an #ifdef/#ifndef. */
3736 if (
3737 #ifdef CASE_VECTOR_PC_RELATIVE
3738 1 ||
3739 #endif
3740 flag_pic)
3741 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
3742 gen_rtx (LABEL_REF, Pmode, table_label),
3743 gen_rtvec_v (ncases, labelvec)));
3744 else
3745 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
3746 gen_rtvec_v (ncases, labelvec)));
3747
3748 /* If the case insn drops through the table,
3749 after the table we must jump to the default-label.
3750 Otherwise record no drop-through after the table. */
3751 #ifdef CASE_DROPS_THROUGH
3752 emit_jump (default_label);
3753 #else
3754 emit_barrier ();
3755 #endif
3756 }
3757
3758 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
3759 reorder_insns (before_case, get_last_insn (),
3760 thiscase->data.case_stmt.start);
3761 }
3762 if (thiscase->exit_label)
3763 emit_label (thiscase->exit_label);
3764
3765 POPSTACK (case_stack);
3766
3767 free_temp_slots ();
3768 }
3769
3770 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3771
3772 static void
3773 do_jump_if_equal (op1, op2, label, unsignedp)
3774 rtx op1, op2, label;
3775 int unsignedp;
3776 {
3777 if (GET_CODE (op1) == CONST_INT
3778 && GET_CODE (op2) == CONST_INT)
3779 {
3780 if (INTVAL (op1) == INTVAL (op2))
3781 emit_jump (label);
3782 }
3783 else
3784 {
3785 enum machine_mode mode = GET_MODE (op1);
3786 if (mode == VOIDmode)
3787 mode = GET_MODE (op2);
3788 emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
3789 emit_jump_insn (gen_beq (label));
3790 }
3791 }
3792 \f
3793 /* Not all case values are encountered equally. This function
3794 uses a heuristic to weight case labels, in cases where that
3795 looks like a reasonable thing to do.
3796
3797 Right now, all we try to guess is text, and we establish the
3798 following weights:
3799
3800 chars above space: 16
3801 digits: 16
3802 default: 12
3803 space, punct: 8
3804 tab: 4
3805 newline: 2
3806 other "\" chars: 1
3807 remaining chars: 0
3808
3809 If we find any cases in the switch that are not either -1 or in the range
3810 of valid ASCII characters, or are control characters other than those
3811 commonly used with "\", don't treat this switch scanning text.
3812
3813 Return 1 if these nodes are suitable for cost estimation, otherwise
3814 return 0. */
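/* For example, a switch whose cases are 'a', '0' and ' ' weights
   those cases 16, 16 and 8 per the table above, while a case value
   of 200 (outside -1..127) makes this function return 0 and
   disables the cost heuristic (illustrative values). */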
3815
3816 static int
3817 estimate_case_costs (node)
3818 case_node_ptr node;
3819 {
3820 tree min_ascii = build_int_2 (-1, -1);
3821 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3822 case_node_ptr n;
3823 int i;
3824
3825 /* If we haven't already made the cost table, make it now. Note that the
3826 lower bound of the table is -1, not zero. */
3827
3828 if (cost_table == NULL)
3829 {
3830 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
3831 bzero (cost_table - 1, 129 * sizeof (short));
3832
3833 for (i = 0; i < 128; i++)
3834 {
3835 if (isalnum (i))
3836 cost_table[i] = 16;
3837 else if (ispunct (i))
3838 cost_table[i] = 8;
3839 else if (iscntrl (i))
3840 cost_table[i] = -1;
3841 }
3842
3843 cost_table[' '] = 8;
3844 cost_table['\t'] = 4;
3845 cost_table['\0'] = 4;
3846 cost_table['\n'] = 2;
3847 cost_table['\f'] = 1;
3848 cost_table['\v'] = 1;
3849 cost_table['\b'] = 1;
3850 }
3851
3852 /* See if all the case expressions look like text. It is text if the
3853 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3854 as signed arithmetic since we don't want to ever access cost_table with a
3855 value less than -1. Also check that none of the constants in a range
3856 are strange control characters. */
3857
3858 for (n = node; n; n = n->right)
3859 {
3860 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3861 return 0;
3862
3863 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
3864 if (cost_table[i] < 0)
3865 return 0;
3866 }
3867
3868 /* All interesting values are within the range of interesting
3869 ASCII characters. */
3870 return 1;
3871 }
3872
3873 /* Scan an ordered list of case nodes
3874 combining those with consecutive values or ranges.
3875
3876 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
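/* This matches user code such as
   `case 1: case 2: case 3: do_thing (); break;'
   where all three labels reach the same real insn
   (hypothetical user code, for illustration). */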
3877
3878 static void
3879 group_case_nodes (head)
3880 case_node_ptr head;
3881 {
3882 case_node_ptr node = head;
3883
3884 while (node)
3885 {
3886 rtx lb = next_real_insn (label_rtx (node->code_label));
3887 case_node_ptr np = node;
3888
3889 /* Try to group the successors of NODE with NODE. */
3890 while (((np = np->right) != 0)
3891 /* Do they jump to the same place? */
3892 && next_real_insn (label_rtx (np->code_label)) == lb
3893 /* Are their ranges consecutive? */
3894 && tree_int_cst_equal (np->low,
3895 fold (build (PLUS_EXPR,
3896 TREE_TYPE (node->high),
3897 node->high,
3898 integer_one_node)))
3899 /* An overflow is not consecutive. */
3900 && tree_int_cst_lt (node->high,
3901 fold (build (PLUS_EXPR,
3902 TREE_TYPE (node->high),
3903 node->high,
3904 integer_one_node))))
3905 {
3906 node->high = np->high;
3907 }
3908 /* NP is the first node after NODE which can't be grouped with it.
3909 Delete the nodes in between, and move on to that node. */
3910 node->right = np;
3911 node = np;
3912 }
3913 }
3914
3915 /* Take an ordered list of case nodes
3916 and transform them into a near optimal binary tree,
3917 on the assumption that any target code selection value is as
3918 likely as any other.
3919
3920 The transformation is performed by splitting the ordered
3921 list into two equal sections plus a pivot. The parts are
3922 then attached to the pivot as left and right branches. Each
3923 branch is then transformed recursively. */
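/* E.g., the ordered list 1, 2, 3, 4, 5 splits at the pivot 3,
   giving a left branch of {1, 2} and a right branch of {4, 5},
   and each branch is then balanced the same way (illustrative;
   costs and ranges can shift the split point, as coded below). */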
3924
3925 static void
3926 balance_case_nodes (head, parent)
3927 case_node_ptr *head;
3928 case_node_ptr parent;
3929 {
3930 register case_node_ptr np;
3931
3932 np = *head;
3933 if (np)
3934 {
3935 int cost = 0;
3936 int i = 0;
3937 int ranges = 0;
3938 register case_node_ptr *npp;
3939 case_node_ptr left;
3940
3941 /* Count the number of entries on this branch. Also count the ranges. */
3942
3943 while (np)
3944 {
3945 if (!tree_int_cst_equal (np->low, np->high))
3946 {
3947 ranges++;
3948 if (use_cost_table)
3949 cost += cost_table[TREE_INT_CST_LOW (np->high)];
3950 }
3951
3952 if (use_cost_table)
3953 cost += cost_table[TREE_INT_CST_LOW (np->low)];
3954
3955 i++;
3956 np = np->right;
3957 }
3958
3959 if (i > 2)
3960 {
3961 /* Split this list if it is long enough for that to help. */
3962 npp = head;
3963 left = *npp;
3964 if (use_cost_table)
3965 {
3966 /* Find the place in the list that bisects the list's total cost.
3967 Here I is set to half the total cost. */
3968 int n_moved = 0;
3969 i = (cost + 1) / 2;
3970 while (1)
3971 {
3972 /* Skip nodes while their cost does not reach that amount. */
3973 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3974 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
3975 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
3976 if (i <= 0)
3977 break;
3978 npp = &(*npp)->right;
3979 n_moved += 1;
3980 }
3981 if (n_moved == 0)
3982 {
3983 /* Leave this branch lopsided, but optimize left-hand
3984 side and fill in `parent' fields for right-hand side. */
3985 np = *head;
3986 np->parent = parent;
3987 balance_case_nodes (&np->left, np);
3988 for (; np->right; np = np->right)
3989 np->right->parent = np;
3990 return;
3991 }
3992 }
3993 /* If there are just three nodes, split at the middle one. */
3994 else if (i == 3)
3995 npp = &(*npp)->right;
3996 else
3997 {
3998 /* Find the place in the list that bisects the list's total cost,
3999 where ranges count as 2.
4000 Here I is set to half the total cost. */
4001 i = (i + ranges + 1) / 2;
4002 while (1)
4003 {
4004 /* Skip nodes while their cost does not reach that amount. */
4005 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4006 i--;
4007 i--;
4008 if (i <= 0)
4009 break;
4010 npp = &(*npp)->right;
4011 }
4012 }
4013 *head = np = *npp;
4014 *npp = 0;
4015 np->parent = parent;
4016 np->left = left;
4017
4018 /* Optimize each of the two split parts. */
4019 balance_case_nodes (&np->left, np);
4020 balance_case_nodes (&np->right, np);
4021 }
4022 else
4023 {
4024 /* Else leave this branch as one level,
4025 but fill in `parent' fields. */
4026 np = *head;
4027 np->parent = parent;
4028 for (; np->right; np = np->right)
4029 np->right->parent = np;
4030 }
4031 }
4032 }
4033 \f
4034 /* Search the parent sections of the case node tree
4035 to see if a test for the lower bound of NODE would be redundant.
4036 INDEX_TYPE is the type of the index expression.
4037
4038 The instructions to generate the case decision tree are
4039 output in the same order as nodes are processed, so it is
4040 known that if a parent node checks the range of the current
4041 node minus one, the current node is bounded at its lower
4042 span. Thus the test would be redundant. */
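/* E.g., if some parent node tested the range 0..50 and this node's
   low bound is 51, the parent's test already rules out values
   below 51, so no low-bound compare need be emitted here
   (illustrative values). */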
4043
4044 static int
4045 node_has_low_bound (node, index_type)
4046 case_node_ptr node;
4047 tree index_type;
4048 {
4049 tree low_minus_one;
4050 case_node_ptr pnode;
4051
4052 /* If the lower bound of this node is the lowest value in the index type,
4053 we need not test it. */
4054
4055 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4056 return 1;
4057
4058 /* If this node has a left branch, the value at the left must be less
4059 than that at this node, so it cannot be bounded at the bottom and
4060 we need not bother testing any further. */
4061
4062 if (node->left)
4063 return 0;
4064
4065 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4066 node->low, integer_one_node));
4067
4068 /* If the subtraction above overflowed, we can't verify anything.
4069 Otherwise, look for a parent that tests our value - 1. */
4070
4071 if (! tree_int_cst_lt (low_minus_one, node->low))
4072 return 0;
4073
4074 for (pnode = node->parent; pnode; pnode = pnode->parent)
4075 if (tree_int_cst_equal (low_minus_one, pnode->high))
4076 return 1;
4077
4078 return 0;
4079 }
4080
4081 /* Search the parent sections of the case node tree
4082 to see if a test for the upper bound of NODE would be redundant.
4083 INDEX_TYPE is the type of the index expression.
4084
4085 The instructions to generate the case decision tree are
4086 output in the same order as nodes are processed, so it is
4087 known that if a parent node checks the range of the current
4088 node plus one, the current node is bounded at its upper
4089 span. Thus the test would be redundant. */
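/* Symmetrically, if some parent node tested the range 60..100 and
   this node's high bound is 59, values above 59 are already ruled
   out and the high-bound compare is redundant (illustrative values). */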
4090
4091 static int
4092 node_has_high_bound (node, index_type)
4093 case_node_ptr node;
4094 tree index_type;
4095 {
4096 tree high_plus_one;
4097 case_node_ptr pnode;
4098
4099 /* If the upper bound of this node is the highest value in the type
4100 of the index expression, we need not test against it. */
4101
4102 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4103 return 1;
4104
4105 /* If this node has a right branch, the value at the right must be greater
4106 than that at this node, so it cannot be bounded at the top and
4107 we need not bother testing any further. */
4108
4109 if (node->right)
4110 return 0;
4111
4112 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4113 node->high, integer_one_node));
4114
4115 /* If the addition above overflowed, we can't verify anything.
4116 Otherwise, look for a parent that tests our value + 1. */
4117
4118 if (! tree_int_cst_lt (node->high, high_plus_one))
4119 return 0;
4120
4121 for (pnode = node->parent; pnode; pnode = pnode->parent)
4122 if (tree_int_cst_equal (high_plus_one, pnode->low))
4123 return 1;
4124
4125 return 0;
4126 }
4127
4128 /* Search the parent sections of the
4129 case node tree to see if both tests for the upper and lower
4130 bounds of NODE would be redundant. */
4131
4132 static int
4133 node_is_bounded (node, index_type)
4134 case_node_ptr node;
4135 tree index_type;
4136 {
4137 return (node_has_low_bound (node, index_type)
4138 && node_has_high_bound (node, index_type));
4139 }
4140
4141 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4142
4143 static void
4144 emit_jump_if_reachable (label)
4145 rtx label;
4146 {
4147 if (GET_CODE (get_last_insn ()) != BARRIER)
4148 emit_jump (label);
4149 }
4150 \f
4151 /* Emit step-by-step code to select a case for the value of INDEX.
4152 The thus generated decision tree follows the form of the
4153 case-node binary tree NODE, whose nodes represent test conditions.
4154 INDEX_TYPE is the type of the index of the switch.
4155
4156 Care is taken to prune redundant tests from the decision tree
4157 by detecting any boundary conditions already checked by
4158 emitted rtx. (See node_has_high_bound, node_has_low_bound
4159 and node_is_bounded, above.)
4160
4161 Where the test conditions can be shown to be redundant we emit
4162 an unconditional jump to the target code. As a further
4163 optimization, the subordinates of a tree node are examined to
4164 check for bounded nodes. In this case conditional and/or
4165 unconditional jumps as a result of the boundary check for the
4166 current node are arranged to target the subordinates' associated
4167 code for out-of-bound conditions on the current node.
4168
4169 We can assume that when control reaches the code generated here,
4170 the index value has already been compared with the parents
4171 of this node, and determined to be on the same side of each parent
4172 as this node is. Thus, if this node tests for the value 51,
4173 and a parent tested for 52, we don't need to consider
4174 the possibility of a value greater than 51. If another parent
4175 tests for the value 50, then this node need not test anything. */
4176
4177 static void
4178 emit_case_nodes (index, node, default_label, index_type)
4179 rtx index;
4180 case_node_ptr node;
4181 rtx default_label;
4182 tree index_type;
4183 {
4184 /* If INDEX has an unsigned type, we must make unsigned branches. */
4185 int unsignedp = TREE_UNSIGNED (index_type);
4186 typedef rtx rtx_function ();
4187 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
4188 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
4189 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
4190 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
4191 enum machine_mode mode = GET_MODE (index);
4192
4193 /* See if our parents have already tested everything for us.
4194 If they have, emit an unconditional jump for this node. */
4195 if (node_is_bounded (node, index_type))
4196 emit_jump (label_rtx (node->code_label));
4197
4198 else if (tree_int_cst_equal (node->low, node->high))
4199 {
4200 /* Node is single valued. First see if the index expression matches
4201 this node and then check our children, if any. */
4202
4203 do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
4204 label_rtx (node->code_label), unsignedp);
4205
4206 if (node->right != 0 && node->left != 0)
4207 {
4208 /* This node has children on both sides.
4209 Dispatch to one side or the other
4210 by comparing the index value with this node's value.
4211 If one subtree is bounded, check that one first,
4212 so we can avoid real branches in the tree. */
4213
4214 if (node_is_bounded (node->right, index_type))
4215 {
4216 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4217 GT, 0, mode, unsignedp, 0);
4218
4219 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4220 emit_case_nodes (index, node->left, default_label, index_type);
4221 }
4222
4223 else if (node_is_bounded (node->left, index_type))
4224 {
4225 emit_cmp_insn (index, expand_expr (node->high, 0,
4226 VOIDmode, 0),
4227 LT, 0, mode, unsignedp, 0);
4228 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
4229 emit_case_nodes (index, node->right, default_label, index_type);
4230 }
4231
4232 else
4233 {
4234 /* Neither node is bounded. First distinguish the two sides;
4235 then emit the code for one side at a time. */
4236
4237 tree test_label
4238 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4239
4240 /* See if the value is on the right. */
4241 emit_cmp_insn (index, expand_expr (node->high, 0,
4242 VOIDmode, 0),
4243 GT, 0, mode, unsignedp, 0);
4244 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4245
4246 /* Value must be on the left.
4247 Handle the left-hand subtree. */
4248 emit_case_nodes (index, node->left, default_label, index_type);
4249 /* If left-hand subtree does nothing,
4250 go to default. */
4251 emit_jump_if_reachable (default_label);
4252
4253 /* Code branches here for the right-hand subtree. */
4254 expand_label (test_label);
4255 emit_case_nodes (index, node->right, default_label, index_type);
4256 }
4257 }
4258
4259 else if (node->right != 0 && node->left == 0)
4260 {
4261 /* Here we have a right child but no left, so we issue a conditional
4262 branch to default and process the right child.
4263
4264 Omit the conditional branch to default if it would avoid only a
4265 single right child; it costs too much space to save so little time. */
4266
4267 if (node->right->right || node->right->left
4268 || !tree_int_cst_equal (node->right->low, node->right->high))
4269 {
4270 if (!node_has_low_bound (node, index_type))
4271 {
4272 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4273 LT, 0, mode, unsignedp, 0);
4274 emit_jump_insn ((*gen_blt_pat) (default_label));
4275 }
4276
4277 emit_case_nodes (index, node->right, default_label, index_type);
4278 }
4279 else
4280 /* We cannot process node->right normally
4281 since we haven't ruled out the numbers less than
4282 this node's value. So handle node->right explicitly. */
4283 do_jump_if_equal (index,
4284 expand_expr (node->right->low, 0, VOIDmode, 0),
4285 label_rtx (node->right->code_label), unsignedp);
4286 }
4287
4288 else if (node->right == 0 && node->left != 0)
4289 {
4290 /* Just one subtree, on the left. */
4291
4292 #if 0 /* The following code and comment were formerly part
4293 of the condition here, but they didn't work
4294 and I don't understand what the idea was. -- rms. */
4295 /* If our "most probable entry" is less probable
4296 than the default label, emit a jump to
4297 the default label using condition codes
4298 already lying around. With no right branch,
4299 a branch-greater-than will get us to the default
4300 label correctly. */
4301 if (use_cost_table
4302 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
4303 ;
4304 #endif /* 0 */
4305 if (node->left->left || node->left->right
4306 || !tree_int_cst_equal (node->left->low, node->left->high))
4307 {
4308 if (!node_has_high_bound (node, index_type))
4309 {
4310 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4311 GT, 0, mode, unsignedp, 0);
4312 emit_jump_insn ((*gen_bgt_pat) (default_label));
4313 }
4314
4315 emit_case_nodes (index, node->left, default_label, index_type);
4316 }
4317 else
4318 /* We cannot process node->left normally
4319 since we haven't ruled out the numbers greater than
4320 this node's value. So handle node->left explicitly. */
4321 do_jump_if_equal (index,
4322 expand_expr (node->left->low, 0, VOIDmode, 0),
4323 label_rtx (node->left->code_label), unsignedp);
4324 }
4325 }
4326 else
4327 {
4328 /* Node is a range. These cases are very similar to those for a single
4329 value, except that we do not start by testing whether this node
4330 is the one to branch to. */
4331
4332 if (node->right != 0 && node->left != 0)
4333 {
4334 /* Node has subtrees on both sides.
4335 If the right-hand subtree is bounded,
4336 test for it first, since we can go straight there.
4337 Otherwise, we need to make a branch in the control structure,
4338 then handle the two subtrees. */
4339 tree test_label = 0;
4340
4341 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4342 GT, 0, mode, unsignedp, 0);
4343
4344 if (node_is_bounded (node->right, index_type))
4345 /* Right hand node is fully bounded so we can eliminate any
4346 testing and branch directly to the target code. */
4347 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
4348 else
4349 {
4350 /* Right hand node requires testing.
4351 Branch to a label where we will handle it later. */
4352
4353 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4354 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
4355 }
4356
4357 /* Value belongs to this node or to the left-hand subtree. */
4358
4359 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4360 GE, 0, mode, unsignedp, 0);
4361 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4362
4363 /* Handle the left-hand subtree. */
4364 emit_case_nodes (index, node->left, default_label, index_type);
4365
4366 /* If right node had to be handled later, do that now. */
4367
4368 if (test_label)
4369 {
4370 /* If the left-hand subtree fell through,
4371 don't let it fall into the right-hand subtree. */
4372 emit_jump_if_reachable (default_label);
4373
4374 expand_label (test_label);
4375 emit_case_nodes (index, node->right, default_label, index_type);
4376 }
4377 }
4378
4379 else if (node->right != 0 && node->left == 0)
4380 {
4381 /* Deal with values to the left of this node,
4382 if they are possible. */
4383 if (!node_has_low_bound (node, index_type))
4384 {
4385 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4386 LT, 0, mode, unsignedp, 0);
4387 emit_jump_insn ((*gen_blt_pat) (default_label));
4388 }
4389
4390 /* Value belongs to this node or to the right-hand subtree. */
4391
4392 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4393 LE, 0, mode, unsignedp, 0);
4394 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
4395
4396 emit_case_nodes (index, node->right, default_label, index_type);
4397 }
4398
4399 else if (node->right == 0 && node->left != 0)
4400 {
4401 /* Deal with values to the right of this node,
4402 if they are possible. */
4403 if (!node_has_high_bound (node, index_type))
4404 {
4405 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4406 GT, 0, mode, unsignedp, 0);
4407 emit_jump_insn ((*gen_bgt_pat) (default_label));
4408 }
4409
4410 /* Value belongs to this node or to the left-hand subtree. */
4411
4412 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4413 GE, 0, mode, unsignedp, 0);
4414 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
4415
4416 emit_case_nodes (index, node->left, default_label, index_type);
4417 }
4418
4419 else
4420 {
4421 /* Node has no children so we check low and high bounds to remove
4422 redundant tests. At most one of the bounds can be known,
4423 since otherwise this node is bounded--a case tested already. */
4424
4425 if (!node_has_high_bound (node, index_type))
4426 {
4427 emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
4428 GT, 0, mode, unsignedp, 0);
4429 emit_jump_insn ((*gen_bgt_pat) (default_label));
4430 }
4431
4432 if (!node_has_low_bound (node, index_type))
4433 {
4434 emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
4435 LT, 0, mode, unsignedp, 0);
4436 emit_jump_insn ((*gen_blt_pat) (default_label));
4437 }
4438
4439 emit_jump (label_rtx (node->code_label));
4440 }
4441 }
4442 }
4443 \f
4444 /* These routines are used by the loop unrolling code. They copy BLOCK trees
4445 so that the debugging info will be correct for the unrolled loop. */
4446
4447 /* Indexed by loop number, contains pointer to the first block in the loop,
4448 or zero if none. Only valid if doing loop unrolling and outputting debugger
4449 info. */
4450
4451 tree *loop_number_first_block;
4452
4453 /* Indexed by loop number, contains pointer to the last block in the loop,
4454 only valid if loop_number_first_block is nonzero. */
4455
4456 tree *loop_number_last_block;
4457
4458 /* Indexed by loop number, contains nesting level of first block in the
4459 loop, if any. Only valid if doing loop unrolling and outputting debugger
4460 info. */
4461
4462 int *loop_number_block_level;
4463
4464 /* Scan the function looking for loops, and walk the BLOCK tree at the
4465 same time. Record the first and last BLOCK tree corresponding to each
4466 loop. This function is similar to find_and_verify_loops in loop.c. */
4467
4468 void
4469 find_loop_tree_blocks (f)
4470 rtx f;
4471 {
4472 rtx insn;
4473 int current_loop = -1;
4474 int next_loop = -1;
4475 int loop;
4476 int block_level, tree_level;
4477 tree tree_block, parent_tree_block;
4478
4479 tree_block = DECL_INITIAL (current_function_decl);
4480 parent_tree_block = 0;
4481 block_level = 0;
4482 tree_level = -1;
4483
4484 /* Find boundaries of loops, and save the first and last BLOCK tree
4485 corresponding to each loop. */
4486
4487 for (insn = f; insn; insn = NEXT_INSN (insn))
4488 {
4489 if (GET_CODE (insn) == NOTE)
4490 switch (NOTE_LINE_NUMBER (insn))
4491 {
4492 case NOTE_INSN_LOOP_BEG:
4493 loop_number_block_level[++next_loop] = block_level;
4494 loop_number_first_block[next_loop] = 0;
4495 current_loop = next_loop;
4496 break;
4497
4498 case NOTE_INSN_LOOP_END:
4499 if (current_loop == -1)
4500 abort ();
4501
4502 current_loop = loop_outer_loop[current_loop];
4503 break;
4504
4505 case NOTE_INSN_BLOCK_BEG:
4506 if (tree_level < block_level)
4507 {
4508 /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
4509 we must now visit the subtree of the current block. */
4510 parent_tree_block = tree_block;
4511 tree_block = BLOCK_SUBBLOCKS (tree_block);
4512 tree_level++;
4513 }
4514 else if (tree_level > block_level)
4515 abort ();
4516
4517 /* Save this block tree here for all nested loops for which
4518 this is the topmost block. */
4519 for (loop = current_loop;
4520 loop != -1 && block_level == loop_number_block_level[loop];
4521 loop = loop_outer_loop[loop])
4522 {
4523 if (loop_number_first_block[loop] == 0)
4524 loop_number_first_block[loop] = tree_block;
4525 loop_number_last_block[loop] = tree_block;
4526 }
4527
4528 block_level++;
4529 break;
4530
4531 case NOTE_INSN_BLOCK_END:
4532 block_level--;
4533 if (tree_level > block_level)
4534 {
4535 /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
4536 we must now visit the parent of the current tree. */
4537 if (tree_block != 0 || parent_tree_block == 0)
4538 abort ();
4539 tree_block = parent_tree_block;
4540 parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
4541 tree_level--;
4542 }
4543 tree_block = BLOCK_CHAIN (tree_block);
4544 break;
4545 }
4546 }
4547 }
4548
4549 /* This routine will make COPIES-1 copies of all BLOCK trees that correspond
4550 to BLOCK_BEG notes inside the loop LOOP_NUMBER.
4551
4552 Note that we only copy the topmost level of tree nodes; they will share
4553 pointers to the same subblocks. */
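/* E.g., if a loop body is replicated four times, COPIES is 4, so
   three extra copies of each top-level BLOCK in the loop are chained
   in after the loop's last block (illustrative; the copying itself
   is the loop below). */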
4554
4555 void
4556 unroll_block_trees (loop_number, copies)
4557 int loop_number;
4558 int copies;
4559 {
4560 int i;
4561
4562 /* First check whether there are any blocks that need to be copied. */
4563 if (loop_number_first_block[loop_number])
4564 {
4565 tree first_block = loop_number_first_block[loop_number];
4566 tree last_block = loop_number_last_block[loop_number];
4567 tree last_block_created = 0;
4568
4569 for (i = 0; i < copies - 1; i++)
4570 {
4571 tree block = first_block;
4572 tree insert_after = last_block;
4573 tree copied_block;
4574
4575 /* Copy every block between first_block and last_block inclusive,
4576 inserting the new blocks after last_block. */
4577 do
4578 {
4579 tree new_block = make_node (BLOCK);
4580 BLOCK_VARS (new_block) = BLOCK_VARS (block);
4581 BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
4582 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
4583 BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
4584 TREE_USED (new_block) = TREE_USED (block);
4585
4586 /* Insert the new block after the insertion point, and move
4587 the insertion point to the new block. This ensures that
4588 the copies are inserted in the right order. */
4589 BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
4590 BLOCK_CHAIN (insert_after) = new_block;
4591 insert_after = new_block;
4592
4593 copied_block = block;
4594 block = BLOCK_CHAIN (block);
4595 }
4596 while (copied_block != last_block);
4597
4598 /* Remember the last block created, so that we can update the
4599 info in the tables. */
4600 if (last_block_created == 0)
4601 last_block_created = insert_after;
4602 }
4603
4604 /* For all nested loops for which LAST_BLOCK was originally the last
4605 block, update the tables to indicate that LAST_BLOCK_CREATED is
4606 now the last block in the loop. */
4607 for (i = loop_number; last_block == loop_number_last_block[i];
4608 i = loop_outer_loop[i])
4609 loop_number_last_block[i] = last_block_created;
4610 }
4611 }