/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
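
/* Illustration (not part of the original source): for a C fragment

       if (x > 0)
         y = 1;

   a front end makes roughly this sequence of calls while parsing:

       expand_start_cond (cond, 0);     (after parsing `if (x > 0)')
       ... expand the then-clause ...
       expand_end_cond ();              (after parsing the then-clause)

   where `cond' is the tree for the condition; how that tree is built,
   and the exact arguments used, are front-end specific.  */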

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
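
/* Illustration (not part of the original source): in the GNU C
   statement expression

       z = ({ int t = f (); t + 1; });

   expr_stmts_for_value is nonzero while the braced group is expanded,
   so the value of each expression statement (here `t + 1') is computed
   and recorded in case it turns out to be the last one, and therefore
   the value of the whole group.  */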

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
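
/* Illustration (not part of the original source): for

       switch (c)
         {
         case 1: ...  case 2: ...  case 3: ...  case 4: ...
         }

   the labels are dense in the range [1,4], so a branch table is the
   likely output.  For sparse labels such as 1, 100 and 10000, the case
   nodes are instead balanced into a binary tree and emitted as
   compare-and-jump insns, costing O(log n) comparisons per dispatch
   instead of a huge, mostly empty table.  */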

struct case_node
{
  struct case_node *left;      /* Left son in binary tree */
  struct case_node *right;     /* Right son in binary tree; also node chain */
  struct case_node *parent;    /* Parent of node in binary tree */
  tree low;                    /* Lowest index value for this label */
  tree high;                   /* Highest index value for this label */
  tree code_label;             /* Label to jump to when node matches */
  int balance;                 /* Balance factor, used while the nodes
                                  still form an AVL tree */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
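
/* Illustration (not part of the original source): while expanding

       while (a)        (pushes an entry on loop_stack)
         {              (pushes an entry on block_stack)
           if (b)       (pushes an entry on cond_stack)
             break;     (uses loop_stack to find the exit label)
         }

   three `struct nesting' objects are live at once; all of them are
   also chained, innermost first, through the `all' field on
   nesting_stack.  */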

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups; it must be at the end of or
             after the start of the last unconditional cleanup, and
             before any conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACK too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (loop_stack == this)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all;                     \
          obstack_free (&stmt_obstack, this); }          \
   while (this != target); } while (0)
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
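
/* Illustration (not part of the original source):

       {
         int v[n];
         if (e)
           goto done;
         ...
       }
     done: ;

   The block declaring `v' has a variable-size object, so it records a
   stack level to restore on exit.  When the goto is expanded, `done'
   has not been seen yet, so the right stack-restore code cannot be
   emitted; expand_fixup records the jump instead, and fixup_gotos
   finishes the job once the block is exited and the label is known.  */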

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences                PROTO((int, char *));
static void expand_goto_internal        PROTO((tree, rtx, rtx));
static int expand_fixup                 PROTO((tree, rtx, rtx));
static void expand_nl_handler_label     PROTO((rtx, rtx));
static void expand_nl_goto_receiver     PROTO((void));
static void expand_nl_goto_receivers    PROTO((struct nesting *));
static void fixup_gotos                 PROTO((struct nesting *, rtx, tree,
                                               rtx, int));
static void expand_null_return_1        PROTO((rtx, int));
static void expand_value_return         PROTO((rtx));
static int tail_recursion_args          PROTO((tree, tree));
static void expand_cleanups             PROTO((tree, tree, int, int));
static void check_seenlabel             PROTO((void));
static void do_jump_if_equal            PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs          PROTO((case_node_ptr));
static void group_case_nodes            PROTO((case_node_ptr));
static void balance_case_nodes          PROTO((case_node_ptr *,
                                               case_node_ptr));
static int node_has_low_bound           PROTO((case_node_ptr, tree));
static int node_has_high_bound          PROTO((case_node_ptr, tree));
static int node_is_bounded              PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable      PROTO((rtx));
static void emit_case_nodes             PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node                PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
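
/* Illustration (not part of the original source): this expands GNU C
   computed gotos such as

       void *p = &&lab;
       ...
       goto *p;
     lab: ;

   EXP is the pointer expression (`p' above).  Its value is converted
   to Pmode first on targets that define POINTERS_EXTEND_UNSIGNED, and
   is then used in an indirect jump insn.  */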
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
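
/* Illustration (not part of the original source): in GNU C,

       void outer (void)
       {
         __label__ failed;
         void inner (int x) { if (x) goto failed; }
         inner (1);
         return;
       failed:
         handle_failure ();
       }

   the front end calls declare_nonlocal_label for `failed' because the
   nested function `inner' may jump to it; the code above allocates a
   handler slot in `outer's frame for that jump to go through.
   (`handle_failure' is only a placeholder for this example.)  */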

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with hard_frame_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;

        start_sequence ();
        pushlevel (0);
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point ensures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
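
/* Illustration (not part of the original source): a simple asm with no
   operands, such as

       asm ("nop");

   arrives here as a STRING_CST (possibly wrapped in an ADDR_EXPR by
   the front end) and becomes a single ASM_INPUT rtx carrying the
   template text verbatim.  */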

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
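
/* Illustration (not part of the original source; the constraint
   letters are target-dependent and chosen only for the example): for

       asm volatile ("add %1,%0" : "=r" (x) : "r" (y), "0" (x));

   STRING holds the template "add %1,%0", OUTPUTS holds `x' with
   constraint "=r", INPUTS holds `y' with constraint "r" plus the
   matching-constraint operand "0" (x), and VOL is nonzero because of
   `volatile'.  */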

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return;
        }

      tmp = outputs;
      while (tmp)
        {
          char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return;
            }
          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
         since it wasn't explicitly documented that way, and there is a
         large body of code that puts it last.  Swap the character to
         the front, so as not to uglify any place else.  */
      switch (c_len)
        {
        default:
          if ((p = strchr (constraint, '=')) != NULL)
            break;
          if ((p = strchr (constraint, '+')) != NULL)
            break;
        case 0:
          error ("output operand constraint lacks `='");
          return;
        }

      if (p != constraint)
        {
          j = *p;
          bcopy (constraint, constraint+1, p-constraint);
          *constraint = j;

          warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
        }

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
        {
          error ("output operand constraint %d contains `+'", i);
          return;
        }

      for (j = 1; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':
          case '=':
            error ("operand constraint contains '+' or '=' at illegal position.");
            return;

          case '%':
            if (i + 1 == ninputs + noutputs)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case '?':  case '!':  case '*':  case '&':
          case 'V':  case 'm':  case 'o':  case '<':  case '>':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            error ("matching constraint not valid in output operand");
            break;

          case 'p':  case 'g':  case 'r':
          default:
            allows_reg = 1;
            break;
          }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 0);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
                               TREE_STRING_POINTER (string), "", 0, argvec,
                               constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':  case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'", constraint[j]);
                return;
              }
            break;

          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
          case '?':  case '!':  case '*':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            if (constraint[j] >= '0' + noutputs)
              {
                error
                  ("matching constraint references invalid operand number");
                return;
              }

            /* Try and find the real constraint for this dup.  */
            if (j == 0 && c_len == 1)
              {
                tree o = outputs;
                for (j = constraint[j] - '0'; j > 0; --j)
                  o = TREE_CHAIN (o);

                c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
                constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
                j = 0;
                break;
              }

            /* ... fall through ...  */

          case 'p':  case 'r':
          default:
            allows_reg = 1;
            break;

          case 'g':
            allows_reg = 1;
            allows_mem = 1;
            break;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (! asm_operand_ok (op, constraint))
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints", i);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              rtx memloc = assign_temp (type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }
          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            /* We won't recognize volatile memory as available as a
               memory_operand at this point.  Ignore it.  */
            ;
          else if (queued_subexp_p (op))
            ;
          else
            /* ??? Leave this only until we have experience with what
               happens in combine and elsewhere when constraints are
               not satisfied.  */
            warning ("asm operand %d probably doesn't match constraints", i);
        }
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                             orig_constraint);
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
        = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
        = output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
        = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx_SET (VOIDmode,
                           output_rtx[i],
                           gen_rtx_ASM_OPERANDS (VOIDmode,
                                                 TREE_STRING_POINTER (string),
                                                 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                                 i, argvec, constraints,
                                                 filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx_CLOBBER (VOIDmode,
                                       gen_rtx_MEM (BLKmode,
                                                    gen_rtx_SCRATCH (VOIDmode)));
                  continue;
                }

              /* Ignore unknown register, error already signaled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (flag_syntax_only && ! expr_stmts_for_value)
    last_expr_value = 0;
  else
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
        ;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         BLKmode, 0,
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1715
1716 /* Warn if EXP contains any computations whose results are not used.
1717 Return 1 if a warning is printed; 0 otherwise. */
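/* For example, the statement `x + 1;' computes a value that is never
   used and draws the warning, while `(void) f ();' and `x = f ();'
   do not.  (Illustrative C examples; `x' and `f' are hypothetical.)  */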
1718
1719 int
1720 warn_if_unused_value (exp)
1721 tree exp;
1722 {
1723 if (TREE_USED (exp))
1724 return 0;
1725
1726 switch (TREE_CODE (exp))
1727 {
1728 case PREINCREMENT_EXPR:
1729 case POSTINCREMENT_EXPR:
1730 case PREDECREMENT_EXPR:
1731 case POSTDECREMENT_EXPR:
1732 case MODIFY_EXPR:
1733 case INIT_EXPR:
1734 case TARGET_EXPR:
1735 case CALL_EXPR:
1736 case METHOD_CALL_EXPR:
1737 case RTL_EXPR:
1738 case TRY_CATCH_EXPR:
1739 case WITH_CLEANUP_EXPR:
1740 case EXIT_EXPR:
1741 /* We don't warn about COND_EXPR because it may be a useful
1742 construct if either arm contains a side effect. */
1743 case COND_EXPR:
1744 return 0;
1745
1746 case BIND_EXPR:
1747 /* For a binding, warn if no side effect within it. */
1748 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1749
1750 case SAVE_EXPR:
1751 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1752
1753 case TRUTH_ORIF_EXPR:
1754 case TRUTH_ANDIF_EXPR:
1755 /* In && or ||, warn if 2nd operand has no side effect. */
1756 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1757
1758 case COMPOUND_EXPR:
1759 if (TREE_NO_UNUSED_WARNING (exp))
1760 return 0;
1761 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1762 return 1;
1763 /* Let people do `(foo (), 0)' without a warning. */
1764 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1765 return 0;
1766 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1767
1768 case NOP_EXPR:
1769 case CONVERT_EXPR:
1770 case NON_LVALUE_EXPR:
1771 /* Don't warn about values cast to void. */
1772 if (TREE_TYPE (exp) == void_type_node)
1773 return 0;
1774 /* Don't warn about conversions not explicit in the user's program. */
1775 if (TREE_NO_UNUSED_WARNING (exp))
1776 return 0;
1777 /* Assignment to a cast usually results in a cast of a modify.
1778 Don't complain about that. There can be an arbitrary number of
1779 casts before the modify, so we must loop until we find the first
1780 non-cast expression and then test to see if that is a modify. */
1781 {
1782 tree tem = TREE_OPERAND (exp, 0);
1783
1784 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1785 tem = TREE_OPERAND (tem, 0);
1786
1787 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1788 || TREE_CODE (tem) == CALL_EXPR)
1789 return 0;
1790 }
1791 goto warn;
1792
1793 case INDIRECT_REF:
1794 /* Don't warn about automatic dereferencing of references, since
1795 the user cannot control it. */
1796 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1797 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1798 /* ... fall through ... */
1799
1800 default:
1801 /* Referencing a volatile value is a side effect, so don't warn. */
1802 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1803 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1804 && TREE_THIS_VOLATILE (exp))
1805 return 0;
1806 warn:
1807 warning_with_file_and_line (emit_filename, emit_lineno,
1808 "value computed is not used");
1809 return 1;
1810 }
1811 }
1812
1813 /* Clear out the memory of the last expression evaluated. */
1814
1815 void
1816 clear_last_expr ()
1817 {
1818 last_expr_type = 0;
1819 }
1820
1821 /* Begin a statement which will return a value.
1822 Return the RTL_EXPR for this statement expr.
1823 The caller must save that value and pass it to expand_end_stmt_expr. */
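/* A minimal sketch of the calling protocol (not actual front end
   code):

     tree t = expand_start_stmt_expr ();
     ... expand each statement inside the ({...}) ...
     t = expand_end_stmt_expr (t);

   T then represents the value of the construct.  */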
1824
1825 tree
1826 expand_start_stmt_expr ()
1827 {
1828 int momentary;
1829 tree t;
1830
1831 /* Make the RTL_EXPR node temporary, not momentary,
1832 so that rtl_expr_chain doesn't become garbage. */
1833 momentary = suspend_momentary ();
1834 t = make_node (RTL_EXPR);
1835 resume_momentary (momentary);
1836 do_pending_stack_adjust ();
1837 start_sequence_for_rtl_expr (t);
1838 NO_DEFER_POP;
1839 expr_stmts_for_value++;
1840 return t;
1841 }
1842
1843 /* Restore the previous state at the end of a statement that returns a value.
1844 Returns a tree node representing the statement's value and the
1845 insns to compute the value.
1846
1847 The nodes of that expression have been freed by now, so we cannot use them.
1848 But we don't want to do that anyway; the expression has already been
1849 evaluated and now we just want to use the value. So generate a RTL_EXPR
1850 with the proper type and RTL value.
1851
1852 If the last substatement was not an expression,
1853 return something with type `void'. */
1854
1855 tree
1856 expand_end_stmt_expr (t)
1857 tree t;
1858 {
1859 OK_DEFER_POP;
1860
1861 if (last_expr_type == 0)
1862 {
1863 last_expr_type = void_type_node;
1864 last_expr_value = const0_rtx;
1865 }
1866 else if (last_expr_value == 0)
1867 /* There are some cases where this can happen, such as when the
1868 statement has void type. */
1869 last_expr_value = const0_rtx;
1870 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1871 /* Remove any possible QUEUED. */
1872 last_expr_value = protect_from_queue (last_expr_value, 0);
1873
1874 emit_queue ();
1875
1876 TREE_TYPE (t) = last_expr_type;
1877 RTL_EXPR_RTL (t) = last_expr_value;
1878 RTL_EXPR_SEQUENCE (t) = get_insns ();
1879
1880 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1881
1882 end_sequence ();
1883
1884 /* Don't consider deleting this expr or containing exprs at tree level. */
1885 TREE_SIDE_EFFECTS (t) = 1;
1886 /* Propagate volatility of the actual RTL expr. */
1887 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1888
1889 last_expr_type = 0;
1890 expr_stmts_for_value--;
1891
1892 return t;
1893 }
1894 \f
1895 /* Generate RTL for the start of an if-then. COND is the expression
1896 whose truth should be tested.
1897
1898 If EXITFLAG is nonzero, this conditional is visible to
1899 `exit_something'. */
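/* For example, to expand `if (a) B; else C;' a front end would call,
   in order (a sketch of the protocol, not actual parser code):

     expand_start_cond (a, 0);
     ... expand the statements of B ...
     expand_start_else ();
     ... expand the statements of C ...
     expand_end_cond ();  */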
1900
1901 void
1902 expand_start_cond (cond, exitflag)
1903 tree cond;
1904 int exitflag;
1905 {
1906 struct nesting *thiscond = ALLOC_NESTING ();
1907
1908 /* Make an entry on cond_stack for the cond we are entering. */
1909
1910 thiscond->next = cond_stack;
1911 thiscond->all = nesting_stack;
1912 thiscond->depth = ++nesting_depth;
1913 thiscond->data.cond.next_label = gen_label_rtx ();
1914 /* Before we encounter an `else', we don't need a separate exit label
1915 unless there are supposed to be exit statements
1916 to exit this conditional. */
1917 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1918 thiscond->data.cond.endif_label = thiscond->exit_label;
1919 cond_stack = thiscond;
1920 nesting_stack = thiscond;
1921
1922 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1923 }
1924
1925 /* Generate RTL between the then-clause and the elseif-clause
1926 of an if-then-elseif-.... */
1927
1928 void
1929 expand_start_elseif (cond)
1930 tree cond;
1931 {
1932 if (cond_stack->data.cond.endif_label == 0)
1933 cond_stack->data.cond.endif_label = gen_label_rtx ();
1934 emit_jump (cond_stack->data.cond.endif_label);
1935 emit_label (cond_stack->data.cond.next_label);
1936 cond_stack->data.cond.next_label = gen_label_rtx ();
1937 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1938 }
1939
1940 /* Generate RTL between the then-clause and the else-clause
1941 of an if-then-else. */
1942
1943 void
1944 expand_start_else ()
1945 {
1946 if (cond_stack->data.cond.endif_label == 0)
1947 cond_stack->data.cond.endif_label = gen_label_rtx ();
1948
1949 emit_jump (cond_stack->data.cond.endif_label);
1950 emit_label (cond_stack->data.cond.next_label);
1951 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1952 }
1953
1954 /* After calling expand_start_else, turn this "else" into an "else if"
1955 by providing another condition. */
1956
1957 void
1958 expand_elseif (cond)
1959 tree cond;
1960 {
1961 cond_stack->data.cond.next_label = gen_label_rtx ();
1962 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1963 }
1964
1965 /* Generate RTL for the end of an if-then.
1966 Pop the record for it off of cond_stack. */
1967
1968 void
1969 expand_end_cond ()
1970 {
1971 struct nesting *thiscond = cond_stack;
1972
1973 do_pending_stack_adjust ();
1974 if (thiscond->data.cond.next_label)
1975 emit_label (thiscond->data.cond.next_label);
1976 if (thiscond->data.cond.endif_label)
1977 emit_label (thiscond->data.cond.endif_label);
1978
1979 POPSTACK (cond_stack);
1980 last_expr_type = 0;
1981 }
1982
1983
1984 \f
1985 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1986 loop should be exited by `exit_something'. This is a loop for which
1987 `expand_continue' will jump to the top of the loop.
1988
1989 Make an entry on loop_stack to record the labels associated with
1990 this loop. */
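/* For example, `while (cond) BODY;' might be expanded as (sketch):

     whichloop = expand_start_loop (1);
     expand_exit_loop_if_false (whichloop, cond);
     ... expand the statements of BODY ...
     expand_end_loop ();  */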
1991
1992 struct nesting *
1993 expand_start_loop (exit_flag)
1994 int exit_flag;
1995 {
1996 register struct nesting *thisloop = ALLOC_NESTING ();
1997
1998 /* Make an entry on loop_stack for the loop we are entering. */
1999
2000 thisloop->next = loop_stack;
2001 thisloop->all = nesting_stack;
2002 thisloop->depth = ++nesting_depth;
2003 thisloop->data.loop.start_label = gen_label_rtx ();
2004 thisloop->data.loop.end_label = gen_label_rtx ();
2005 thisloop->data.loop.alt_end_label = 0;
2006 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2007 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2008 loop_stack = thisloop;
2009 nesting_stack = thisloop;
2010
2011 do_pending_stack_adjust ();
2012 emit_queue ();
2013 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2014 emit_label (thisloop->data.loop.start_label);
2015
2016 return thisloop;
2017 }
2018
2019 /* Like expand_start_loop but for a loop where the continuation point
2020 (for expand_continue_loop) will be specified explicitly. */
2021
2022 struct nesting *
2023 expand_start_loop_continue_elsewhere (exit_flag)
2024 int exit_flag;
2025 {
2026 struct nesting *thisloop = expand_start_loop (exit_flag);
2027 loop_stack->data.loop.continue_label = gen_label_rtx ();
2028 return thisloop;
2029 }
2030
2031 /* Specify the continuation point for a loop started with
2032 expand_start_loop_continue_elsewhere.
2033 Use this at the point in the code to which a continue statement
2034 should jump. */
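/* For example, in `for (init; cond; incr) BODY;' a `continue' must
   jump to INCR, not to the top of the loop, so a front end might do
   (sketch):

     ... expand INIT ...
     whichloop = expand_start_loop_continue_elsewhere (1);
     expand_exit_loop_if_false (whichloop, cond);
     ... expand BODY ...
     expand_loop_continue_here ();
     ... expand INCR ...
     expand_end_loop ();  */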
2035
2036 void
2037 expand_loop_continue_here ()
2038 {
2039 do_pending_stack_adjust ();
2040 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2041 emit_label (loop_stack->data.loop.continue_label);
2042 }
2043
2044 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2045 Pop the block off of loop_stack. */
2046
2047 void
2048 expand_end_loop ()
2049 {
2050 rtx start_label = loop_stack->data.loop.start_label;
2051 rtx insn = get_last_insn ();
2052
2053 /* Mark the continue-point at the top of the loop if none elsewhere. */
2054 if (start_label == loop_stack->data.loop.continue_label)
2055 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2056
2057 do_pending_stack_adjust ();
2058
2059 /* If optimizing, perhaps reorder the loop. If the loop starts with
2060 a loop exit, roll that to the end where it will optimize together
2061 with the jump back.
2062
2063 We look for the conditional branch to the exit, except that once
2064 we find such a branch, we don't look past 30 instructions.
2065
2066 In more detail, if the loop presently looks like this (in pseudo-C):
2067
2068 start_label:
2069 if (test) goto end_label;
2070 body;
2071 goto start_label;
2072 end_label:
2073
2074 transform it to look like:
2075
2076 goto start_label;
2077 newstart_label:
2078 body;
2079 start_label:
2080 if (test) goto end_label;
2081 goto newstart_label;
2082 end_label:
2083
2084 Here, the `test' may actually consist of some reasonably complex
2085 code, terminating in a test. */
2086
2087 if (optimize
2088 &&
2089 ! (GET_CODE (insn) == JUMP_INSN
2090 && GET_CODE (PATTERN (insn)) == SET
2091 && SET_DEST (PATTERN (insn)) == pc_rtx
2092 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2093 {
2094 int eh_regions = 0;
2095 int num_insns = 0;
2096 rtx last_test_insn = NULL_RTX;
2097
2098 /* Scan insns from the top of the loop looking for a qualified
2099 conditional exit. */
2100 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2101 insn = NEXT_INSN (insn))
2102 {
2103 if (GET_CODE (insn) == NOTE)
2104 {
2105 if (optimize < 2
2106 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2107 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2108 /* The code that actually moves the exit test will
2109 carefully leave BLOCK notes in their original
2110 location. That means, however, that we can't debug
2111 the exit test itself. So, we refuse to move code
2112 containing BLOCK notes at low optimization levels. */
2113 break;
2114
2115 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2116 ++eh_regions;
2117 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2118 {
2119 --eh_regions;
2120 if (eh_regions < 0)
2121 /* We've come to the end of an EH region, but
2122 never saw the beginning of that region. That
2123 means that an EH region begins before the top
2124 of the loop, and ends in the middle of it. The
2125 existence of such a situation violates a basic
2126 assumption in this code, since that would imply
2127 that even when EH_REGIONS is zero, we might
2128 move code out of an exception region. */
2129 abort ();
2130 }
2131
2132 /* We already know this INSN is a NOTE, so there's no
2133 point in looking at it to see if it's a JUMP. */
2134 continue;
2135 }
2136
2137 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2138 num_insns++;
2139
2140 if (last_test_insn && num_insns > 30)
2141 break;
2142
2143 if (eh_regions > 0)
2144 /* We don't want to move a partial EH region. Consider:
2145
2146 while ( ( { try {
2147 if (cond ()) 0;
2148 else {
2149 bar();
2150 1;
2151 }
2152 } catch (...) {
2153 1;
2154 } )) {
2155 body;
2156 }
2157
2158 This isn't legal C++, but here's what it's supposed to
2159 mean: if cond() is true, stop looping. Otherwise,
2160 call bar, and keep looping. In addition, if cond
2161 throws an exception, catch it and keep looping. Such
2162 constructs are certainly legal in LISP.
2163
2164 We should not move the `if (cond()) 0' test since then
2165 the EH-region for the try-block would be broken up.
2166 (In this case we would move the EH_BEG note for the `try'
2167 and `if cond()' but not the call to bar() or the
2168 EH_END note.)
2169
2170 So we don't look for tests within an EH region. */
2171 continue;
2172
2173 if (GET_CODE (insn) == JUMP_INSN
2174 && GET_CODE (PATTERN (insn)) == SET
2175 && SET_DEST (PATTERN (insn)) == pc_rtx)
2176 {
2177 /* This is indeed a jump. */
2178 rtx dest1 = NULL_RTX;
2179 rtx dest2 = NULL_RTX;
2180 rtx potential_last_test;
2181 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2182 {
2183 /* A conditional jump. */
2184 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2185 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2186 potential_last_test = insn;
2187 }
2188 else
2189 {
2190 /* An unconditional jump. */
2191 dest1 = SET_SRC (PATTERN (insn));
2192 /* Include the BARRIER after the JUMP. */
2193 potential_last_test = NEXT_INSN (insn);
2194 }
2195
2196 do {
2197 if (dest1 && GET_CODE (dest1) == LABEL_REF
2198 && ((XEXP (dest1, 0)
2199 == loop_stack->data.loop.alt_end_label)
2200 || (XEXP (dest1, 0)
2201 == loop_stack->data.loop.end_label)))
2202 {
2203 last_test_insn = potential_last_test;
2204 break;
2205 }
2206
2207 /* If this was a conditional jump, there may be
2208 another label at which we should look. */
2209 dest1 = dest2;
2210 dest2 = NULL_RTX;
2211 } while (dest1);
2212 }
2213 }
2214
2215 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2216 {
2217 /* We found one. Move everything from there up
2218 to the end of the loop, and add a jump into the loop
2219 to jump to there. */
2220 register rtx newstart_label = gen_label_rtx ();
2221 register rtx start_move = start_label;
2222 rtx next_insn;
2223
2224 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2225 then we want to move this note also. */
2226 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2227 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2228 == NOTE_INSN_LOOP_CONT))
2229 start_move = PREV_INSN (start_move);
2230
2231 emit_label_after (newstart_label, PREV_INSN (start_move));
2232
2233 /* Actually move the insns. Start at the beginning, and
2234 keep copying insns until we've copied the
2235 last_test_insn. */
2236 for (insn = start_move; insn; insn = next_insn)
2237 {
2238 /* Figure out which insn comes after this one. We have
2239 to do this before we move INSN. */
2240 if (insn == last_test_insn)
2241 /* We've moved all the insns. */
2242 next_insn = NULL_RTX;
2243 else
2244 next_insn = NEXT_INSN (insn);
2245
2246 if (GET_CODE (insn) == NOTE
2247 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2248 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2249 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2250 NOTE_INSN_BLOCK_ENDs because the correct generation
2251 of debugging information depends on these appearing
2252 in the same order in the RTL and in the tree
2253 structure, where they are represented as BLOCKs.
2254 So, we don't move block notes. Of course, moving
2255 the code inside the block is likely to make it
2256 impossible to debug the instructions in the exit
2257 test, but such is the price of optimization. */
2258 continue;
2259
2260 /* Move the INSN. */
2261 reorder_insns (insn, insn, get_last_insn ());
2262 }
2263
2264 emit_jump_insn_after (gen_jump (start_label),
2265 PREV_INSN (newstart_label));
2266 emit_barrier_after (PREV_INSN (newstart_label));
2267 start_label = newstart_label;
2268 }
2269 }
2270
2271 emit_jump (start_label);
2272 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2273 emit_label (loop_stack->data.loop.end_label);
2274
2275 POPSTACK (loop_stack);
2276
2277 last_expr_type = 0;
2278 }
2279
2280 /* Generate a jump to the current loop's continue-point.
2281 This is usually the top of the loop, but may be specified
2282 explicitly elsewhere. If not currently inside a loop,
2283 return 0 and do nothing; caller will print an error message. */
2284
2285 int
2286 expand_continue_loop (whichloop)
2287 struct nesting *whichloop;
2288 {
2289 last_expr_type = 0;
2290 if (whichloop == 0)
2291 whichloop = loop_stack;
2292 if (whichloop == 0)
2293 return 0;
2294 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2295 NULL_RTX);
2296 return 1;
2297 }
2298
2299 /* Generate a jump to exit the current loop. If not currently inside a loop,
2300 return 0 and do nothing; caller will print an error message. */
2301
2302 int
2303 expand_exit_loop (whichloop)
2304 struct nesting *whichloop;
2305 {
2306 last_expr_type = 0;
2307 if (whichloop == 0)
2308 whichloop = loop_stack;
2309 if (whichloop == 0)
2310 return 0;
2311 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2312 return 1;
2313 }
2314
2315 /* Generate a conditional jump to exit the current loop if COND
2316 evaluates to zero. If not currently inside a loop,
2317 return 0 and do nothing; caller will print an error message. */
2318
2319 int
2320 expand_exit_loop_if_false (whichloop, cond)
2321 struct nesting *whichloop;
2322 tree cond;
2323 {
2324 rtx label = gen_label_rtx ();
2325 rtx last_insn;
2326 last_expr_type = 0;
2327
2328 if (whichloop == 0)
2329 whichloop = loop_stack;
2330 if (whichloop == 0)
2331 return 0;
2332 /* In order to handle fixups, we actually create a conditional jump
2333 around an unconditional branch to exit the loop. If fixups are
2334 necessary, they go before the unconditional branch. */
2335
2336
2337 do_jump (cond, NULL_RTX, label);
2338 last_insn = get_last_insn ();
2339 if (GET_CODE (last_insn) == CODE_LABEL)
2340 whichloop->data.loop.alt_end_label = last_insn;
2341 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2342 NULL_RTX);
2343 emit_label (label);
2344
2345 return 1;
2346 }
2347
2348 /* Return nonzero if the loop nest is empty. Else return zero. */
2349
2350 int
2351 stmt_loop_nest_empty ()
2352 {
2353 return (loop_stack == NULL);
2354 }
2355
2356 /* Return non-zero if we should preserve sub-expressions as separate
2357 pseudos. We never do so if we aren't optimizing. We always do so
2358 if -fexpensive-optimizations.
2359
2360 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2361 the loop may still be a small one. */
2362
2363 int
2364 preserve_subexpressions_p ()
2365 {
2366 rtx insn;
2367
2368 if (flag_expensive_optimizations)
2369 return 1;
2370
2371 if (optimize == 0 || loop_stack == 0)
2372 return 0;
2373
2374 insn = get_last_insn_anywhere ();
2375
2376 return (insn
2377 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2378 < n_non_fixed_regs * 3));
2379
2380 }
2381
2382 /* Generate a jump to exit the current loop, conditional, binding contour
2383 or case statement. Not all such constructs are visible to this function,
2384 only those started with EXIT_FLAG nonzero. Individual languages use
2385 the EXIT_FLAG parameter to control which kinds of constructs you can
2386 exit this way.
2387
2388 If not currently inside anything that can be exited,
2389 return 0 and do nothing; caller will print an error message. */
2390
2391 int
2392 expand_exit_something ()
2393 {
2394 struct nesting *n;
2395 last_expr_type = 0;
2396 for (n = nesting_stack; n; n = n->all)
2397 if (n->exit_label != 0)
2398 {
2399 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2400 return 1;
2401 }
2402
2403 return 0;
2404 }
2405 \f
2406 /* Generate RTL to return from the current function, with no value.
2407 (That is, we do not do anything about returning any value.) */
2408
2409 void
2410 expand_null_return ()
2411 {
2412 struct nesting *block = block_stack;
2413 rtx last_insn = 0;
2414
2415 /* Does any pending block have cleanups? */
2416
2417 while (block && block->data.block.cleanups == 0)
2418 block = block->next;
2419
2420 /* If yes, use a goto to return, since that runs cleanups. */
2421
2422 expand_null_return_1 (last_insn, block != 0);
2423 }
2424
2425 /* Generate RTL to return from the current function, with value VAL. */
2426
2427 static void
2428 expand_value_return (val)
2429 rtx val;
2430 {
2431 struct nesting *block = block_stack;
2432 rtx last_insn = get_last_insn ();
2433 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2434
2435 /* Copy the value to the return location
2436 unless it's already there. */
2437
2438 if (return_reg != val)
2439 {
2440 #ifdef PROMOTE_FUNCTION_RETURN
2441 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2442 int unsignedp = TREE_UNSIGNED (type);
2443 enum machine_mode mode
2444 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2445 &unsignedp, 1);
2446
2447 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2448 convert_move (return_reg, val, unsignedp);
2449 else
2450 #endif
2451 emit_move_insn (return_reg, val);
2452 }
2453 if (GET_CODE (return_reg) == REG
2454 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2455 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2456 /* Handle calls that return values in multiple non-contiguous locations.
2457 The Irix 6 ABI has examples of this. */
2458 else if (GET_CODE (return_reg) == PARALLEL)
2459 {
2460 int i;
2461
2462 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2463 {
2464 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2465
2466 if (GET_CODE (x) == REG
2467 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2468 emit_insn (gen_rtx_USE (VOIDmode, x));
2469 }
2470 }
2471
2472 /* Does any pending block have cleanups? */
2473
2474 while (block && block->data.block.cleanups == 0)
2475 block = block->next;
2476
2477 /* If yes, use a goto to return, since that runs cleanups.
2478 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2479
2480 expand_null_return_1 (last_insn, block != 0);
2481 }
2482
2483 /* Output a return with no value. If LAST_INSN is nonzero,
2484 pretend that the return takes place after LAST_INSN.
2485 If USE_GOTO is nonzero then don't use a return instruction;
2486 go to the return label instead. This causes any cleanups
2487 of pending blocks to be executed normally. */
2488
2489 static void
2490 expand_null_return_1 (last_insn, use_goto)
2491 rtx last_insn;
2492 int use_goto;
2493 {
2494 rtx end_label = cleanup_label ? cleanup_label : return_label;
2495
2496 clear_pending_stack_adjust ();
2497 do_pending_stack_adjust ();
2498 last_expr_type = 0;
2499
2500 /* PCC-struct return always uses an epilogue. */
2501 if (current_function_returns_pcc_struct || use_goto)
2502 {
2503 if (end_label == 0)
2504 end_label = return_label = gen_label_rtx ();
2505 expand_goto_internal (NULL_TREE, end_label, last_insn);
2506 return;
2507 }
2508
2509 /* Otherwise output a simple return-insn if one is available,
2510 unless it won't do the job. */
2511 #ifdef HAVE_return
2512 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2513 {
2514 emit_jump_insn (gen_return ());
2515 emit_barrier ();
2516 return;
2517 }
2518 #endif
2519
2520 /* Otherwise jump to the epilogue. */
2521 expand_goto_internal (NULL_TREE, end_label, last_insn);
2522 }
2523 \f
2524 /* Generate RTL to evaluate the expression RETVAL and return it
2525 from the current function. */
2526
2527 void
2528 expand_return (retval)
2529 tree retval;
2530 {
2531 /* If there are any cleanups to be performed, then they will
2532 be inserted following LAST_INSN. It is desirable
2533 that the last_insn, for such purposes, should be the
2534 last insn before computing the return value. Otherwise, cleanups
2535 which call functions can clobber the return value. */
2536 /* ??? rms: I think that is erroneous, because in C++ it would
2537 run destructors on variables that might be used in the subsequent
2538 computation of the return value. */
2539 rtx last_insn = 0;
2540 register rtx val = 0;
2541 register rtx op0;
2542 tree retval_rhs;
2543 int cleanups;
2544
2545 /* If function wants no value, give it none. */
2546 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2547 {
2548 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2549 emit_queue ();
2550 expand_null_return ();
2551 return;
2552 }
2553
2554 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2555 /* This is not sufficient. We also need to watch for cleanups of the
2556 expression we are about to expand. Unfortunately, we cannot know
2557 if it has cleanups until we expand it, and we want to change how we
2558 expand it depending upon if we need cleanups. We can't win. */
2559 #if 0
2560 cleanups = any_pending_cleanups (1);
2561 #else
2562 cleanups = 1;
2563 #endif
2564
2565 if (TREE_CODE (retval) == RESULT_DECL)
2566 retval_rhs = retval;
2567 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2568 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2569 retval_rhs = TREE_OPERAND (retval, 1);
2570 else if (TREE_TYPE (retval) == void_type_node)
2571 /* Recognize tail-recursive call to void function. */
2572 retval_rhs = retval;
2573 else
2574 retval_rhs = NULL_TREE;
2575
2576 /* Only use `last_insn' if there are cleanups which must be run. */
2577 if (cleanups || cleanup_label != 0)
2578 last_insn = get_last_insn ();
2579
2580 /* Distribute return down conditional expr if either of the sides
2581 may involve tail recursion (see test below). This enhances the number
2582 of tail recursions we see. Don't do this always since it can produce
2583 sub-optimal code in some cases and we distribute assignments into
2584 conditional expressions when it would help. */
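/* For example, `return c ? f (x) : g (x);' is rewritten here roughly
   as `if (c) return f (x); else return g (x);', so that each call is
   exposed as a possible tail recursion.  (Illustrative C; `c', `f',
   `g' and `x' are hypothetical.)  */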
2585
2586 if (optimize && retval_rhs != 0
2587 && frame_offset == 0
2588 && TREE_CODE (retval_rhs) == COND_EXPR
2589 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2590 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2591 {
2592 rtx label = gen_label_rtx ();
2593 tree expr;
2594
2595 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2596 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2597 DECL_RESULT (current_function_decl),
2598 TREE_OPERAND (retval_rhs, 1));
2599 TREE_SIDE_EFFECTS (expr) = 1;
2600 expand_return (expr);
2601 emit_label (label);
2602
2603 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2604 DECL_RESULT (current_function_decl),
2605 TREE_OPERAND (retval_rhs, 2));
2606 TREE_SIDE_EFFECTS (expr) = 1;
2607 expand_return (expr);
2608 return;
2609 }
2610
2611 /* Attempt to optimize the call if it is tail recursive. */
2612 optimize_tail_recursion (retval_rhs, last_insn);
2613
2614 #ifdef HAVE_return
2615 /* This optimization is safe if there are local cleanups
2616 because expand_null_return takes care of them.
2617 ??? I think it should also be safe when there is a cleanup label,
2618 because expand_null_return takes care of them, too.
2619 Any reason why not? */
2620 if (HAVE_return && cleanup_label == 0
2621 && ! current_function_returns_pcc_struct
2622 && BRANCH_COST <= 1)
2623 {
2624 /* If this is return x == y; then generate
2625 if (x == y) return 1; else return 0;
2626 if we can do it with explicit return insns and branches are cheap,
2627 but not if we have the corresponding scc insn. */
2628 int has_scc = 0;
2629 if (retval_rhs)
2630 switch (TREE_CODE (retval_rhs))
2631 {
2632 case EQ_EXPR:
2633 #ifdef HAVE_seq
2634 has_scc = HAVE_seq;
2635 #endif
2636 case NE_EXPR:
2637 #ifdef HAVE_sne
2638 has_scc = HAVE_sne;
2639 #endif
2640 case GT_EXPR:
2641 #ifdef HAVE_sgt
2642 has_scc = HAVE_sgt;
2643 #endif
2644 case GE_EXPR:
2645 #ifdef HAVE_sge
2646 has_scc = HAVE_sge;
2647 #endif
2648 case LT_EXPR:
2649 #ifdef HAVE_slt
2650 has_scc = HAVE_slt;
2651 #endif
2652 case LE_EXPR:
2653 #ifdef HAVE_sle
2654 has_scc = HAVE_sle;
2655 #endif
2656 case TRUTH_ANDIF_EXPR:
2657 case TRUTH_ORIF_EXPR:
2658 case TRUTH_AND_EXPR:
2659 case TRUTH_OR_EXPR:
2660 case TRUTH_NOT_EXPR:
2661 case TRUTH_XOR_EXPR:
2662 if (! has_scc)
2663 {
2664 op0 = gen_label_rtx ();
2665 jumpifnot (retval_rhs, op0);
2666 expand_value_return (const1_rtx);
2667 emit_label (op0);
2668 expand_value_return (const0_rtx);
2669 return;
2670 }
2671 break;
2672
2673 default:
2674 break;
2675 }
2676 }
2677 #endif /* HAVE_return */
2678
2679 /* If the result is an aggregate that is being returned in one (or more)
2680 registers, load the registers here. The compiler currently can't handle
2681 copying a BLKmode value into registers. We could put this code in a
2682 more general area (for use by everyone instead of just function
2683 call/return), but until this feature is generally usable it is kept here
2684 (and in expand_call). The value must go into a pseudo in case there
2685 are cleanups that will clobber the real return register. */
2686
2687 if (retval_rhs != 0
2688 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2689 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2690 {
2691 int i, bitpos, xbitpos;
2692 int big_endian_correction = 0;
2693 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2694 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2695 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2696 (unsigned int)BITS_PER_WORD);
2697 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2698 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2699 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2700 enum machine_mode tmpmode, result_reg_mode;
2701
2702 /* Structures whose size is not a multiple of a word are aligned
2703 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2704 machine, this means we must skip the empty high order bytes when
2705 calculating the bit offset. */
2706 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2707 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2708 * BITS_PER_UNIT));
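/* For example, with 32-bit words and a 3-byte structure on a
   big-endian machine, big_endian_correction is 32 - 3*8 = 8, so the
   24 significant bits are stored toward the least significant end of
   the destination word.  */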
2709
2710 /* Copy the structure BITSIZE bits at a time. */
2711 for (bitpos = 0, xbitpos = big_endian_correction;
2712 bitpos < bytes * BITS_PER_UNIT;
2713 bitpos += bitsize, xbitpos += bitsize)
2714 {
2715 /* We need a new destination pseudo each time xbitpos is
2716 on a word boundary and when xbitpos == big_endian_correction
2717 (the first time through). */
2718 if (xbitpos % BITS_PER_WORD == 0
2719 || xbitpos == big_endian_correction)
2720 {
2721 /* Generate an appropriate register. */
2722 dst = gen_reg_rtx (word_mode);
2723 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2724
2725 /* Clobber the destination before we move anything into it. */
2726 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2727 }
2728
2729 /* We need a new source operand each time bitpos is on a word
2730 boundary. */
2731 if (bitpos % BITS_PER_WORD == 0)
2732 src = operand_subword_force (result_val,
2733 bitpos / BITS_PER_WORD,
2734 BLKmode);
2735
2736 /* Use bitpos for the source extraction (left justified) and
2737 xbitpos for the destination store (right justified). */
2738 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2739 extract_bit_field (src, bitsize,
2740 bitpos % BITS_PER_WORD, 1,
2741 NULL_RTX, word_mode,
2742 word_mode,
2743 bitsize / BITS_PER_UNIT,
2744 BITS_PER_WORD),
2745 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2746 }
2747
2748 /* Find the smallest integer mode large enough to hold the
2749 entire structure and use that mode instead of BLKmode
2750 on the USE insn for the return register. */
2751 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2752 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2753 tmpmode != MAX_MACHINE_MODE;
2754 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2755 {
2756 /* Have we found a large enough mode? */
2757 if (GET_MODE_SIZE (tmpmode) >= bytes)
2758 break;
2759 }
2760
2761 /* No suitable mode found. */
2762 if (tmpmode == MAX_MACHINE_MODE)
2763 abort ();
2764
2765 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2766
2767 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2768 result_reg_mode = word_mode;
2769 else
2770 result_reg_mode = tmpmode;
2771 result_reg = gen_reg_rtx (result_reg_mode);
2772
2773 emit_queue ();
2774 for (i = 0; i < n_regs; i++)
2775 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2776 result_pseudos[i]);
2777
2778 if (tmpmode != result_reg_mode)
2779 result_reg = gen_lowpart (tmpmode, result_reg);
2780
2781 expand_value_return (result_reg);
2782 }
2783 else if (cleanups
2784 && retval_rhs != 0
2785 && TREE_TYPE (retval_rhs) != void_type_node
2786 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2787 {
2788 /* Calculate the return value into a pseudo reg. */
2789 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2790 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2791 val = force_not_mem (val);
2792 emit_queue ();
2793 /* Return the calculated value, doing cleanups first. */
2794 expand_value_return (val);
2795 }
2796 else
2797 {
2798 /* No cleanups or no hard reg used;
2799 calculate value into hard return reg. */
2800 expand_expr (retval, const0_rtx, VOIDmode, 0);
2801 emit_queue ();
2802 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2803 }
2804 }
2805
2806 /* Return 1 if the end of the generated RTX is not a barrier.
2807 This means code already compiled can drop through. */
2808
2809 int
2810 drop_through_at_end_p ()
2811 {
2812 rtx insn = get_last_insn ();
2813 while (insn && GET_CODE (insn) == NOTE)
2814 insn = PREV_INSN (insn);
2815 return insn && GET_CODE (insn) != BARRIER;
2816 }
2817 \f
2818 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2819 and emit code to optimize the tail recursion. LAST_INSN indicates where
2820 to place the jump to the tail recursion label.
2821
2822 This is only used by expand_return, but expand_call is expected to
2823 use it soon. */
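/* For example, in a hypothetical

     int f (int n, int acc) { ... return f (n - 1, n * acc); ... }

   the recursive call is replaced by assignments of the new argument
   values to N and ACC followed by a jump back to the start of the
   function body, provided the checks below succeed.  */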
2824
2825 void
2826 optimize_tail_recursion (call_expr, last_insn)
2827 tree call_expr;
2828 rtx last_insn;
2829 {
2830 /* For tail-recursive call to current function,
2831 just jump back to the beginning.
2832 It's unsafe if any auto variable in this function
2833 has its address taken; for simplicity,
2834 require stack frame to be empty. */
2835 if (optimize && call_expr != 0
2836 && frame_offset == 0
2837 && TREE_CODE (call_expr) == CALL_EXPR
2838 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2839 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2840 /* Finish checking validity, and if valid emit code
2841 to set the argument variables for the new call. */
2842 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2843 DECL_ARGUMENTS (current_function_decl)))
2844 {
2845 if (tail_recursion_label == 0)
2846 {
2847 tail_recursion_label = gen_label_rtx ();
2848 emit_label_after (tail_recursion_label,
2849 tail_recursion_reentry);
2850 }
2851 emit_queue ();
2852 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2853 emit_barrier ();
2854 }
2855 }
2856
2857 /* Emit code to alter this function's formal parms for a tail-recursive call.
2858 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2859 FORMALS is the chain of decls of formals.
2860 Return 1 if this can be done;
2861 otherwise return 0 and do not emit any code. */
2862
2863 static int
2864 tail_recursion_args (actuals, formals)
2865 tree actuals, formals;
2866 {
2867 register tree a = actuals, f = formals;
2868 register int i;
2869 register rtx *argvec;
2870
2871 /* Check that number and types of actuals are compatible
2872 with the formals. This is not always true in valid C code.
2873 Also check that no formal needs to be addressable
2874 and that all formals are scalars. */
2875
2876 /* Also count the args. */
2877
2878 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2879 {
2880 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2881 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2882 return 0;
2883 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2884 return 0;
2885 }
2886 if (a != 0 || f != 0)
2887 return 0;
2888
2889 /* Compute all the actuals. */
2890
2891 argvec = (rtx *) alloca (i * sizeof (rtx));
2892
2893 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2894 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2895
2896 /* Find which actual values refer to current values of previous formals.
2897 Copy each of them now, before any formal is changed. */
2898
2899 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2900 {
2901 int copy = 0;
2902 register int j;
2903 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2904 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2905 { copy = 1; break; }
2906 if (copy)
2907 argvec[i] = copy_to_reg (argvec[i]);
2908 }
2909
2910 /* Store the values of the actuals into the formals. */
2911
2912 for (f = formals, a = actuals, i = 0; f;
2913 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2914 {
2915 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2916 emit_move_insn (DECL_RTL (f), argvec[i]);
2917 else
2918 convert_move (DECL_RTL (f), argvec[i],
2919 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2920 }
2921
2922 free_temp_slots ();
2923 return 1;
2924 }
2925 \f
2926 /* Generate the RTL code for entering a binding contour.
2927 The variables are declared one by one, by calls to `expand_decl'.
2928
2929 EXIT_FLAG is nonzero if this construct should be visible to
2930 `exit_something'. */
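/* A sketch of the protocol for a C compound statement such as
   `{ int x = g (); ... }' (not actual front end code):

     expand_start_bindings (0);
     expand_decl (x_decl);
     ... expand initializers and statements ...
     expand_end_bindings (decls, 1, 0);  */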
2931
2932 void
2933 expand_start_bindings (exit_flag)
2934 int exit_flag;
2935 {
2936 struct nesting *thisblock = ALLOC_NESTING ();
2937 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2938
2939 /* Make an entry on block_stack for the block we are entering. */
2940
2941 thisblock->next = block_stack;
2942 thisblock->all = nesting_stack;
2943 thisblock->depth = ++nesting_depth;
2944 thisblock->data.block.stack_level = 0;
2945 thisblock->data.block.cleanups = 0;
2946 thisblock->data.block.function_call_count = 0;
2947 thisblock->data.block.exception_region = 0;
2948 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2949
2950 thisblock->data.block.conditional_code = 0;
2951 thisblock->data.block.last_unconditional_cleanup = note;
2952 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2953
2954 if (block_stack
2955 && !(block_stack->data.block.cleanups == NULL_TREE
2956 && block_stack->data.block.outer_cleanups == NULL_TREE))
2957 thisblock->data.block.outer_cleanups
2958 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2959 block_stack->data.block.outer_cleanups);
2960 else
2961 thisblock->data.block.outer_cleanups = 0;
2962 thisblock->data.block.label_chain = 0;
2963 thisblock->data.block.innermost_stack_block = stack_block_stack;
2964 thisblock->data.block.first_insn = note;
2965 thisblock->data.block.block_start_count = ++block_start_count;
2966 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2967 block_stack = thisblock;
2968 nesting_stack = thisblock;
2969
2970 /* Make a new level for allocating stack slots. */
2971 push_temp_slots ();
2972 }
2973
2974 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2975 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2976 expand_expr are made. After we end the region, we know that all
2977 space for all temporaries that were created by TARGET_EXPRs will be
2978 destroyed and their space freed for reuse. */
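/* Callers bracket a region like this (sketch):

     expand_start_target_temps ();
     ... one or more calls to expand_expr ...
     expand_end_target_temps ();  */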
2979
2980 void
2981 expand_start_target_temps ()
2982 {
2983 /* This is so that even if the result is preserved, the space
2984 allocated will be freed, as we know that it is no longer in use. */
2985 push_temp_slots ();
2986
2987 /* Start a new binding layer that will keep track of all cleanup
2988 actions to be performed. */
2989 expand_start_bindings (0);
2990
2991 target_temp_slot_level = temp_slot_level;
2992 }
2993
2994 void
2995 expand_end_target_temps ()
2996 {
2997 expand_end_bindings (NULL_TREE, 0, 0);
2998
2999 /* This is so that even if the result is preserved, the space
3000 allocated will be freed, as we know that it is no longer in use. */
3001 pop_temp_slots ();
3002 }
3003
3004 /* Mark top block of block_stack as an implicit binding for an
3005 exception region. This is used to prevent infinite recursion when
3006 ending a binding with expand_end_bindings. It is only ever called
3007 by expand_eh_region_start, as that is the only way to create a
3008 block stack for an exception region. */
3009
3010 void
3011 mark_block_as_eh_region ()
3012 {
3013 block_stack->data.block.exception_region = 1;
3014 if (block_stack->next
3015 && block_stack->next->data.block.conditional_code)
3016 {
3017 block_stack->data.block.conditional_code
3018 = block_stack->next->data.block.conditional_code;
3019 block_stack->data.block.last_unconditional_cleanup
3020 = block_stack->next->data.block.last_unconditional_cleanup;
3021 block_stack->data.block.cleanup_ptr
3022 = block_stack->next->data.block.cleanup_ptr;
3023 }
3024 }
3025
3026 /* True if we are currently emitting insns in an area of output code
3027 that is controlled by a conditional expression. This is used by
3028 the cleanup handling code to generate conditional cleanup actions. */
3029
3030 int
3031 conditional_context ()
3032 {
3033 return block_stack && block_stack->data.block.conditional_code;
3034 }
3035
3036 /* Mark top block of block_stack as not for an implicit binding for an
3037 exception region. This is only ever done by expand_eh_region_end
3038 to let expand_end_bindings know that it is being called explicitly
3039 to end the binding layer for just the binding layer associated with
3040 the exception region; otherwise expand_end_bindings would try to
3041 end all implicit binding layers for exception regions, and then
3042 one normal binding layer. */
3043
3044 void
3045 mark_block_as_not_eh_region ()
3046 {
3047 block_stack->data.block.exception_region = 0;
3048 }
3049
3050 /* True if the top block of block_stack was marked as for an exception
3051 region by mark_block_as_eh_region. */
3052
3053 int
3054 is_eh_region ()
3055 {
3056 return block_stack && block_stack->data.block.exception_region;
3057 }
3058
3059 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3060 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3061 BLOCK node. */
3062
3063 void
3064 remember_end_note (block)
3065 register tree block;
3066 {
3067 BLOCK_END_NOTE (block) = last_block_end_note;
3068 last_block_end_note = NULL_RTX;
3069 }
3070
3071 /* Emit a handler label for a nonlocal goto handler.
3072 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3073
3074 static void
3075 expand_nl_handler_label (slot, before_insn)
3076 rtx slot, before_insn;
3077 {
3078 rtx insns;
3079 rtx handler_label = gen_label_rtx ();
3080
3081 /* Don't let jump_optimize delete the handler. */
3082 LABEL_PRESERVE_P (handler_label) = 1;
3083
3084 start_sequence ();
3085 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3086 insns = get_insns ();
3087 end_sequence ();
3088 emit_insns_before (insns, before_insn);
3089
3090 emit_label (handler_label);
3091 }
3092
3093 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3094 handler. */
3095 static void
3096 expand_nl_goto_receiver ()
3097 {
3098 #ifdef HAVE_nonlocal_goto
3099 if (! HAVE_nonlocal_goto)
3100 #endif
3101 /* First adjust our frame pointer to its actual value. It was
3102 previously set to the start of the virtual area corresponding to
3103 the stacked variables when we branched here and now needs to be
3104 adjusted to the actual hardware fp value.
3105
3106 Assignments to virtual registers are converted by
3107 instantiate_virtual_regs into the corresponding assignment
3108 to the underlying register (fp in this case) that makes
3109 the original assignment true.
3110 So the following insn will actually be
3111 decrementing fp by STARTING_FRAME_OFFSET. */
3112 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3113
3114 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3115 if (fixed_regs[ARG_POINTER_REGNUM])
3116 {
3117 #ifdef ELIMINABLE_REGS
3118 /* If the argument pointer can be eliminated in favor of the
3119 frame pointer, we don't need to restore it. We assume here
3120 that if such an elimination is present, it can always be used.
3121 This is the case on all known machines; if we don't make this
3122 assumption, we do unnecessary saving on many machines. */
3123 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3124 size_t i;
3125
3126 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3127 if (elim_regs[i].from == ARG_POINTER_REGNUM
3128 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3129 break;
3130
3131 if (i == sizeof elim_regs / sizeof elim_regs [0])
3132 #endif
3133 {
3134 /* Now restore our arg pointer from the address at which it
3135 was saved in our stack frame.
3136 If there hasn't been space allocated for it yet, make
3137 some now. */
3138 if (arg_pointer_save_area == 0)
3139 arg_pointer_save_area
3140 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3141 emit_move_insn (virtual_incoming_args_rtx,
3142 /* We need a pseudo here, or else
3143 instantiate_virtual_regs_1 complains. */
3144 copy_to_reg (arg_pointer_save_area));
3145 }
3146 }
3147 #endif
3148
3149 #ifdef HAVE_nonlocal_goto_receiver
3150 if (HAVE_nonlocal_goto_receiver)
3151 emit_insn (gen_nonlocal_goto_receiver ());
3152 #endif
3153 }
3154
3155 /* Make handlers for nonlocal gotos taking place in the function calls in
3156 block THISBLOCK. */
3157
3158 static void
3159 expand_nl_goto_receivers (thisblock)
3160 struct nesting *thisblock;
3161 {
3162 tree link;
3163 rtx afterward = gen_label_rtx ();
3164 rtx insns, slot;
3165 int any_invalid;
3166
3167 /* Record the handler address in the stack slot for that purpose,
3168 during this block, saving and restoring the outer value. */
3169 if (thisblock->next != 0)
3170 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3171 {
3172 rtx save_receiver = gen_reg_rtx (Pmode);
3173 emit_move_insn (XEXP (slot, 0), save_receiver);
3174
3175 start_sequence ();
3176 emit_move_insn (save_receiver, XEXP (slot, 0));
3177 insns = get_insns ();
3178 end_sequence ();
3179 emit_insns_before (insns, thisblock->data.block.first_insn);
3180 }
3181
3182 /* Jump around the handlers; they run only when specially invoked. */
3183 emit_jump (afterward);
3184
3185 /* Make a separate handler for each label. */
3186 link = nonlocal_labels;
3187 slot = nonlocal_goto_handler_slots;
3188 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3189 /* Skip any labels we shouldn't be able to jump to from here;
3190 we generate one special handler for all of them below, which just
3191 calls abort. */
3192 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3193 {
3194 expand_nl_handler_label (XEXP (slot, 0),
3195 thisblock->data.block.first_insn);
3196 expand_nl_goto_receiver ();
3197
3198 /* Jump to the "real" nonlocal label. */
3199 expand_goto (TREE_VALUE (link));
3200 }
3201
3202 /* A second pass over all nonlocal labels; this time we handle those
3203 we should not be able to jump to at this point. */
3204 link = nonlocal_labels;
3205 slot = nonlocal_goto_handler_slots;
3206 any_invalid = 0;
3207 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3208 if (DECL_TOO_LATE (TREE_VALUE (link)))
3209 {
3210 expand_nl_handler_label (XEXP (slot, 0),
3211 thisblock->data.block.first_insn);
3212 any_invalid = 1;
3213 }
3214
3215 if (any_invalid)
3216 {
3217 expand_nl_goto_receiver ();
3218 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3219 VOIDmode, 0);
3220 emit_barrier ();
3221 }
3222
3223 emit_label (afterward);
3224 }
3225
3226 /* Generate RTL code to terminate a binding contour.
3227 VARS is the chain of VAR_DECL nodes
3228 for the variables bound in this contour.
3229 MARK_ENDS is nonzero if we should put a note at the beginning
3230 and end of this binding contour.
3231
3232 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3233 (That is true automatically if the contour has a saved stack level.) */
3234
3235 void
3236 expand_end_bindings (vars, mark_ends, dont_jump_in)
3237 tree vars;
3238 int mark_ends;
3239 int dont_jump_in;
3240 {
3241 register struct nesting *thisblock;
3242 register tree decl;
3243
3244 while (block_stack->data.block.exception_region)
3245 {
3246 /* Because we don't need or want a new temporary level and
3247 because we didn't create one in expand_eh_region_start,
3248 create a fake one now to avoid removing one in
3249 expand_end_bindings. */
3250 push_temp_slots ();
3251
3252 block_stack->data.block.exception_region = 0;
3253
3254 expand_end_bindings (NULL_TREE, 0, 0);
3255 }
3256
3257 /* Since expand_eh_region_start does an expand_start_bindings, we
3258 have to first end all the bindings that were created by
3259 expand_eh_region_start. */
3260
3261 thisblock = block_stack;
3262
3263 if (warn_unused)
3264 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3265 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3266 && ! DECL_IN_SYSTEM_HEADER (decl)
3267 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3268 warning_with_decl (decl, "unused variable `%s'");
3269
3270 if (thisblock->exit_label)
3271 {
3272 do_pending_stack_adjust ();
3273 emit_label (thisblock->exit_label);
3274 }
3275
3276 /* If necessary, make handlers for nonlocal gotos taking
3277 place in the function calls in this block. */
3278 if (function_call_count != thisblock->data.block.function_call_count
3279 && nonlocal_labels
3280 /* Make handler for outermost block
3281 if there were any nonlocal gotos to this function. */
3282 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3283 /* Make handler for inner block if it has something
3284 special to do when you jump out of it. */
3285 : (thisblock->data.block.cleanups != 0
3286 || thisblock->data.block.stack_level != 0)))
3287 expand_nl_goto_receivers (thisblock);
3288
3289 /* Don't allow jumping into a block that has a stack level.
3290 Cleanups are allowed, though. */
3291 if (dont_jump_in
3292 || thisblock->data.block.stack_level != 0)
3293 {
3294 struct label_chain *chain;
3295
3296 /* Any labels in this block are no longer valid to go to.
3297 Mark them to cause an error message. */
3298 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3299 {
3300 DECL_TOO_LATE (chain->label) = 1;
3301 /* If any goto without a fixup came to this label,
3302 that must be an error, because gotos without fixups
3303 come from outside all saved stack-levels. */
3304 if (TREE_ADDRESSABLE (chain->label))
3305 error_with_decl (chain->label,
3306 "label `%s' used before containing binding contour");
3307 }
3308 }
3309
3310 /* Restore stack level in effect before the block
3311 (only if variable-size objects allocated). */
3312 /* Perform any cleanups associated with the block. */
3313
3314 if (thisblock->data.block.stack_level != 0
3315 || thisblock->data.block.cleanups != 0)
3316 {
3317 /* Only clean up here if this point can actually be reached. */
3318 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3319
3320 /* Don't let cleanups affect ({...}) constructs. */
3321 int old_expr_stmts_for_value = expr_stmts_for_value;
3322 rtx old_last_expr_value = last_expr_value;
3323 tree old_last_expr_type = last_expr_type;
3324 expr_stmts_for_value = 0;
3325
3326 /* Do the cleanups. */
3327 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3328 if (reachable)
3329 do_pending_stack_adjust ();
3330
3331 expr_stmts_for_value = old_expr_stmts_for_value;
3332 last_expr_value = old_last_expr_value;
3333 last_expr_type = old_last_expr_type;
3334
3335 /* Restore the stack level. */
3336
3337 if (reachable && thisblock->data.block.stack_level != 0)
3338 {
3339 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3340 thisblock->data.block.stack_level, NULL_RTX);
3341 if (nonlocal_goto_handler_slots != 0)
3342 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3343 NULL_RTX);
3344 }
3345
3346 /* Any gotos out of this block must also do these things.
3347 Also report any gotos with fixups that came to labels in this
3348 level. */
3349 fixup_gotos (thisblock,
3350 thisblock->data.block.stack_level,
3351 thisblock->data.block.cleanups,
3352 thisblock->data.block.first_insn,
3353 dont_jump_in);
3354 }
3355
3356 /* Mark the beginning and end of the scope if requested.
3357 We do this now, after running cleanups on the variables
3358 just going out of scope, so they are in scope for their cleanups. */
3359
3360 if (mark_ends)
3361 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3362 else
3363 /* Get rid of the beginning-mark if we don't make an end-mark. */
3364 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3365
3366 /* If doing stupid register allocation, make sure lives of all
3367 register variables declared here extend thru end of scope. */
3368
3369 if (obey_regdecls)
3370 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3371 {
3372 rtx rtl = DECL_RTL (decl);
3373 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3374 use_variable (rtl);
3375 }
3376
3377 /* Restore the temporary level of TARGET_EXPRs. */
3378 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3379
3380 /* Restore block_stack level for containing block. */
3381
3382 stack_block_stack = thisblock->data.block.innermost_stack_block;
3383 POPSTACK (block_stack);
3384
3385 /* Pop the stack slot nesting and free any slots at this level. */
3386 pop_temp_slots ();
3387 }
3388 \f
3389 /* Generate RTL for the automatic variable declaration DECL.
3390 (Other kinds of declarations are simply ignored if seen here.) */
3391
3392 void
3393 expand_decl (decl)
3394 register tree decl;
3395 {
3396 struct nesting *thisblock = block_stack;
3397 tree type;
3398
3399 type = TREE_TYPE (decl);
3400
3401 /* Only automatic variables need any expansion done.
3402 Static and external variables, and external functions,
3403 will be handled by `assemble_variable' (called from finish_decl).
3404 TYPE_DECL and CONST_DECL require nothing.
3405 PARM_DECLs are handled in `assign_parms'. */
3406
3407 if (TREE_CODE (decl) != VAR_DECL)
3408 return;
3409 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3410 return;
3411
3412 /* Create the RTL representation for the variable. */
3413
3414 if (type == error_mark_node)
3415 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3416 else if (DECL_SIZE (decl) == 0)
3417 /* Variable with incomplete type. */
3418 {
3419 if (DECL_INITIAL (decl) == 0)
3420 /* Error message was already done; now avoid a crash. */
3421 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3422 else
3423 /* An initializer is going to decide the size of this array.
3424 Until we know the size, represent its address with a reg. */
3425 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3426 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3427 }
3428 else if (DECL_MODE (decl) != BLKmode
3429 /* If -ffloat-store, don't put explicit float vars
3430 into regs. */
3431 && !(flag_float_store
3432 && TREE_CODE (type) == REAL_TYPE)
3433 && ! TREE_THIS_VOLATILE (decl)
3434 && ! TREE_ADDRESSABLE (decl)
3435 && (DECL_REGISTER (decl) || ! obey_regdecls)
3436 /* if -fcheck-memory-usage, check all variables. */
3437 && ! current_function_check_memory_usage)
3438 {
3439 /* Automatic variable that can go in a register. */
3440 int unsignedp = TREE_UNSIGNED (type);
3441 enum machine_mode reg_mode
3442 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3443
3444 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3445 mark_user_reg (DECL_RTL (decl));
3446
3447 if (POINTER_TYPE_P (type))
3448 mark_reg_pointer (DECL_RTL (decl),
3449 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3450 / BITS_PER_UNIT));
3451 }
3452
3453 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3454 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3455 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3456 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3457 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3458 {
3459 /* Variable of fixed size that goes on the stack. */
3460 rtx oldaddr = 0;
3461 rtx addr;
3462
3463 /* If we previously made RTL for this decl, it must be an array
3464 whose size was determined by the initializer.
3465 The old address was a register; set that register now
3466 to the proper address. */
3467 if (DECL_RTL (decl) != 0)
3468 {
3469 if (GET_CODE (DECL_RTL (decl)) != MEM
3470 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3471 abort ();
3472 oldaddr = XEXP (DECL_RTL (decl), 0);
3473 }
3474
3475 DECL_RTL (decl)
3476 = assign_stack_temp (DECL_MODE (decl),
3477 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3478 + BITS_PER_UNIT - 1)
3479 / BITS_PER_UNIT),
3480 1);
3481 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3482 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3483
3484 /* Set alignment we actually gave this decl. */
3485 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3486 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3487
3488 if (oldaddr)
3489 {
3490 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3491 if (addr != oldaddr)
3492 emit_move_insn (oldaddr, addr);
3493 }
3494
3495 /* If this is a memory ref that contains aggregate components,
3496 mark it as such for CSE and loop optimization. */
3497 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3498 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3499 #if 0
3500 /* If this is in memory because of -ffloat-store,
3501 set the volatile bit, to prevent optimizations from
3502 undoing the effects. */
3503 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3504 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3505 #endif
3506
3507 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3508 }
3509 else
3510 /* Dynamic-size object: must push space on the stack. */
3511 {
3512 rtx address, size;
3513
3514 /* Record the stack pointer on entry to the block, if we have
3515 not already done so. */
3516 if (thisblock->data.block.stack_level == 0)
3517 {
3518 do_pending_stack_adjust ();
3519 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3520 &thisblock->data.block.stack_level,
3521 thisblock->data.block.first_insn);
3522 stack_block_stack = thisblock;
3523 }
3524
3525 /* Compute the variable's size, in bytes. */
3526 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3527 DECL_SIZE (decl),
3528 size_int (BITS_PER_UNIT)),
3529 NULL_RTX, VOIDmode, 0);
3530 free_temp_slots ();
3531
3532 /* Allocate space on the stack for the variable. Note that
3533 DECL_ALIGN says how the variable is to be aligned and we
3534 cannot use it to conclude anything about the alignment of
3535 the size. */
3536 address = allocate_dynamic_stack_space (size, NULL_RTX,
3537 TYPE_ALIGN (TREE_TYPE (decl)));
3538
3539 /* Reference the variable indirectly through that rtx. */
3540 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3541
3542 /* If this is a memory ref that contains aggregate components,
3543 mark it as such for CSE and loop optimization. */
3544 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3545 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3546
3547 /* Indicate the alignment we actually gave this variable. */
3548 #ifdef STACK_BOUNDARY
3549 DECL_ALIGN (decl) = STACK_BOUNDARY;
3550 #else
3551 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3552 #endif
3553 }
3554
3555 if (TREE_THIS_VOLATILE (decl))
3556 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3557 #if 0 /* A variable is not necessarily unchanging
3558 just because it is const. RTX_UNCHANGING_P
3559 means no change in the function,
3560 not merely no change in the variable's scope.
3561 It is correct to set RTX_UNCHANGING_P if the variable's scope
3562 is the whole function. There's no convenient way to test that. */
3563 if (TREE_READONLY (decl))
3564 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3565 #endif
3566
3567 /* If doing stupid register allocation, make sure life of any
3568 register variable starts here, at the start of its scope. */
3569
3570 if (obey_regdecls)
3571 use_variable (DECL_RTL (decl));
3572 }
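/* Illustrative sketch, not part of the compiler: the three storage
   decisions expand_decl makes for automatic variables, shown as the
   source declarations that select each case.  */
#if 0
void
example (int n)
{
  int i;                /* scalar, not volatile or addressable: typically
                           gets a pseudo register (gen_reg_rtx)          */
  char fixed[32];       /* fixed size: stack slot (assign_stack_temp)    */
  char dynamic[n];      /* variable size: allocate_dynamic_stack_space   */
}
#endif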
3573
3574
3575 \f
3576 /* Emit code to perform the initialization of a declaration DECL. */
3577
3578 void
3579 expand_decl_init (decl)
3580 tree decl;
3581 {
3582 int was_used = TREE_USED (decl);
3583
3584 /* If this is a CONST_DECL, we don't have to generate any code, but
3585 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3586 to be set while in the obstack containing the constant. If we don't
3587 do this, we can lose if we have functions nested three deep and the middle
3588 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3589 the innermost function is the first to expand that STRING_CST. */
3590 if (TREE_CODE (decl) == CONST_DECL)
3591 {
3592 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3593 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3594 EXPAND_INITIALIZER);
3595 return;
3596 }
3597
3598 if (TREE_STATIC (decl))
3599 return;
3600
3601 /* Compute and store the initial value now. */
3602
3603 if (DECL_INITIAL (decl) == error_mark_node)
3604 {
3605 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3606
3607 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3608 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3609 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3610 0, 0);
3611 emit_queue ();
3612 }
3613 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3614 {
3615 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3616 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3617 emit_queue ();
3618 }
3619
3620 /* Don't let the initialization count as "using" the variable. */
3621 TREE_USED (decl) = was_used;
3622
3623 /* Free any temporaries we made while initializing the decl. */
3624 preserve_temp_slots (NULL_RTX);
3625 free_temp_slots ();
3626 }
3627
3628 /* CLEANUP is an expression to be executed at exit from this binding contour;
3629 for example, in C++, it might call the destructor for this variable.
3630
3631 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3632 CLEANUP multiple times, and have the correct semantics. This
3633 happens in exception handling, and for gotos, returns, and breaks
3634 that leave the current scope.
3635
3636 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3637 that is not associated with any particular variable. */
3638
3639 int
3640 expand_decl_cleanup (decl, cleanup)
3641 tree decl, cleanup;
3642 {
3643 struct nesting *thisblock = block_stack;
3644
3645 /* Error if we are not in any block. */
3646 if (thisblock == 0)
3647 return 0;
3648
3649 /* Record the cleanup if there is one. */
3650
3651 if (cleanup != 0)
3652 {
3653 tree t;
3654 rtx seq;
3655 tree *cleanups = &thisblock->data.block.cleanups;
3656 int cond_context = conditional_context ();
3657
3658 if (cond_context)
3659 {
3660 rtx flag = gen_reg_rtx (word_mode);
3661 rtx set_flag_0;
3662 tree cond;
3663
3664 start_sequence ();
3665 emit_move_insn (flag, const0_rtx);
3666 set_flag_0 = get_insns ();
3667 end_sequence ();
3668
3669 thisblock->data.block.last_unconditional_cleanup
3670 = emit_insns_after (set_flag_0,
3671 thisblock->data.block.last_unconditional_cleanup);
3672
3673 emit_move_insn (flag, const1_rtx);
3674
3675 /* All cleanups must be on the function_obstack. */
3676 push_obstacks_nochange ();
3677 resume_temporary_allocation ();
3678
3679 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3680 DECL_RTL (cond) = flag;
3681
3682 /* Conditionalize the cleanup. */
3683 cleanup = build (COND_EXPR, void_type_node,
3684 truthvalue_conversion (cond),
3685 cleanup, integer_zero_node);
3686 cleanup = fold (cleanup);
3687
3688 pop_obstacks ();
3689
3690 cleanups = thisblock->data.block.cleanup_ptr;
3691 }
3692
3693 /* All cleanups must be on the function_obstack. */
3694 push_obstacks_nochange ();
3695 resume_temporary_allocation ();
3696 cleanup = unsave_expr (cleanup);
3697 pop_obstacks ();
3698
3699 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3700
3701 if (! cond_context)
3702 /* If this block has a cleanup, it belongs in stack_block_stack. */
3703 stack_block_stack = thisblock;
3704
3705 if (cond_context)
3706 {
3707 start_sequence ();
3708 }
3709
3710 /* If this was optimized so that there is no exception region for the
3711 cleanup, then mark the TREE_LIST node, so that we can later tell
3712 if we need to call expand_eh_region_end. */
3713 if (! using_eh_for_cleanups_p
3714 || expand_eh_region_start_tree (decl, cleanup))
3715 TREE_ADDRESSABLE (t) = 1;
3716 /* If that started a new EH region, we're in a new block. */
3717 thisblock = block_stack;
3718
3719 if (cond_context)
3720 {
3721 seq = get_insns ();
3722 end_sequence ();
3723 if (seq)
3724 thisblock->data.block.last_unconditional_cleanup
3725 = emit_insns_after (seq,
3726 thisblock->data.block.last_unconditional_cleanup);
3727 }
3728 else
3729 {
3730 thisblock->data.block.last_unconditional_cleanup
3731 = get_last_insn ();
3732 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3733 }
3734 }
3735 return 1;
3736 }
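/* Illustrative sketch, not part of the compiler: the effect of
   registering a cleanup from a conditional context.  The flag is
   cleared at the last unconditional point, set where the cleanup is
   registered, and tested by the COND_EXPR wrapped around CLEANUP.
   `do_cleanup' is a hypothetical cleanup action.  */
#if 0
extern void do_cleanup (void);

static void
example (int cond)
{
  int flag = 0;         /* emitted after last_unconditional_cleanup */
  if (cond)
    {
      flag = 1;         /* emitted where the cleanup is registered  */
      /* ... conditional work protected by the cleanup ...          */
    }
  if (flag)             /* the fold()ed COND_EXPR built above       */
    do_cleanup ();
}
#endif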
3737
3738 /* Like expand_decl_cleanup, but suppress generating an exception handler
3739 to perform the cleanup. */
3740
3741 int
3742 expand_decl_cleanup_no_eh (decl, cleanup)
3743 tree decl, cleanup;
3744 {
3745 int save_eh = using_eh_for_cleanups_p;
3746 int result;
3747
3748 using_eh_for_cleanups_p = 0;
3749 result = expand_decl_cleanup (decl, cleanup);
3750 using_eh_for_cleanups_p = save_eh;
3751
3752 return result;
3753 }
3754
3755 /* Arrange for the top element of the dynamic cleanup chain to be
3756 popped if we exit the current binding contour. DECL is the
3757 associated declaration, if any, otherwise NULL_TREE. If the
3758 current contour is left via an exception, then __sjthrow will pop
3759 the top element off the dynamic cleanup chain. The code that
3760 avoids doing the action we push into the cleanup chain in the
3761 exceptional case is contained in expand_cleanups.
3762
3763 This routine is only used by expand_eh_region_start, and that is
3764 the only way in which an exception region should be started. This
3765 routine is only used when using the setjmp/longjmp codegen method
3766 for exception handling. */
3767
3768 int
3769 expand_dcc_cleanup (decl)
3770 tree decl;
3771 {
3772 struct nesting *thisblock = block_stack;
3773 tree cleanup;
3774
3775 /* Error if we are not in any block. */
3776 if (thisblock == 0)
3777 return 0;
3778
3779 /* Record the cleanup for the dynamic handler chain. */
3780
3781 /* All cleanups must be on the function_obstack. */
3782 push_obstacks_nochange ();
3783 resume_temporary_allocation ();
3784 cleanup = make_node (POPDCC_EXPR);
3785 pop_obstacks ();
3786
3787 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3788 thisblock->data.block.cleanups
3789 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3790
3791 /* If this block has a cleanup, it belongs in stack_block_stack. */
3792 stack_block_stack = thisblock;
3793 return 1;
3794 }
3795
3796 /* Arrange for the top element of the dynamic handler chain to be
3797 popped if we exit the current binding contour. DECL is the
3798 associated declaration, if any, otherwise NULL_TREE. If the current
3799 contour is left via an exception, then __sjthrow will pop the top
3800 element off the dynamic handler chain. The code that avoids doing
3801 the action we push into the handler chain in the exceptional case
3802 is contained in expand_cleanups.
3803
3804 This routine is only used by expand_eh_region_start, and that is
3805 the only way in which an exception region should be started. This
3806 routine is only used when using the setjmp/longjmp codegen method
3807 for exception handling. */
3808
3809 int
3810 expand_dhc_cleanup (decl)
3811 tree decl;
3812 {
3813 struct nesting *thisblock = block_stack;
3814 tree cleanup;
3815
3816 /* Error if we are not in any block. */
3817 if (thisblock == 0)
3818 return 0;
3819
3820 /* Record the cleanup for the dynamic handler chain. */
3821
3822 /* All cleanups must be on the function_obstack. */
3823 push_obstacks_nochange ();
3824 resume_temporary_allocation ();
3825 cleanup = make_node (POPDHC_EXPR);
3826 pop_obstacks ();
3827
3828 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3829 thisblock->data.block.cleanups
3830 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3831
3832 /* If this block has a cleanup, it belongs in stack_block_stack. */
3833 stack_block_stack = thisblock;
3834 return 1;
3835 }
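/* Illustrative sketch, not part of the compiler: the shape of the
   chain that the POPDHC_EXPR/POPDCC_EXPR cleanups maintain under the
   setjmp/longjmp exception scheme.  All names here are hypothetical;
   assumes <setjmp.h>.  */
#if 0
#include <setjmp.h>

struct handler_node
{
  struct handler_node *next;   /* enclosing handler                   */
  jmp_buf where;               /* __sjthrow longjmps to the top entry */
};

static struct handler_node *handler_top;

static void
push_handler (n)
     struct handler_node *n;
{
  n->next = handler_top;
  handler_top = n;
}

static void
pop_handler ()                  /* the action POPDHC_EXPR expands to   */
{
  handler_top = handler_top->next;
}
#endif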
3836 \f
3837 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3838 DECL_ELTS is the list of elements that belong to DECL's type.
3839 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3840
3841 void
3842 expand_anon_union_decl (decl, cleanup, decl_elts)
3843 tree decl, cleanup, decl_elts;
3844 {
3845 struct nesting *thisblock = block_stack;
3846 rtx x;
3847
3848 expand_decl (decl);
3849 expand_decl_cleanup (decl, cleanup);
3850 x = DECL_RTL (decl);
3851
3852 while (decl_elts)
3853 {
3854 tree decl_elt = TREE_VALUE (decl_elts);
3855 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3856 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3857
3858 /* Propagate the union's alignment to the elements. */
3859 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3860
3861 /* If the element has BLKmode and the union doesn't, the union is
3862 aligned such that the element doesn't need to have BLKmode, so
3863 change the element's mode to the appropriate one for its size. */
3864 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3865 DECL_MODE (decl_elt) = mode
3866 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3867 MODE_INT, 1);
3868
3869 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3870 instead create a new MEM rtx with the proper mode. */
3871 if (GET_CODE (x) == MEM)
3872 {
3873 if (mode == GET_MODE (x))
3874 DECL_RTL (decl_elt) = x;
3875 else
3876 {
3877 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3878 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3879 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3880 }
3881 }
3882 else if (GET_CODE (x) == REG)
3883 {
3884 if (mode == GET_MODE (x))
3885 DECL_RTL (decl_elt) = x;
3886 else
3887 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3888 }
3889 else
3890 abort ();
3891
3892 /* Record the cleanup if there is one. */
3893
3894 if (cleanup != 0)
3895 thisblock->data.block.cleanups
3896 = temp_tree_cons (decl_elt, cleanup_elt,
3897 thisblock->data.block.cleanups);
3898
3899 decl_elts = TREE_CHAIN (decl_elts);
3900 }
3901 }
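/* Illustrative sketch, not part of the compiler: an anonymous union
   (a GNU extension) whose members all share the union's rtx, each with
   its mode adjusted as done above.  */
#if 0
void
example (void)
{
  union { int i; float f; };   /* anonymous union declaration           */
  i = 1;                       /* DECL_RTL (i): the union's rtx, SImode */
  f = 2.0f;                    /* DECL_RTL (f): same rtx, SFmode        */
}
#endif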
3902 \f
3903 /* Expand a list of cleanups LIST.
3904 Elements may be expressions or may be nested lists.
3905
3906 If DONT_DO is nonnull, then any list-element
3907 whose TREE_PURPOSE matches DONT_DO is omitted.
3908 This is sometimes used to avoid a cleanup associated with
3909 a value that is being returned out of the scope.
3910
3911 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3912 goto and handle protection regions specially in that case.
3913
3914 If REACHABLE, we emit code, otherwise just inform the exception handling
3915 code about this finalization. */
3916
3917 static void
3918 expand_cleanups (list, dont_do, in_fixup, reachable)
3919 tree list;
3920 tree dont_do;
3921 int in_fixup;
3922 int reachable;
3923 {
3924 tree tail;
3925 for (tail = list; tail; tail = TREE_CHAIN (tail))
3926 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3927 {
3928 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3929 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3930 else
3931 {
3932 if (! in_fixup)
3933 {
3934 tree cleanup = TREE_VALUE (tail);
3935
3936 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3937 if (TREE_CODE (cleanup) != POPDHC_EXPR
3938 && TREE_CODE (cleanup) != POPDCC_EXPR
3939 /* See expand_eh_region_start_tree for this case. */
3940 && ! TREE_ADDRESSABLE (tail))
3941 {
3942 cleanup = protect_with_terminate (cleanup);
3943 expand_eh_region_end (cleanup);
3944 }
3945 }
3946
3947 if (reachable)
3948 {
3949 /* Cleanups may be run multiple times. For example,
3950 when exiting a binding contour, we expand the
3951 cleanups associated with that contour. When a goto
3952 within that binding contour has a target outside that
3953 contour, it will expand all cleanups from its scope to
3954 the target. Though the cleanups are expanded multiple
3955 times, the control paths are non-overlapping so the
3956 cleanups will not be executed twice. */
3957
3958 /* We may need to protect fixups with rethrow regions. */
3959 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3960
3961 if (protect)
3962 expand_fixup_region_start ();
3963
3964 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3965 if (protect)
3966 expand_fixup_region_end (TREE_VALUE (tail));
3967 free_temp_slots ();
3968 }
3969 }
3970 }
3971 }
3972
3973 /* Mark the context we are emitting RTL for as a conditional
3974 context, so that any cleanup actions we register with
3975 expand_decl_cleanup will be properly conditionalized when those
3976 cleanup actions are later performed. Must be called before any
3977 expression (tree) is expanded that is within a conditional context. */
3978
3979 void
3980 start_cleanup_deferral ()
3981 {
3982 /* block_stack can be NULL if we are inside the parameter list. It is
3983 OK to do nothing, because cleanups aren't possible here. */
3984 if (block_stack)
3985 ++block_stack->data.block.conditional_code;
3986 }
3987
3988 /* Mark the end of a conditional region of code. Because cleanup
3989 deferrals may be nested, we may still be in a conditional region
3990 after we end the currently deferred cleanups; only after we end all
3991 deferred cleanups are we back in unconditional code. */
3992
3993 void
3994 end_cleanup_deferral ()
3995 {
3996 /* block_stack can be NULL if we are inside the parameter list. It is
3997 OK to do nothing, because cleanups aren't possible here. */
3998 if (block_stack)
3999 --block_stack->data.block.conditional_code;
4000 }
4001
4002 /* Move all cleanups from the current block_stack
4003 to the containing block_stack, where they are assumed to
4004 have been created. If anything can cause a temporary to
4005 be created, but not expanded for more than one level of
4006 block_stacks, then this code will have to change. */
4007
4008 void
4009 move_cleanups_up ()
4010 {
4011 struct nesting *block = block_stack;
4012 struct nesting *outer = block->next;
4013
4014 outer->data.block.cleanups
4015 = chainon (block->data.block.cleanups,
4016 outer->data.block.cleanups);
4017 block->data.block.cleanups = 0;
4018 }
4019
4020 tree
4021 last_cleanup_this_contour ()
4022 {
4023 if (block_stack == 0)
4024 return 0;
4025
4026 return block_stack->data.block.cleanups;
4027 }
4028
4029 /* Return 1 if there are any pending cleanups at this point.
4030 If THIS_CONTOUR is nonzero, check the current contour as well.
4031 Otherwise, look only at the contours that enclose this one. */
4032
4033 int
4034 any_pending_cleanups (this_contour)
4035 int this_contour;
4036 {
4037 struct nesting *block;
4038
4039 if (block_stack == 0)
4040 return 0;
4041
4042 if (this_contour && block_stack->data.block.cleanups != NULL)
4043 return 1;
4044 if (block_stack->data.block.cleanups == 0
4045 && block_stack->data.block.outer_cleanups == 0)
4046 return 0;
4047
4048 for (block = block_stack->next; block; block = block->next)
4049 if (block->data.block.cleanups != 0)
4050 return 1;
4051
4052 return 0;
4053 }
4054 \f
4055 /* Enter a case (Pascal) or switch (C) statement.
4056 Push a block onto case_stack and nesting_stack
4057 to accumulate the case-labels that are seen
4058 and to record the labels generated for the statement.
4059
4060 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4061 Otherwise, this construct is transparent for `exit_something'.
4062
4063 EXPR is the index-expression to be dispatched on.
4064 TYPE is its nominal type. We could simply convert EXPR to this type,
4065 but instead we take short cuts. */
4066
4067 void
4068 expand_start_case (exit_flag, expr, type, printname)
4069 int exit_flag;
4070 tree expr;
4071 tree type;
4072 char *printname;
4073 {
4074 register struct nesting *thiscase = ALLOC_NESTING ();
4075
4076 /* Make an entry on case_stack for the case we are entering. */
4077
4078 thiscase->next = case_stack;
4079 thiscase->all = nesting_stack;
4080 thiscase->depth = ++nesting_depth;
4081 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4082 thiscase->data.case_stmt.case_list = 0;
4083 thiscase->data.case_stmt.index_expr = expr;
4084 thiscase->data.case_stmt.nominal_type = type;
4085 thiscase->data.case_stmt.default_label = 0;
4086 thiscase->data.case_stmt.num_ranges = 0;
4087 thiscase->data.case_stmt.printname = printname;
4088 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4089 case_stack = thiscase;
4090 nesting_stack = thiscase;
4091
4092 do_pending_stack_adjust ();
4093
4094 /* Make sure case_stmt.start points to something that won't
4095 need any transformation before expand_end_case. */
4096 if (GET_CODE (get_last_insn ()) != NOTE)
4097 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4098
4099 thiscase->data.case_stmt.start = get_last_insn ();
4100
4101 start_cleanup_deferral ();
4102 }
4103
4104
4105 /* Start a "dummy case statement" within which case labels are invalid
4106 and are not connected to any larger real case statement.
4107 This can be used if you don't want to let a case statement jump
4108 into the middle of certain kinds of constructs. */
4109
4110 void
4111 expand_start_case_dummy ()
4112 {
4113 register struct nesting *thiscase = ALLOC_NESTING ();
4114
4115 /* Make an entry on case_stack for the dummy. */
4116
4117 thiscase->next = case_stack;
4118 thiscase->all = nesting_stack;
4119 thiscase->depth = ++nesting_depth;
4120 thiscase->exit_label = 0;
4121 thiscase->data.case_stmt.case_list = 0;
4122 thiscase->data.case_stmt.start = 0;
4123 thiscase->data.case_stmt.nominal_type = 0;
4124 thiscase->data.case_stmt.default_label = 0;
4125 thiscase->data.case_stmt.num_ranges = 0;
4126 case_stack = thiscase;
4127 nesting_stack = thiscase;
4128 start_cleanup_deferral ();
4129 }
4130
4131 /* End a dummy case statement. */
4132
4133 void
4134 expand_end_case_dummy ()
4135 {
4136 end_cleanup_deferral ();
4137 POPSTACK (case_stack);
4138 }
4139
4140 /* Return the data type of the index-expression
4141 of the innermost case statement, or null if none. */
4142
4143 tree
4144 case_index_expr_type ()
4145 {
4146 if (case_stack)
4147 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4148 return 0;
4149 }
4150 \f
4151 static void
4152 check_seenlabel ()
4153 {
4154 /* If this is the first label, warn if any insns have been emitted. */
4155 if (case_stack->data.case_stmt.line_number_status >= 0)
4156 {
4157 rtx insn;
4158
4159 restore_line_number_status
4160 (case_stack->data.case_stmt.line_number_status);
4161 case_stack->data.case_stmt.line_number_status = -1;
4162
4163 for (insn = case_stack->data.case_stmt.start;
4164 insn;
4165 insn = NEXT_INSN (insn))
4166 {
4167 if (GET_CODE (insn) == CODE_LABEL)
4168 break;
4169 if (GET_CODE (insn) != NOTE
4170 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4171 {
4172 do
4173 insn = PREV_INSN (insn);
4174 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4175
4176 /* If insn is zero, then there must have been a syntax error. */
4177 if (insn)
4178 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4179 NOTE_LINE_NUMBER(insn),
4180 "unreachable code at beginning of %s",
4181 case_stack->data.case_stmt.printname);
4182 break;
4183 }
4184 }
4185 }
4186 }
4187
4188 /* Accumulate one case or default label inside a case or switch statement.
4189 VALUE is the value of the case (a null pointer, for a default label).
4190 The function CONVERTER, when applied to arguments T and V,
4191 converts the value V to the type T.
4192
4193 If not currently inside a case or switch statement, return 1 and do
4194 nothing. The caller will print a language-specific error message.
4195 If VALUE is a duplicate or overlaps, return 2 and do nothing
4196 except store the (first) duplicate node in *DUPLICATE.
4197 If VALUE is out of range, return 3 and do nothing.
4198 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4199 Return 0 on success.
4200
4201 Extended to handle range statements. */
4202
4203 int
4204 pushcase (value, converter, label, duplicate)
4205 register tree value;
4206 tree (*converter) PROTO((tree, tree));
4207 register tree label;
4208 tree *duplicate;
4209 {
4210 tree index_type;
4211 tree nominal_type;
4212
4213 /* Fail if not inside a real case statement. */
4214 if (! (case_stack && case_stack->data.case_stmt.start))
4215 return 1;
4216
4217 if (stack_block_stack
4218 && stack_block_stack->depth > case_stack->depth)
4219 return 5;
4220
4221 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4222 nominal_type = case_stack->data.case_stmt.nominal_type;
4223
4224 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4225 if (index_type == error_mark_node)
4226 return 0;
4227
4228 /* Convert VALUE to the type in which the comparisons are nominally done. */
4229 if (value != 0)
4230 value = (*converter) (nominal_type, value);
4231
4232 check_seenlabel ();
4233
4234 /* Fail if this value is out of range for the actual type of the index
4235 (which may be narrower than NOMINAL_TYPE). */
4236 if (value != 0 && ! int_fits_type_p (value, index_type))
4237 return 3;
4238
4239 /* Fail if this is a duplicate or overlaps another entry. */
4240 if (value == 0)
4241 {
4242 if (case_stack->data.case_stmt.default_label != 0)
4243 {
4244 *duplicate = case_stack->data.case_stmt.default_label;
4245 return 2;
4246 }
4247 case_stack->data.case_stmt.default_label = label;
4248 }
4249 else
4250 return add_case_node (value, value, label, duplicate);
4251
4252 expand_label (label);
4253 return 0;
4254 }
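/* Illustrative sketch, not part of the compiler: how a front end might
   act on the return codes documented above.  The messages are
   hypothetical stand-ins for the language-specific diagnostics.  */
#if 0
static void
handle_case_label (value, converter, label)
     tree value;
     tree (*converter) PROTO((tree, tree));
     tree label;
{
  tree duplicate;

  switch (pushcase (value, converter, label, &duplicate))
    {
    case 0:                            /* success */
      break;
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:                            /* DUPLICATE holds the prior label */
      error ("duplicate case value");
      break;
    case 3:
      error ("case value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
}
#endif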
4255
4256 /* Like pushcase but this case applies to all values between VALUE1 and
4257 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4258 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4259 starts at VALUE1 and ends at the highest value of the index type.
4260 If both are NULL, this case applies to all values.
4261
4262 The return value is the same as that of pushcase but there is one
4263 additional error code: 4 means the specified range was empty. */
4264
4265 int
4266 pushcase_range (value1, value2, converter, label, duplicate)
4267 register tree value1, value2;
4268 tree (*converter) PROTO((tree, tree));
4269 register tree label;
4270 tree *duplicate;
4271 {
4272 tree index_type;
4273 tree nominal_type;
4274
4275 /* Fail if not inside a real case statement. */
4276 if (! (case_stack && case_stack->data.case_stmt.start))
4277 return 1;
4278
4279 if (stack_block_stack
4280 && stack_block_stack->depth > case_stack->depth)
4281 return 5;
4282
4283 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4284 nominal_type = case_stack->data.case_stmt.nominal_type;
4285
4286 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4287 if (index_type == error_mark_node)
4288 return 0;
4289
4290 check_seenlabel ();
4291
4292 /* Convert VALUEs to type in which the comparisons are nominally done
4293 and replace any unspecified value with the corresponding bound. */
4294 if (value1 == 0)
4295 value1 = TYPE_MIN_VALUE (index_type);
4296 if (value2 == 0)
4297 value2 = TYPE_MAX_VALUE (index_type);
4298
4299 /* Fail if the range is empty. Do this before any conversion since
4300 we want to allow out-of-range empty ranges. */
4301 if (value2 && tree_int_cst_lt (value2, value1))
4302 return 4;
4303
4304 value1 = (*converter) (nominal_type, value1);
4305
4306 /* If the max was unbounded, use the max of the nominal_type we are
4307 converting to. Do this after the < check above to suppress false
4308 positives. */
4309 if (!value2)
4310 value2 = TYPE_MAX_VALUE (nominal_type);
4311 value2 = (*converter) (nominal_type, value2);
4312
4313 /* Fail if these values are out of range. */
4314 if (TREE_CONSTANT_OVERFLOW (value1)
4315 || ! int_fits_type_p (value1, index_type))
4316 return 3;
4317
4318 if (TREE_CONSTANT_OVERFLOW (value2)
4319 || ! int_fits_type_p (value2, index_type))
4320 return 3;
4321
4322 return add_case_node (value1, value2, label, duplicate);
4323 }
4324
4325 /* Do the actual insertion of a case label for pushcase and pushcase_range
4326 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4327 slowdown for large switch statements. */
4328
4329 static int
4330 add_case_node (low, high, label, duplicate)
4331 tree low, high;
4332 tree label;
4333 tree *duplicate;
4334 {
4335 struct case_node *p, **q, *r;
4336
4337 q = &case_stack->data.case_stmt.case_list;
4338 p = *q;
4339
4340 while ((r = *q))
4341 {
4342 p = r;
4343
4344 /* Keep going past elements distinctly greater than HIGH. */
4345 if (tree_int_cst_lt (high, p->low))
4346 q = &p->left;
4347
4348 /* or distinctly less than LOW. */
4349 else if (tree_int_cst_lt (p->high, low))
4350 q = &p->right;
4351
4352 else
4353 {
4354 /* We have an overlap; this is an error. */
4355 *duplicate = p->code_label;
4356 return 2;
4357 }
4358 }
4359
4360 /* Add this label to the chain, and succeed.
4361 Copy LOW, HIGH so they are on temporary rather than momentary
4362 obstack and will thus survive till the end of the case statement. */
4363
4364 r = (struct case_node *) oballoc (sizeof (struct case_node));
4365 r->low = copy_node (low);
4366
4367 /* If the bounds are equal, turn this into the one-value case. */
4368
4369 if (tree_int_cst_equal (low, high))
4370 r->high = r->low;
4371 else
4372 {
4373 r->high = copy_node (high);
4374 case_stack->data.case_stmt.num_ranges++;
4375 }
4376
4377 r->code_label = label;
4378 expand_label (label);
4379
4380 *q = r;
4381 r->parent = p;
4382 r->left = 0;
4383 r->right = 0;
4384 r->balance = 0;
4385
4386 while (p)
4387 {
4388 struct case_node *s;
4389
4390 if (r == p->left)
4391 {
4392 int b;
4393
4394 if (! (b = p->balance))
4395 /* Growth propagation from left side. */
4396 p->balance = -1;
4397 else if (b < 0)
4398 {
4399 if (r->balance < 0)
4400 {
4401 /* R-Rotation */
4402 if ((p->left = s = r->right))
4403 s->parent = p;
4404
4405 r->right = p;
4406 p->balance = 0;
4407 r->balance = 0;
4408 s = p->parent;
4409 p->parent = r;
4410
4411 if ((r->parent = s))
4412 {
4413 if (s->left == p)
4414 s->left = r;
4415 else
4416 s->right = r;
4417 }
4418 else
4419 case_stack->data.case_stmt.case_list = r;
4420 }
4421 else
4422 /* r->balance == +1 */
4423 {
4424 /* LR-Rotation */
4425
4426 int b2;
4427 struct case_node *t = r->right;
4428
4429 if ((p->left = s = t->right))
4430 s->parent = p;
4431
4432 t->right = p;
4433 if ((r->right = s = t->left))
4434 s->parent = r;
4435
4436 t->left = r;
4437 b = t->balance;
4438 b2 = b < 0;
4439 p->balance = b2;
4440 b2 = -b2 - b;
4441 r->balance = b2;
4442 t->balance = 0;
4443 s = p->parent;
4444 p->parent = t;
4445 r->parent = t;
4446
4447 if ((t->parent = s))
4448 {
4449 if (s->left == p)
4450 s->left = t;
4451 else
4452 s->right = t;
4453 }
4454 else
4455 case_stack->data.case_stmt.case_list = t;
4456 }
4457 break;
4458 }
4459
4460 else
4461 {
4462 /* p->balance == +1; growth of left side balances the node. */
4463 p->balance = 0;
4464 break;
4465 }
4466 }
4467 else
4468 /* r == p->right */
4469 {
4470 int b;
4471
4472 if (! (b = p->balance))
4473 /* Growth propagation from right side. */
4474 p->balance++;
4475 else if (b > 0)
4476 {
4477 if (r->balance > 0)
4478 {
4479 /* L-Rotation */
4480
4481 if ((p->right = s = r->left))
4482 s->parent = p;
4483
4484 r->left = p;
4485 p->balance = 0;
4486 r->balance = 0;
4487 s = p->parent;
4488 p->parent = r;
4489 if ((r->parent = s))
4490 {
4491 if (s->left == p)
4492 s->left = r;
4493 else
4494 s->right = r;
4495 }
4496
4497 else
4498 case_stack->data.case_stmt.case_list = r;
4499 }
4500
4501 else
4502 /* r->balance == -1 */
4503 {
4504 /* RL-Rotation */
4505 int b2;
4506 struct case_node *t = r->left;
4507
4508 if ((p->right = s = t->left))
4509 s->parent = p;
4510
4511 t->left = p;
4512
4513 if ((r->left = s = t->right))
4514 s->parent = r;
4515
4516 t->right = r;
4517 b = t->balance;
4518 b2 = b < 0;
4519 r->balance = b2;
4520 b2 = -b2 - b;
4521 p->balance = b2;
4522 t->balance = 0;
4523 s = p->parent;
4524 p->parent = t;
4525 r->parent = t;
4526
4527 if ((t->parent = s))
4528 {
4529 if (s->left == p)
4530 s->left = t;
4531 else
4532 s->right = t;
4533 }
4534
4535 else
4536 case_stack->data.case_stmt.case_list = t;
4537 }
4538 break;
4539 }
4540 else
4541 {
4542 /* p->balance == -1; growth of right side balances the node. */
4543 p->balance = 0;
4544 break;
4545 }
4546 }
4547
4548 r = p;
4549 p = p->parent;
4550 }
4551
4552 return 0;
4553 }
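/* Illustrative sketch, not part of the compiler: the plain R-rotation
   from the rebalancing loop above, in isolation on a minimal node type.
   P is left-heavy and its left child R is also left-heavy; afterwards R
   roots the subtree and both balance factors are zero, as in the code
   above.  */
#if 0
struct node { struct node *left, *right; int balance; };

static struct node *
rotate_right (p)
     struct node *p;
{
  struct node *r = p->left;    /* left child becomes the subtree root */
  p->left = r->right;          /* R's right subtree moves under P     */
  r->right = p;
  p->balance = 0;
  r->balance = 0;
  return r;                    /* caller re-links R where P was       */
}
#endif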
4554
4555 \f
4556 /* Returns the number of possible values of TYPE.
4557 Returns -1 if the number is unknown or variable.
4558 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4559 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4560 do not increase monotonically (there may be duplicates);
4561 to 1 if the values increase monotonically, but not always by 1;
4562 otherwise sets it to 0. */
4563
4564 HOST_WIDE_INT
4565 all_cases_count (type, sparseness)
4566 tree type;
4567 int *sparseness;
4568 {
4569 HOST_WIDE_INT count;
4570 *sparseness = 0;
4571
4572 switch (TREE_CODE (type))
4573 {
4574 tree t;
4575 case BOOLEAN_TYPE:
4576 count = 2;
4577 break;
4578 case CHAR_TYPE:
4579 count = 1 << BITS_PER_UNIT;
4580 break;
4581 default:
4582 case INTEGER_TYPE:
4583 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4584 || TYPE_MAX_VALUE (type) == NULL
4585 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4586 return -1;
4587 else
4588 {
4589 /* count
4590 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4591 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4592 but with overflow checking. */
4593 tree mint = TYPE_MIN_VALUE (type);
4594 tree maxt = TYPE_MAX_VALUE (type);
4595 HOST_WIDE_INT lo, hi;
4596 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4597 &lo, &hi);
4598 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4599 lo, hi, &lo, &hi);
4600 add_double (lo, hi, 1, 0, &lo, &hi);
4601 if (hi != 0 || lo < 0)
4602 return -2;
4603 count = lo;
4604 }
4605 break;
4606 case ENUMERAL_TYPE:
4607 count = 0;
4608 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4609 {
4610 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4611 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4612 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4613 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4614 *sparseness = 1;
4615 count++;
4616 }
4617 if (*sparseness == 1)
4618 {
4619 tree prev = TREE_VALUE (TYPE_VALUES (type));
4620 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4621 {
4622 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4623 {
4624 *sparseness = 2;
4625 break;
4626 }
4627 prev = TREE_VALUE (t);
4628 }
4629
4630 }
4631 }
4632 return count;
4633 }
4634
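/* Illustrative sketch, not part of the compiler: the overflow-checked
   count computed for the INTEGER_TYPE case above, i.e. max - min + 1,
   done so that a full-range type is reported as "does not fit" (-2)
   instead of silently wrapping.  Assumes <limits.h> and max >= min.  */
#if 0
#include <limits.h>

static long
range_count (min, max)
     long min, max;
{
  unsigned long span = (unsigned long) max - (unsigned long) min;

  if (span == (unsigned long) -1        /* max - min + 1 wraps to zero */
      || span + 1 > (unsigned long) LONG_MAX)
    return -2;                          /* cf. hi != 0 || lo < 0 above */
  return (long) (span + 1);
}
#endif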
4635
4636 #define BITARRAY_TEST(ARRAY, INDEX) \
4637 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4638 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4639 #define BITARRAY_SET(ARRAY, INDEX) \
4640 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4641 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4642
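/* Illustrative sketch, not part of the compiler: minimal use of the
   bitstring macros above.  `warn_unhandled' is a hypothetical
   diagnostic.  */
#if 0
extern void warn_unhandled (int);

static void
bitarray_example ()
{
  unsigned char seen[(256 + HOST_BITS_PER_CHAR - 1) / HOST_BITS_PER_CHAR];

  bzero (seen, sizeof seen);
  BITARRAY_SET (seen, 42);           /* mark case value offset 42  */
  if (! BITARRAY_TEST (seen, 43))    /* offset 43 was never marked */
    warn_unhandled (43);
}
#endif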
4643 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4644 according to the case values we have seen, assuming the case expression
4645 has the given TYPE.
4646 SPARSENESS is as determined by all_cases_count.
4647
4648 The time needed is proportional to COUNT, unless
4649 SPARSENESS is 2, in which case quadratic time is needed. */
4650
4651 void
4652 mark_seen_cases (type, cases_seen, count, sparseness)
4653 tree type;
4654 unsigned char *cases_seen;
4655 long count;
4656 int sparseness;
4657 {
4658 tree next_node_to_try = NULL_TREE;
4659 long next_node_offset = 0;
4660
4661 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4662 tree val = make_node (INTEGER_CST);
4663 TREE_TYPE (val) = type;
4664 if (! root)
4665 ; /* Do nothing */
4666 else if (sparseness == 2)
4667 {
4668 tree t;
4669 HOST_WIDE_INT xlo;
4670
4671 /* This less efficient loop is only needed to handle
4672 duplicate case values (multiple enum constants
4673 with the same value). */
4674 TREE_TYPE (val) = TREE_TYPE (root->low);
4675 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4676 t = TREE_CHAIN (t), xlo++)
4677 {
4678 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4679 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4680 n = root;
4681 do
4682 {
4683 /* Keep going past elements distinctly greater than VAL. */
4684 if (tree_int_cst_lt (val, n->low))
4685 n = n->left;
4686
4687 /* or distinctly less than VAL. */
4688 else if (tree_int_cst_lt (n->high, val))
4689 n = n->right;
4690
4691 else
4692 {
4693 /* We have found a matching range. */
4694 BITARRAY_SET (cases_seen, xlo);
4695 break;
4696 }
4697 }
4698 while (n);
4699 }
4700 }
4701 else
4702 {
4703 if (root->left)
4704 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4705 for (n = root; n; n = n->right)
4706 {
4707 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4708 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4709 while ( ! tree_int_cst_lt (n->high, val))
4710 {
4711 /* Calculate (into xlo) the "offset" of the integer (val).
4712 The element with lowest value has offset 0, the next smallest
4713 element has offset 1, etc. */
4714
4715 HOST_WIDE_INT xlo, xhi;
4716 tree t;
4717 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4718 {
4719 /* The TYPE_VALUES will be in increasing order, so
4720 start searching where we last ended. */
4721 t = next_node_to_try;
4722 xlo = next_node_offset;
4723 xhi = 0;
4724 for (;;)
4725 {
4726 if (t == NULL_TREE)
4727 {
4728 t = TYPE_VALUES (type);
4729 xlo = 0;
4730 }
4731 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4732 {
4733 next_node_to_try = TREE_CHAIN (t);
4734 next_node_offset = xlo + 1;
4735 break;
4736 }
4737 xlo++;
4738 t = TREE_CHAIN (t);
4739 if (t == next_node_to_try)
4740 {
4741 xlo = -1;
4742 break;
4743 }
4744 }
4745 }
4746 else
4747 {
4748 t = TYPE_MIN_VALUE (type);
4749 if (t)
4750 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4751 &xlo, &xhi);
4752 else
4753 xlo = xhi = 0;
4754 add_double (xlo, xhi,
4755 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4756 &xlo, &xhi);
4757 }
4758
4759 if (xhi == 0 && xlo >= 0 && xlo < count)
4760 BITARRAY_SET (cases_seen, xlo);
4761 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4762 1, 0,
4763 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4764 }
4765 }
4766 }
4767 }
4768
4769 /* Called when the index of a switch statement is an enumerated type
4770 and there is no default label.
4771
4772 Check that all enumeration literals are covered by the case
4773 expressions of a switch. Also warn if there are any extra
4774 switch cases that are *not* elements of the enumerated type.
4775
4776 If all enumeration literals were covered by the case expressions,
4777 turn one of the expressions into the default expression since it should
4778 not be possible to fall through such a switch. */
4779
4780 void
4781 check_for_full_enumeration_handling (type)
4782 tree type;
4783 {
4784 register struct case_node *n;
4785 register tree chain;
4786 #if 0 /* variable used by 'if 0'ed code below. */
4787 register struct case_node **l;
4788 int all_values = 1;
4789 #endif
4790
4791 /* True iff the selector type is a numbered set mode. */
4792 int sparseness = 0;
4793
4794 /* The number of possible selector values. */
4795 HOST_WIDE_INT size;
4796
4797 /* For each possible selector value, a one iff it has been matched
4798 by a case value alternative. */
4799 unsigned char *cases_seen;
4800
4801 /* The allocated size of cases_seen, in chars. */
4802 long bytes_needed;
4803
4804 if (! warn_switch)
4805 return;
4806
4807 size = all_cases_count (type, &sparseness);
4808 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4809
4810 if (size > 0 && size < 600000
4811 /* We deliberately use malloc here - not xmalloc. */
4812 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4813 {
4814 long i;
4815 tree v = TYPE_VALUES (type);
4816 bzero (cases_seen, bytes_needed);
4817
4818 /* The time complexity of this code is normally O(N), where
4819 N is the number of members in the enumerated type.
4820 However, if type is an ENUMERAL_TYPE whose values do not
4821 increase monotonically, O(N*log(N)) time may be needed. */
4822
4823 mark_seen_cases (type, cases_seen, size, sparseness);
4824
4825 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4826 {
4827 if (BITARRAY_TEST(cases_seen, i) == 0)
4828 warning ("enumeration value `%s' not handled in switch",
4829 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4830 }
4831
4832 free (cases_seen);
4833 }
4834
4835 /* Now we go the other way around; we warn if there are case
4836 expressions that don't correspond to enumerators. This can
4837 occur since C and C++ don't enforce type-checking of
4838 assignments to enumeration variables. */
4839
4840 if (case_stack->data.case_stmt.case_list
4841 && case_stack->data.case_stmt.case_list->left)
4842 case_stack->data.case_stmt.case_list
4843 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4844 if (warn_switch)
4845 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4846 {
4847 for (chain = TYPE_VALUES (type);
4848 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4849 chain = TREE_CHAIN (chain))
4850 ;
4851
4852 if (!chain)
4853 {
4854 if (TYPE_NAME (type) == 0)
4855 warning ("case value `%ld' not in enumerated type",
4856 (long) TREE_INT_CST_LOW (n->low));
4857 else
4858 warning ("case value `%ld' not in enumerated type `%s'",
4859 (long) TREE_INT_CST_LOW (n->low),
4860 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4861 == IDENTIFIER_NODE)
4862 ? TYPE_NAME (type)
4863 : DECL_NAME (TYPE_NAME (type))));
4864 }
4865 if (!tree_int_cst_equal (n->low, n->high))
4866 {
4867 for (chain = TYPE_VALUES (type);
4868 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4869 chain = TREE_CHAIN (chain))
4870 ;
4871
4872 if (!chain)
4873 {
4874 if (TYPE_NAME (type) == 0)
4875 warning ("case value `%ld' not in enumerated type",
4876 (long) TREE_INT_CST_LOW (n->high));
4877 else
4878 warning ("case value `%ld' not in enumerated type `%s'",
4879 (long) TREE_INT_CST_LOW (n->high),
4880 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4881 == IDENTIFIER_NODE)
4882 ? TYPE_NAME (type)
4883 : DECL_NAME (TYPE_NAME (type))));
4884 }
4885 }
4886 }
4887
4888 #if 0
4889 /* ??? This optimization is disabled because it causes valid programs to
4890 fail. ANSI C does not guarantee that an expression with enum type
4891 will have a value that is the same as one of the enumeration literals. */
4892
4893 /* If all values were found as case labels, make one of them the default
4894 label. Thus, this switch will never fall through. We arbitrarily pick
4895 the last one to make the default since this is likely the most
4896 efficient choice. */
4897
4898 if (all_values)
4899 {
4900 for (l = &case_stack->data.case_stmt.case_list;
4901 (*l)->right != 0;
4902 l = &(*l)->right)
4903 ;
4904
4905 case_stack->data.case_stmt.default_label = (*l)->code_label;
4906 *l = 0;
4907 }
4908 #endif /* 0 */
4909 }
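/* Illustrative sketch, not part of the compiler: source code that would
   draw both warnings above under -Wswitch; GREEN is an unhandled
   enumerator, and 42 is a case value outside the enumerated type.  */
#if 0
enum color { RED, GREEN, BLUE };

int
example (c)
     enum color c;
{
  switch (c)              /* no default label, so coverage is checked   */
    {
    case RED:  return 0;
    case BLUE: return 1;  /* warning: `GREEN' not handled in switch     */
    case 42:   return 2;  /* warning: case value not in enumerated type */
    }
  return -1;
}
#endif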
4910
4911 \f
4912 /* Terminate a case (Pascal) or switch (C) statement
4913 in which ORIG_INDEX is the expression to be tested.
4914 Generate the code to test it and jump to the right place. */
4915
4916 void
4917 expand_end_case (orig_index)
4918 tree orig_index;
4919 {
4920 tree minval, maxval, range, orig_minval;
4921 rtx default_label = 0;
4922 register struct case_node *n;
4923 unsigned int count;
4924 rtx index;
4925 rtx table_label;
4926 int ncases;
4927 rtx *labelvec;
4928 register int i;
4929 rtx before_case;
4930 register struct nesting *thiscase = case_stack;
4931 tree index_expr, index_type;
4932 int unsignedp;
4933
4934 table_label = gen_label_rtx ();
4935 index_expr = thiscase->data.case_stmt.index_expr;
4936 index_type = TREE_TYPE (index_expr);
4937 unsignedp = TREE_UNSIGNED (index_type);
4938
4939 do_pending_stack_adjust ();
4940
4941 /* This might get a spurious warning in the presence of a syntax error;
4942 it could be fixed by moving the call to check_seenlabel after the
4943 check for error_mark_node, and copying the code of check_seenlabel that
4944 deals with case_stack->data.case_stmt.line_number_status /
4945 restore_line_number_status in front of the call to end_cleanup_deferral;
4946 however, this might miss some useful warnings in the presence of
4947 non-syntax errors. */
4948 check_seenlabel ();
4949
4950 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4951 if (index_type != error_mark_node)
4952 {
4953 /* If switch expression was an enumerated type, check that all
4954 enumeration literals are covered by the cases.
4955 No sense trying this if there's a default case, however. */
4956
4957 if (!thiscase->data.case_stmt.default_label
4958 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4959 && TREE_CODE (index_expr) != INTEGER_CST)
4960 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4961
4962 /* If we don't have a default-label, create one here,
4963 after the body of the switch. */
4964 if (thiscase->data.case_stmt.default_label == 0)
4965 {
4966 thiscase->data.case_stmt.default_label
4967 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4968 expand_label (thiscase->data.case_stmt.default_label);
4969 }
4970 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4971
4972 before_case = get_last_insn ();
4973
4974 if (thiscase->data.case_stmt.case_list
4975 && thiscase->data.case_stmt.case_list->left)
4976 thiscase->data.case_stmt.case_list
4977 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4978
4979 /* Simplify the case-list before we count it. */
4980 group_case_nodes (thiscase->data.case_stmt.case_list);
4981
4982 /* Get upper and lower bounds of case values.
4983 Also convert all the case values to the index expr's data type. */
4984
4985 count = 0;
4986 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4987 {
4988 /* Check low and high label values are integers. */
4989 if (TREE_CODE (n->low) != INTEGER_CST)
4990 abort ();
4991 if (TREE_CODE (n->high) != INTEGER_CST)
4992 abort ();
4993
4994 n->low = convert (index_type, n->low);
4995 n->high = convert (index_type, n->high);
4996
4997 /* Count the elements and track the largest and smallest
4998 of them (treating them as signed even if they are not). */
4999 if (count++ == 0)
5000 {
5001 minval = n->low;
5002 maxval = n->high;
5003 }
5004 else
5005 {
5006 if (INT_CST_LT (n->low, minval))
5007 minval = n->low;
5008 if (INT_CST_LT (maxval, n->high))
5009 maxval = n->high;
5010 }
5011 /* A range counts double, since it requires two compares. */
5012 if (! tree_int_cst_equal (n->low, n->high))
5013 count++;
5014 }
5015
5016 orig_minval = minval;
5017
5018 /* Compute span of values. */
5019 if (count != 0)
5020 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5021
5022 end_cleanup_deferral ();
5023
5024 if (count == 0)
5025 {
5026 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5027 emit_queue ();
5028 emit_jump (default_label);
5029 }
5030
5031 /* If the range of values is much bigger than the number of values,
5032 make a sequence of conditional branches instead of a dispatch table.
5033 If the switch-index is a constant, do it this way
5034 because we can optimize it. */
5035
5036 #ifndef CASE_VALUES_THRESHOLD
5037 #ifdef HAVE_casesi
5038 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5039 #else
5040 /* If the machine does not have a case insn that compares the
5041 bounds, this means extra overhead for dispatch tables,
5042 which raises the threshold for using them. */
5043 #define CASE_VALUES_THRESHOLD 5
5044 #endif /* HAVE_casesi */
5045 #endif /* CASE_VALUES_THRESHOLD */
5046
5047 else if (TREE_INT_CST_HIGH (range) != 0
5048 || count < (unsigned int) CASE_VALUES_THRESHOLD
5049 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5050 > 10 * count)
5051 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5052 || flag_pic
5053 #endif
5054 || TREE_CODE (index_expr) == INTEGER_CST
5055 /* These will reduce to a constant. */
5056 || (TREE_CODE (index_expr) == CALL_EXPR
5057 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5058 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5059 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5060 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5061 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5062 {
5063 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5064
5065 /* If the index is a short or char for which we do not have
5066 an insn to handle comparisons directly, convert it to
5067 a full integer now, rather than letting each comparison
5068 generate the conversion. */
5069
5070 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5071 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5072 == CODE_FOR_nothing))
5073 {
5074 enum machine_mode wider_mode;
5075 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5076 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5077 if (cmp_optab->handlers[(int) wider_mode].insn_code
5078 != CODE_FOR_nothing)
5079 {
5080 index = convert_to_mode (wider_mode, index, unsignedp);
5081 break;
5082 }
5083 }
5084
5085 emit_queue ();
5086 do_pending_stack_adjust ();
5087
5088 index = protect_from_queue (index, 0);
5089 if (GET_CODE (index) == MEM)
5090 index = copy_to_reg (index);
5091 if (GET_CODE (index) == CONST_INT
5092 || TREE_CODE (index_expr) == INTEGER_CST)
5093 {
5094 /* Make a tree node with the proper constant value
5095 if we don't already have one. */
5096 if (TREE_CODE (index_expr) != INTEGER_CST)
5097 {
5098 index_expr
5099 = build_int_2 (INTVAL (index),
5100 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5101 index_expr = convert (index_type, index_expr);
5102 }
5103
5104 /* For constant index expressions we need only
5105 issue an unconditional branch to the appropriate
5106 target code. The job of removing any unreachable
5107 code is left to the optimization phase if the
5108 "-O" option is specified. */
5109 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5110 if (! tree_int_cst_lt (index_expr, n->low)
5111 && ! tree_int_cst_lt (n->high, index_expr))
5112 break;
5113
5114 if (n)
5115 emit_jump (label_rtx (n->code_label));
5116 else
5117 emit_jump (default_label);
5118 }
5119 else
5120 {
5121 /* If the index expression is not constant we generate
5122 a binary decision tree to select the appropriate
5123 target code. This is done as follows:
5124
5125 The list of cases is rearranged into a binary tree,
5126 nearly optimal assuming equal probability for each case.
5127
5128 The tree is transformed into RTL, eliminating
5129 redundant test conditions at the same time.
5130
5131 If program flow could reach the end of the
5132 decision tree an unconditional jump to the
5133 default code is emitted. */
5134
5135 use_cost_table
5136 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5137 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5138 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5139 NULL_PTR);
5140 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5141 default_label, index_type);
5142 emit_jump_if_reachable (default_label);
5143 }
5144 }
5145 else
5146 {
5147 int win = 0;
5148 #ifdef HAVE_casesi
5149 if (HAVE_casesi)
5150 {
5151 enum machine_mode index_mode = SImode;
5152 int index_bits = GET_MODE_BITSIZE (index_mode);
5153 rtx op1, op2;
5154 enum machine_mode op_mode;
5155
5156 /* Convert the index to SImode. */
5157 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5158 > GET_MODE_BITSIZE (index_mode))
5159 {
5160 enum machine_mode omode = TYPE_MODE (index_type);
5161 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5162
5163 /* We must handle the endpoints in the original mode. */
5164 index_expr = build (MINUS_EXPR, index_type,
5165 index_expr, minval);
5166 minval = integer_zero_node;
5167 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5168 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5169 emit_jump_insn (gen_bltu (default_label));
5170 /* Now we can safely truncate. */
5171 index = convert_to_mode (index_mode, index, 0);
5172 }
5173 else
5174 {
5175 if (TYPE_MODE (index_type) != index_mode)
5176 {
5177 index_expr = convert (type_for_size (index_bits, 0),
5178 index_expr);
5179 index_type = TREE_TYPE (index_expr);
5180 }
5181
5182 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5183 }
5184 emit_queue ();
5185 index = protect_from_queue (index, 0);
5186 do_pending_stack_adjust ();
5187
5188 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5189 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5190 (index, op_mode))
5191 index = copy_to_mode_reg (op_mode, index);
5192
5193 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5194
5195 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5196 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5197 (op1, op_mode))
5198 op1 = copy_to_mode_reg (op_mode, op1);
5199
5200 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5201
5202 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5203 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5204 (op2, op_mode))
5205 op2 = copy_to_mode_reg (op_mode, op2);
5206
5207 emit_jump_insn (gen_casesi (index, op1, op2,
5208 table_label, default_label));
5209 win = 1;
5210 }
5211 #endif
5212 #ifdef HAVE_tablejump
5213 if (! win && HAVE_tablejump)
5214 {
5215 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5216 fold (build (MINUS_EXPR, index_type,
5217 index_expr, minval)));
5218 index_type = TREE_TYPE (index_expr);
5219 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5220 emit_queue ();
5221 index = protect_from_queue (index, 0);
5222 do_pending_stack_adjust ();
5223
5224 do_tablejump (index, TYPE_MODE (index_type),
5225 expand_expr (range, NULL_RTX, VOIDmode, 0),
5226 table_label, default_label);
5227 win = 1;
5228 }
5229 #endif
5230 if (! win)
5231 abort ();
5232
5233 /* Get table of labels to jump to, in order of case index. */
5234
5235 ncases = TREE_INT_CST_LOW (range) + 1;
5236 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5237 bzero ((char *) labelvec, ncases * sizeof (rtx));
5238
5239 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5240 {
5241 register HOST_WIDE_INT i
5242 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5243
5244 while (1)
5245 {
5246 labelvec[i]
5247 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5248 if (i + TREE_INT_CST_LOW (orig_minval)
5249 == TREE_INT_CST_LOW (n->high))
5250 break;
5251 i++;
5252 }
5253 }
5254
5255 /* Fill in the gaps with the default. */
5256 for (i = 0; i < ncases; i++)
5257 if (labelvec[i] == 0)
5258 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5259
5260 /* Output the table. */
5261 emit_label (table_label);
5262
5263 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5264 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5265 gen_rtx_LABEL_REF (Pmode, table_label),
5266 gen_rtvec_v (ncases, labelvec),
5267 const0_rtx, const0_rtx, 0));
5268 else
5269 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5270 gen_rtvec_v (ncases, labelvec)));
5271
5272 /* If the case insn drops through the table,
5273 after the table we must jump to the default-label.
5274 Otherwise record no drop-through after the table. */
5275 #ifdef CASE_DROPS_THROUGH
5276 emit_jump (default_label);
5277 #else
5278 emit_barrier ();
5279 #endif
5280 }
5281
5282 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5283 reorder_insns (before_case, get_last_insn (),
5284 thiscase->data.case_stmt.start);
5285 }
5286 else
5287 end_cleanup_deferral ();
5288
5289 if (thiscase->exit_label)
5290 emit_label (thiscase->exit_label);
5291
5292 POPSTACK (case_stack);
5293
5294 free_temp_slots ();
5295 }
5296
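/* Illustrative sketch, not part of the compiler: how a dense jump
   table is assembled from a case list, mirroring the labelvec code
   above. All names here are hypothetical. Each case [LOW, HIGH]
   claims one slot per value, offset by the smallest case value, and
   any slot left empty falls back to the default target. */

struct example_case
{
  int low, high;                /* inclusive range of case values */
  int target;                   /* label id to dispatch to */
  struct example_case *next;
};

static void
example_build_table (list, minval, ncases, default_target, table)
     struct example_case *list;
     int minval, ncases, default_target;
     int *table;
{
  int i;
  struct example_case *c;

  /* Every slot starts out pointing at the default label.  */
  for (i = 0; i < ncases; i++)
    table[i] = default_target;

  /* Each case range then claims one slot per value it covers.  */
  for (c = list; c; c = c->next)
    for (i = c->low; i <= c->high; i++)
      table[i - minval] = c->target;
}
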
5297 /* Convert the tree NODE into a list linked by the right field, with the left
5298 field zeroed. RIGHT is used for recursion; it is a list to be placed
5299 rightmost in the resulting list. */
5300
5301 static struct case_node *
5302 case_tree2list (node, right)
5303 struct case_node *node, *right;
5304 {
5305 struct case_node *left;
5306
5307 if (node->right)
5308 right = case_tree2list (node->right, right);
5309
5310 node->right = right;
5311 if ((left = node->left))
5312 {
5313 node->left = 0;
5314 return case_tree2list (left, node);
5315 }
5316
5317 return node;
5318 }
5319
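/* Hypothetical usage sketch, never called from the compiler: given a
   balanced tree with root B and children A and C, case_tree2list
   yields the in-order list A -> B -> C threaded through the right
   fields, with every left field zeroed.  */

static struct case_node *
example_flatten (a, b, c)
     struct case_node *a, *b, *c;
{
  b->left = a;
  b->right = c;
  a->left = a->right = 0;
  c->left = c->right = 0;

  /* The result is A; afterwards A->right == B, B->right == C, and
     all left fields are zero.  */
  return case_tree2list (b, 0);
}
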
5320 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5321
5322 static void
5323 do_jump_if_equal (op1, op2, label, unsignedp)
5324 rtx op1, op2, label;
5325 int unsignedp;
5326 {
5327 if (GET_CODE (op1) == CONST_INT
5328 && GET_CODE (op2) == CONST_INT)
5329 {
5330 if (INTVAL (op1) == INTVAL (op2))
5331 emit_jump (label);
5332 }
5333 else
5334 {
5335 enum machine_mode mode = GET_MODE (op1);
5336 if (mode == VOIDmode)
5337 mode = GET_MODE (op2);
5338 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5339 emit_jump_insn (gen_beq (label));
5340 }
5341 }
5342 \f
5343 /* Not all case values are encountered equally. This function
5344 uses a heuristic to weight case labels, in cases where that
5345 looks like a reasonable thing to do.
5346
5347 Right now, all we try to guess is text, and we establish the
5348 following weights:
5349
5350 chars above space: 16
5351 digits: 16
5352 default: 12
5353 space, punct: 8
5354 tab: 4
5355 newline: 2
5356 other "\" chars: 1
5357 remaining chars: 0
5358
5359 If we find any cases in the switch that are not either -1 or in the range
5360 of valid ASCII characters, or are control characters other than those
5361 commonly used with "\", don't treat this switch as scanning text.
5362
5363 Return 1 if these nodes are suitable for cost estimation, otherwise
5364 return 0. */
5365
5366 static int
5367 estimate_case_costs (node)
5368 case_node_ptr node;
5369 {
5370 tree min_ascii = build_int_2 (-1, -1);
5371 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5372 case_node_ptr n;
5373 int i;
5374
5375 /* If we haven't already made the cost table, make it now. Note that the
5376 lower bound of the table is -1, not zero. */
5377
5378 if (cost_table == NULL)
5379 {
5380 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5381 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5382
5383 for (i = 0; i < 128; i++)
5384 {
5385 if (ISALNUM (i))
5386 cost_table[i] = 16;
5387 else if (ISPUNCT (i))
5388 cost_table[i] = 8;
5389 else if (ISCNTRL (i))
5390 cost_table[i] = -1;
5391 }
5392
5393 cost_table[' '] = 8;
5394 cost_table['\t'] = 4;
5395 cost_table['\0'] = 4;
5396 cost_table['\n'] = 2;
5397 cost_table['\f'] = 1;
5398 cost_table['\v'] = 1;
5399 cost_table['\b'] = 1;
5400 }
5401
5402 /* See if all the case expressions look like text. It is text if the
5403 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5404 as signed arithmetic since we don't want to ever access cost_table with a
5405 value less than -1. Also check that none of the constants in a range
5406 are strange control characters. */
5407
5408 for (n = node; n; n = n->right)
5409 {
5410 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5411 return 0;
5412
5413 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5414 if (cost_table[i] < 0)
5415 return 0;
5416 }
5417
5418 /* All interesting values are within the range of interesting
5419 ASCII characters. */
5420 return 1;
5421 }
5422
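/* Illustrative sketch (hypothetical names, plain ints): the text
   heuristic above in miniature. COSTS is assumed to be indexed from
   -1, like cost_table. A case set is costed as text only when every
   value lies in [-1, 127] and none of them lands on a slot the table
   marks as a strange control character (negative cost).  */

static int
example_looks_like_text (vals, n, costs)
     int *vals, n;
     short *costs;
{
  int i;

  for (i = 0; i < n; i++)
    {
      if (vals[i] < -1 || vals[i] > 127)
        return 0;               /* outside the ASCII range */
      if (costs[vals[i]] < 0)
        return 0;               /* an unusual control character */
    }
  return 1;
}
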
5423 /* Scan an ordered list of case nodes
5424 combining those with consecutive values or ranges.
5425
5426 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5427
5428 static void
5429 group_case_nodes (head)
5430 case_node_ptr head;
5431 {
5432 case_node_ptr node = head;
5433
5434 while (node)
5435 {
5436 rtx lb = next_real_insn (label_rtx (node->code_label));
5437 rtx lb2;
5438 case_node_ptr np = node;
5439
5440 /* Try to group the successors of NODE with NODE. */
5441 while (((np = np->right) != 0)
5442 /* Do they jump to the same place? */
5443 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5444 || (lb != 0 && lb2 != 0
5445 && simplejump_p (lb)
5446 && simplejump_p (lb2)
5447 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5448 SET_SRC (PATTERN (lb2)))))
5449 /* Are their ranges consecutive? */
5450 && tree_int_cst_equal (np->low,
5451 fold (build (PLUS_EXPR,
5452 TREE_TYPE (node->high),
5453 node->high,
5454 integer_one_node)))
5455 /* An overflow is not consecutive. */
5456 && tree_int_cst_lt (node->high,
5457 fold (build (PLUS_EXPR,
5458 TREE_TYPE (node->high),
5459 node->high,
5460 integer_one_node))))
5461 {
5462 node->high = np->high;
5463 }
5464 /* NP is the first node after NODE which can't be grouped with it.
5465 Delete the nodes in between, and move on to that node. */
5466 node->right = np;
5467 node = np;
5468 }
5469 }
5470
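/* Illustrative sketch (hypothetical names): the same grouping on a
   plain list of unsigned ranges that are assumed to all jump to one
   place. An entry absorbs its successor whenever the successor
   begins exactly one past the entry's end, so 1: 2: 3: collapses to
   1..3:. Unsigned arithmetic keeps the overflow guard well defined.  */

struct example_range
{
  unsigned int low, high;
  struct example_range *next;
};

static void
example_group_ranges (node)
     struct example_range *node;
{
  while (node)
    {
      struct example_range *np = node;

      /* Absorb successors that continue this range without a gap;
         HIGH + 1 > HIGH fails exactly when HIGH + 1 would wrap,
         mirroring "an overflow is not consecutive" above.  */
      while ((np = np->next) != 0
             && node->high + 1 > node->high
             && np->low == node->high + 1)
        node->high = np->high;

      /* NP is the first entry that could not be grouped; unlink
         everything in between and continue from there.  */
      node->next = np;
      node = np;
    }
}
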
5471 /* Take an ordered list of case nodes
5472 and transform them into a near optimal binary tree,
5473 on the assumption that any target code selection value is as
5474 likely as any other.
5475
5476 The transformation is performed by splitting the ordered
5477 list into two equal sections plus a pivot. The parts are
5478 then attached to the pivot as left and right branches. Each
5479 branch is then transformed recursively. */
5480
5481 static void
5482 balance_case_nodes (head, parent)
5483 case_node_ptr *head;
5484 case_node_ptr parent;
5485 {
5486 register case_node_ptr np;
5487
5488 np = *head;
5489 if (np)
5490 {
5491 int cost = 0;
5492 int i = 0;
5493 int ranges = 0;
5494 register case_node_ptr *npp;
5495 case_node_ptr left;
5496
5497 /* Count the number of entries on this branch. Also count the ranges. */
5498
5499 while (np)
5500 {
5501 if (!tree_int_cst_equal (np->low, np->high))
5502 {
5503 ranges++;
5504 if (use_cost_table)
5505 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5506 }
5507
5508 if (use_cost_table)
5509 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5510
5511 i++;
5512 np = np->right;
5513 }
5514
5515 if (i > 2)
5516 {
5517 /* Split this list if it is long enough for that to help. */
5518 npp = head;
5519 left = *npp;
5520 if (use_cost_table)
5521 {
5522 /* Find the place in the list that bisects the list's total cost;
5523 here I gets half the total cost. */
5524 int n_moved = 0;
5525 i = (cost + 1) / 2;
5526 while (1)
5527 {
5528 /* Skip nodes while their cost does not reach that amount. */
5529 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5530 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5531 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5532 if (i <= 0)
5533 break;
5534 npp = &(*npp)->right;
5535 n_moved += 1;
5536 }
5537 if (n_moved == 0)
5538 {
5539 /* Leave this branch lopsided, but optimize left-hand
5540 side and fill in `parent' fields for right-hand side. */
5541 np = *head;
5542 np->parent = parent;
5543 balance_case_nodes (&np->left, np);
5544 for (; np->right; np = np->right)
5545 np->right->parent = np;
5546 return;
5547 }
5548 }
5549 /* If there are just three nodes, split at the middle one. */
5550 else if (i == 3)
5551 npp = &(*npp)->right;
5552 else
5553 {
5554 /* Find the place in the list that bisects the list's total cost,
5555 where ranges count as 2.
5556 Here I gets half the total cost. */
5557 i = (i + ranges + 1) / 2;
5558 while (1)
5559 {
5560 /* Skip nodes while their cost does not reach that amount. */
5561 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5562 i--;
5563 i--;
5564 if (i <= 0)
5565 break;
5566 npp = &(*npp)->right;
5567 }
5568 }
5569 *head = np = *npp;
5570 *npp = 0;
5571 np->parent = parent;
5572 np->left = left;
5573
5574 /* Optimize each of the two split parts. */
5575 balance_case_nodes (&np->left, np);
5576 balance_case_nodes (&np->right, np);
5577 }
5578 else
5579 {
5580 /* Else leave this branch as one level,
5581 but fill in `parent' fields. */
5582 np = *head;
5583 np->parent = parent;
5584 for (; np->right; np = np->right)
5585 np->right->parent = np;
5586 }
5587 }
5588 }
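
/* Illustrative sketch (hypothetical names): the same split-at-the-
   middle balancing as balance_case_nodes, on a plain right-linked
   list, ignoring the cost table and the extra weight given to
   ranges. Left fields are assumed to start out zero, as
   case_tree2list leaves them.  */

struct example_tnode
{
  struct example_tnode *left, *right, *parent;
};

static void
example_balance (head, parent)
     struct example_tnode **head;
     struct example_tnode *parent;
{
  struct example_tnode *np;
  int i = 0;

  for (np = *head; np; np = np->right)
    i++;                        /* count the entries on this branch */

  if (i > 2)
    {
      struct example_tnode **npp = head;
      struct example_tnode *left = *npp;
      int k;

      /* Walk half way along the list to find the pivot.  */
      for (k = i / 2; k > 0; k--)
        npp = &(*npp)->right;

      /* Detach the pivot; what precedes it becomes the left branch
         and what follows it the right branch, each balanced in turn.  */
      *head = np = *npp;
      *npp = 0;
      np->parent = parent;
      np->left = left;

      example_balance (&np->left, np);
      example_balance (&np->right, np);
    }
  else if (*head)
    {
      /* Two or fewer entries stay as one level; just fill in the
         parent fields.  */
      np = *head;
      np->parent = parent;
      for (; np->right; np = np->right)
        np->right->parent = np;
    }
}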
5589 \f
5590 /* Search the parent sections of the case node tree
5591 to see if a test for the lower bound of NODE would be redundant.
5592 INDEX_TYPE is the type of the index expression.
5593
5594 The instructions to generate the case decision tree are
5595 output in the same order as nodes are processed, so it is
5596 known that if a parent node checks the range of the current
5597 node minus one, the current node is bounded at its lower
5598 span. Thus the test would be redundant. */
5599
5600 static int
5601 node_has_low_bound (node, index_type)
5602 case_node_ptr node;
5603 tree index_type;
5604 {
5605 tree low_minus_one;
5606 case_node_ptr pnode;
5607
5608 /* If the lower bound of this node is the lowest value in the index type,
5609 we need not test it. */
5610
5611 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5612 return 1;
5613
5614 /* If this node has a left branch, the value at the left must be less
5615 than that at this node, so it cannot be bounded at the bottom and
5616 we need not bother testing any further. */
5617
5618 if (node->left)
5619 return 0;
5620
5621 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5622 node->low, integer_one_node));
5623
5624 /* If the subtraction above overflowed, we can't verify anything.
5625 Otherwise, look for a parent that tests our value - 1. */
5626
5627 if (! tree_int_cst_lt (low_minus_one, node->low))
5628 return 0;
5629
5630 for (pnode = node->parent; pnode; pnode = pnode->parent)
5631 if (tree_int_cst_equal (low_minus_one, pnode->high))
5632 return 1;
5633
5634 return 0;
5635 }
5636
5637 /* Search the parent sections of the case node tree
5638 to see if a test for the upper bound of NODE would be redundant.
5639 INDEX_TYPE is the type of the index expression.
5640
5641 The instructions to generate the case decision tree are
5642 output in the same order as nodes are processed, so it is
5643 known that if a parent node checks the range of the current
5644 node plus one, the current node is bounded at its upper
5645 span. Thus the test would be redundant. */
5646
5647 static int
5648 node_has_high_bound (node, index_type)
5649 case_node_ptr node;
5650 tree index_type;
5651 {
5652 tree high_plus_one;
5653 case_node_ptr pnode;
5654
5655 /* If there is no upper bound, obviously no test is needed. */
5656
5657 if (TYPE_MAX_VALUE (index_type) == NULL)
5658 return 1;
5659
5660 /* If the upper bound of this node is the highest value in the type
5661 of the index expression, we need not test against it. */
5662
5663 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5664 return 1;
5665
5666 /* If this node has a right branch, the value at the right must be greater
5667 than that at this node, so it cannot be bounded at the top and
5668 we need not bother testing any further. */
5669
5670 if (node->right)
5671 return 0;
5672
5673 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5674 node->high, integer_one_node));
5675
5676 /* If the addition above overflowed, we can't verify anything.
5677 Otherwise, look for a parent that tests our value + 1. */
5678
5679 if (! tree_int_cst_lt (node->high, high_plus_one))
5680 return 0;
5681
5682 for (pnode = node->parent; pnode; pnode = pnode->parent)
5683 if (tree_int_cst_equal (high_plus_one, pnode->low))
5684 return 1;
5685
5686 return 0;
5687 }
5688
5689 /* Search the parent sections of the
5690 case node tree to see if both tests for the upper and lower
5691 bounds of NODE would be redundant. */
5692
5693 static int
5694 node_is_bounded (node, index_type)
5695 case_node_ptr node;
5696 tree index_type;
5697 {
5698 return (node_has_low_bound (node, index_type)
5699 && node_has_high_bound (node, index_type));
5700 }
5701
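/* Illustrative sketch (hypothetical, on plain ints): the pruning idea
   behind the two functions above. If an ancestor has already branched
   on the value LOW - 1, every path reaching this node has index >=
   LOW, so the node need not retest its lower bound.  */

struct example_bnode
{
  int low, high;
  struct example_bnode *left, *right, *parent;
};

static int
example_has_low_bound (node, type_min)
     struct example_bnode *node;
     int type_min;
{
  struct example_bnode *p;

  if (node->low == type_min)
    return 1;                   /* nothing below LOW to exclude */
  if (node->left)
    return 0;                   /* smaller values remain in the tree */

  for (p = node->parent; p; p = p->parent)
    if (p->high == node->low - 1)
      return 1;                 /* a parent already tested LOW - 1 */

  return 0;
}
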
5702 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5703
5704 static void
5705 emit_jump_if_reachable (label)
5706 rtx label;
5707 {
5708 if (GET_CODE (get_last_insn ()) != BARRIER)
5709 emit_jump (label);
5710 }
5711 \f
5712 /* Emit step-by-step code to select a case for the value of INDEX.
5713 The thus generated decision tree follows the form of the
5714 case-node binary tree NODE, whose nodes represent test conditions.
5715 INDEX_TYPE is the type of the index of the switch.
5716
5717 Care is taken to prune redundant tests from the decision tree
5718 by detecting any boundary conditions already checked by
5719 emitted rtx. (See node_has_high_bound, node_has_low_bound
5720 and node_is_bounded, above.)
5721
5722 Where the test conditions can be shown to be redundant we emit
5723 an unconditional jump to the target code. As a further
5724 optimization, the subordinates of a tree node are examined to
5725 check for bounded nodes. In this case conditional and/or
5726 unconditional jumps resulting from the boundary check for the
5727 current node are arranged to target the subordinates' associated
5728 code for out-of-bound conditions on the current node.
5729
5730 We can assume that when control reaches the code generated here,
5731 the index value has already been compared with the parents
5732 of this node, and determined to be on the same side of each parent
5733 as this node is. Thus, if this node tests for the value 51,
5734 and a parent tested for 52, we don't need to consider
5735 the possibility of a value greater than 51. If another parent
5736 tests for the value 50, then this node need not test anything. */
5737
5738 static void
5739 emit_case_nodes (index, node, default_label, index_type)
5740 rtx index;
5741 case_node_ptr node;
5742 rtx default_label;
5743 tree index_type;
5744 {
5745 /* If INDEX has an unsigned type, we must make unsigned branches. */
5746 int unsignedp = TREE_UNSIGNED (index_type);
5747 typedef rtx rtx_fn ();
5748 rtx_fn *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5749 rtx_fn *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5750 rtx_fn *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5751 rtx_fn *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5752 enum machine_mode mode = GET_MODE (index);
5753
5754 /* See if our parents have already tested everything for us.
5755 If they have, emit an unconditional jump for this node. */
5756 if (node_is_bounded (node, index_type))
5757 emit_jump (label_rtx (node->code_label));
5758
5759 else if (tree_int_cst_equal (node->low, node->high))
5760 {
5761 /* Node is single valued. First see if the index expression matches
5762 this node and then check our children, if any. */
5763
5764 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5765 label_rtx (node->code_label), unsignedp);
5766
5767 if (node->right != 0 && node->left != 0)
5768 {
5769 /* This node has children on both sides.
5770 Dispatch to one side or the other
5771 by comparing the index value with this node's value.
5772 If one subtree is bounded, check that one first,
5773 so we can avoid real branches in the tree. */
5774
5775 if (node_is_bounded (node->right, index_type))
5776 {
5777 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5778 VOIDmode, 0),
5779 GT, NULL_RTX, mode, unsignedp, 0);
5780
5781 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5782 emit_case_nodes (index, node->left, default_label, index_type);
5783 }
5784
5785 else if (node_is_bounded (node->left, index_type))
5786 {
5787 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5788 VOIDmode, 0),
5789 LT, NULL_RTX, mode, unsignedp, 0);
5790 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5791 emit_case_nodes (index, node->right, default_label, index_type);
5792 }
5793
5794 else
5795 {
5796 /* Neither node is bounded. First distinguish the two sides;
5797 then emit the code for one side at a time. */
5798
5799 tree test_label
5800 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5801
5802 /* See if the value is on the right. */
5803 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5804 VOIDmode, 0),
5805 GT, NULL_RTX, mode, unsignedp, 0);
5806 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5807
5808 /* Value must be on the left.
5809 Handle the left-hand subtree. */
5810 emit_case_nodes (index, node->left, default_label, index_type);
5811 /* If left-hand subtree does nothing,
5812 go to default. */
5813 emit_jump_if_reachable (default_label);
5814
5815 /* Code branches here for the right-hand subtree. */
5816 expand_label (test_label);
5817 emit_case_nodes (index, node->right, default_label, index_type);
5818 }
5819 }
5820
5821 else if (node->right != 0 && node->left == 0)
5822 {
5823 /* Here we have a right child but no left, so we issue a conditional
5824 branch to default and process the right child.
5825
5826 Omit the conditional branch to default if it would only avoid one
5827 right child; it costs too much space to save so little time. */
5828
5829 if (node->right->right || node->right->left
5830 || !tree_int_cst_equal (node->right->low, node->right->high))
5831 {
5832 if (!node_has_low_bound (node, index_type))
5833 {
5834 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5835 VOIDmode, 0),
5836 LT, NULL_RTX, mode, unsignedp, 0);
5837 emit_jump_insn ((*gen_blt_pat) (default_label));
5838 }
5839
5840 emit_case_nodes (index, node->right, default_label, index_type);
5841 }
5842 else
5843 /* We cannot process node->right normally
5844 since we haven't ruled out the numbers less than
5845 this node's value. So handle node->right explicitly. */
5846 do_jump_if_equal (index,
5847 expand_expr (node->right->low, NULL_RTX,
5848 VOIDmode, 0),
5849 label_rtx (node->right->code_label), unsignedp);
5850 }
5851
5852 else if (node->right == 0 && node->left != 0)
5853 {
5854 /* Just one subtree, on the left. */
5855
5856 #if 0 /* The following code and comment were formerly part
5857 of the condition here, but they didn't work
5858 and I don't understand what the idea was. -- rms. */
5859 /* If our "most probable entry" is less probable
5860 than the default label, emit a jump to
5861 the default label using condition codes
5862 already lying around. With no right branch,
5863 a branch-greater-than will get us to the default
5864 label correctly. */
5865 if (use_cost_table
5866 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5867 ;
5868 #endif /* 0 */
5869 if (node->left->left || node->left->right
5870 || !tree_int_cst_equal (node->left->low, node->left->high))
5871 {
5872 if (!node_has_high_bound (node, index_type))
5873 {
5874 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5875 VOIDmode, 0),
5876 GT, NULL_RTX, mode, unsignedp, 0);
5877 emit_jump_insn ((*gen_bgt_pat) (default_label));
5878 }
5879
5880 emit_case_nodes (index, node->left, default_label, index_type);
5881 }
5882 else
5883 /* We cannot process node->left normally
5884 since we haven't ruled out the numbers greater than
5885 this node's value. So handle node->left explicitly. */
5886 do_jump_if_equal (index,
5887 expand_expr (node->left->low, NULL_RTX,
5888 VOIDmode, 0),
5889 label_rtx (node->left->code_label), unsignedp);
5890 }
5891 }
5892 else
5893 {
5894 /* Node is a range. These cases are very similar to those for a single
5895 value, except that we do not start by testing whether this node
5896 is the one to branch to. */
5897
5898 if (node->right != 0 && node->left != 0)
5899 {
5900 /* Node has subtrees on both sides.
5901 If the right-hand subtree is bounded,
5902 test for it first, since we can go straight there.
5903 Otherwise, we need to make a branch in the control structure,
5904 then handle the two subtrees. */
5905 tree test_label = 0;
5906
5907 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5908 VOIDmode, 0),
5909 GT, NULL_RTX, mode, unsignedp, 0);
5910
5911 if (node_is_bounded (node->right, index_type))
5912 /* Right hand node is fully bounded so we can eliminate any
5913 testing and branch directly to the target code. */
5914 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5915 else
5916 {
5917 /* Right hand node requires testing.
5918 Branch to a label where we will handle it later. */
5919
5920 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5921 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5922 }
5923
5924 /* Value belongs to this node or to the left-hand subtree. */
5925
5926 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5927 GE, NULL_RTX, mode, unsignedp, 0);
5928 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5929
5930 /* Handle the left-hand subtree. */
5931 emit_case_nodes (index, node->left, default_label, index_type);
5932
5933 /* If right node had to be handled later, do that now. */
5934
5935 if (test_label)
5936 {
5937 /* If the left-hand subtree fell through,
5938 don't let it fall into the right-hand subtree. */
5939 emit_jump_if_reachable (default_label);
5940
5941 expand_label (test_label);
5942 emit_case_nodes (index, node->right, default_label, index_type);
5943 }
5944 }
5945
5946 else if (node->right != 0 && node->left == 0)
5947 {
5948 /* Deal with values to the left of this node,
5949 if they are possible. */
5950 if (!node_has_low_bound (node, index_type))
5951 {
5952 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5953 VOIDmode, 0),
5954 LT, NULL_RTX, mode, unsignedp, 0);
5955 emit_jump_insn ((*gen_blt_pat) (default_label));
5956 }
5957
5958 /* Value belongs to this node or to the right-hand subtree. */
5959
5960 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5961 VOIDmode, 0),
5962 LE, NULL_RTX, mode, unsignedp, 0);
5963 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5964
5965 emit_case_nodes (index, node->right, default_label, index_type);
5966 }
5967
5968 else if (node->right == 0 && node->left != 0)
5969 {
5970 /* Deal with values to the right of this node,
5971 if they are possible. */
5972 if (!node_has_high_bound (node, index_type))
5973 {
5974 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5975 VOIDmode, 0),
5976 GT, NULL_RTX, mode, unsignedp, 0);
5977 emit_jump_insn ((*gen_bgt_pat) (default_label));
5978 }
5979
5980 /* Value belongs to this node or to the left-hand subtree. */
5981
5982 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5983 GE, NULL_RTX, mode, unsignedp, 0);
5984 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5985
5986 emit_case_nodes (index, node->left, default_label, index_type);
5987 }
5988
5989 else
5990 {
5991 /* Node has no children, so we check the low and high bounds to remove
5992 redundant tests. At most one of the bounds can hold,
5993 since otherwise this node is bounded--a case tested already. */
5994
5995 if (!node_has_high_bound (node, index_type))
5996 {
5997 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5998 VOIDmode, 0),
5999 GT, NULL_RTX, mode, unsignedp, 0);
6000 emit_jump_insn ((*gen_bgt_pat) (default_label));
6001 }
6002
6003 if (!node_has_low_bound (node, index_type))
6004 {
6005 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6006 VOIDmode, 0),
6007 LT, NULL_RTX, mode, unsignedp, 0);
6008 emit_jump_insn ((*gen_blt_pat) (default_label));
6009 }
6010
6011 emit_jump (label_rtx (node->code_label));
6012 }
6013 }
6014 }
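
/* Illustrative sketch (hypothetical, ordinary C rather than RTL): the
   shape of the decision tree emit_case_nodes produces for a sparse
   switch on the values 1, 5 and 9, with 5 as the balanced pivot.
   Each comparison below stands for one emitted compare/branch pair.  */

static int
example_dispatch (x)
     int x;
{
  if (x == 5)
    return 50;                  /* the pivot is tested first */
  if (x > 5)
    {
      if (x == 9)
        return 90;              /* right-hand subtree */
    }
  else
    {
      if (x == 1)
        return 10;              /* left-hand subtree */
    }
  return 0;                     /* the default label */
}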
6015 \f
6016 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6017 so that the debugging info will be correct for the unrolled loop. */
6018
6019 /* Indexed by block number, contains a pointer to the N'th block node.
6020
6021 Allocated by the call to identify_blocks, then released after the call
6022 to reorder_blocks in the function unroll_block_trees. */
6023
6024 static tree *block_vector;
6025
6026 void
6027 find_loop_tree_blocks ()
6028 {
6029 tree block = DECL_INITIAL (current_function_decl);
6030
6031 block_vector = identify_blocks (block, get_insns ());
6032 }
6033
6034 void
6035 unroll_block_trees ()
6036 {
6037 tree block = DECL_INITIAL (current_function_decl);
6038
6039 reorder_blocks (block_vector, block, get_insns ());
6040
6041 /* Release any memory allocated by identify_blocks. */
6042 if (block_vector)
6043 free (block_vector);
6044 }
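
/* Hypothetical usage sketch for the two entry points above, as the
   loop unroller is expected to pair them: find_loop_tree_blocks must
   run first so that block_vector is live while the BLOCK trees are
   copied, and unroll_block_trees then reorders the blocks and frees
   the vector.  */

static void
example_unroll_bookkeeping ()
{
  find_loop_tree_blocks ();
  /* ... copy BLOCK trees for the unrolled loop bodies here ...  */
  unroll_block_trees ();
}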