/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
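/* A minimal sketch of that protocol, assuming a C-like front end; the
   call sequence below is illustrative, not a verbatim excerpt from any
   parser.  For `if (c) s1; else s2;' the calls run roughly:

       expand_start_cond (c, 0);   ...emit the test of C...
       (expand s1)
       expand_start_else ();       ...jump to endif, begin else arm...
       (expand s2)
       expand_end_cond ();         ...define the endif label...  */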

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

extern void (*interim_eh_hook) PROTO((tree));
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

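/* Illustrative example (assumed front-end input, not from the original
   comments): in GNU C,

       switch (i) { case 1: ...  case 4 ... 7: ... }

   initially yields two case_nodes chained through RIGHT: one with
   low == high == 1, and one range node with low == 4 and high == 7.  */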
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)
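/* Usage sketch (an illustration, not original text): the end-function
   for a construct pops its own entry, plus anything nested inside it,
   off every stack at once:

       struct nesting *thisloop = loop_stack;
       (emit the loop's closing jumps and labels)
       POPSTACK (thisloop);

   This unlinks THISLOOP and all deeper entries from nesting_stack and
   from each per-type stack, freeing them on stmt_obstack.  */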
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
static void expand_goto_internal PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
                                           struct bc_label *, tree));
static int expand_fixup PROTO((tree, rtx, rtx));
static void bc_expand_fixup PROTO((enum bytecode_opcode,
                                   struct bc_label *, int));
static void fixup_gotos PROTO((struct nesting *, rtx, tree,
                               rtx, int));
static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
                                  rtx, int));
static void bc_expand_start_cond PROTO((tree, int));
static void bc_expand_end_cond PROTO((void));
static void bc_expand_start_else PROTO((void));
static void bc_expand_end_loop PROTO((void));
static void bc_expand_end_bindings PROTO((tree, int, int));
static void bc_expand_decl PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init PROTO((tree));
static void expand_null_return_1 PROTO((rtx, int));
static void expand_value_return PROTO((rtx));
static int tail_recursion_args PROTO((tree, tree));
static void expand_cleanups PROTO((tree, tree, int, int));
static void bc_expand_start_case PROTO((struct nesting *, tree,
                                        tree, char *));
static int bc_pushcase PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case PROTO((tree));
static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs PROTO((case_node_ptr));
static void group_case_nodes PROTO((case_node_ptr));
static void balance_case_nodes PROTO((case_node_ptr *,
                                      case_node_ptr));
static int node_has_low_bound PROTO((case_node_ptr, tree));
static int node_has_high_bound PROTO((case_node_ptr, tree));
static int node_is_bounded PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable PROTO((rtx));
static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));

int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
          && (GET_CODE (last_insn) == CODE_LABEL
              || prev_real_insn (last_insn) == 0))
        emit_insn (gen_nop ());
    }
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
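/* Note (illustrative): a forward `goto lab;' reaches label_rtx before
   `lab:' has been expanded, so the CODE_LABEL is created here first;
   the later expand_label call emits that same rtx, keeping both sites
   agreed on one label.  */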

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
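/* Illustrative input (assuming GNU C's labels-as-values extension):

       void *p = &&lab;
       goto *p;            ...reaches expand_computed_goto with EXP == p...

   In bytecode mode this pushes the value and emits a jumpP instruction;
   in RTL mode it becomes an indirect jump insn.  */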
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
        DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
        error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with hard_frame_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (p->nonlocal_goto_handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx, label_ref);
          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
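/* Illustrative source for the nonlocal case above (a sketch, assuming
   GNU C nested functions):

       void outer ()
       {
         void inner () { goto lab; }   ...nonlocal goto...
         inner ();
        lab: ;
       }

   While expanding `inner', decl_function_context (label) is OUTER, not
   current_function_decl, so the path above restores OUTER's frame and
   stack pointers before jumping.  */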

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      /* Calling expand_goto_internal here again would recurse forever;
         hand the plain `jump' case to the bytecode expander.  */
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this would clobber
             the stack pointer.  This one should be deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {

      /* Find the innermost pending block that contains the label.
         (Check containment by comparing bytecode uids.)  Then restore the
         outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
        {
          if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
            break;
          if (block->data.block.bc_stack_level)
            stack_level = block->data.block.bc_stack_level;

          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      /* Restore the stack level.  If we need to adjust the stack, we
         must do so after the jump, since the jump may depend on
         what's on the stack.  Thus, any stack-modifying conditional
         jumps (these are the only ones that rely on what's on the
         stack) go into the fixup list.  */

      if (stack_level >= 0
          && stack_depth != stack_level
          && opcode != jump)

        bc_expand_fixup (opcode, label, stack_level);
      else
        {
          if (stack_level >= 0)
            bc_adjust_stack (stack_depth - stack_level);

          if (body && DECL_BIT_FIELD (body))
            error ("jump to `%s' invalidly jumps into binding contour",
                   IDENTIFIER_POINTER (DECL_NAME (body)));

          /* Emit immediate jump */
          bc_emit_bytecode (opcode);
          bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
          fputc ('\n', stderr);
#endif
        }
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        start_sequence ();
        pushlevel (0);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
        end_sequence ();
        emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
#if 0
             && block->data.block.outer_cleanups != empty_cleanup_list
#endif
             )
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
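/* Sketch of when a fixup arises (illustrative source, not from the
   original comments):

       {
         int n = f ();
         char buf[n];   ...block gets a stack level to restore...
         goto out;      ...label not yet defined: expand_fixup is used...
       }
      out: ;

   The goto must restore the stack pointer saved on entry to the block;
   the restoring insns are inserted at the fixup once `out' becomes
   defined and the contour is exited.  */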

/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
\f
/* Expand any needed fixups in the outputmost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.
   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
         as STACK_LEVEL.  I have no idea what should go here, so I'll
         just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_REGISTER (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_REGISTER (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              f->before_jump
                = emit_insns_after (cleanup_insns, f->before_jump);

              TREE_VALUE (lists) = 0;
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}


/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev)
            prev->next = f->next;
        }

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
         the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
        {
          saved_stack_depth = stack_depth;
          bc_adjust_stack (stack_depth - f->bc_stack_level);
          stack_depth = saved_stack_depth;
        }

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
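/* Illustrative input (assumed GNU C):

       asm ("nop");

   reaches expand_asm with BODY the STRING_CST "nop", and is emitted as
   a bare ASM_INPUT rtx.  */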

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
        switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
          {
          case '+':
            error ("output operand constraint contains `+'");
            return;

          case '=':
            found_equal = 1;
            break;

          case '?':  case '!':  case '*':  case '%':  case '&':
          case '0':  case '1':  case '2':  case '3':  case '4':
          case 'V':  case 'm':  case 'o':  case '<':  case '>':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case 'p':  case 'g':  case 'r':
          default:
            allows_reg = 1;
            break;
          }

      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
        }
      else
        {
          if (TYPE_MODE (type) == BLKmode)
            {
              output_rtx[i] = assign_stack_temp (BLKmode,
                                                 int_size_in_bytes (type), 0);
              MEM_IN_STRUCT_P (output_rtx[i]) = AGGREGATE_TYPE_P (type);
            }
          else
            output_rtx[i] = gen_reg_rtx (TYPE_MODE (type));

          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
          {
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;
          }

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
          && ! general_operand (XVECEXP (body, 3, i),
                                TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
        XVECEXP (body, 3, i)
          = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                       XVECEXP (body, 3, i));
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx (CLOBBER, VOIDmode,
                               gen_rtx (MEM, BLKmode,
                                        gen_rtx (SCRATCH, VOIDmode, 0)));
                  continue;
                }

              /* Ignore unknown register, error already signalled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
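/* Illustrative input for the extended form (an assumed example; the
   template and constraints are generic, not target-specific):

       asm volatile ("add %1,%0" : "=r" (x) : "g" (y) : "cc");

   Here OUTPUTS holds `x' with constraint "=r", INPUTS holds `y' with
   constraint "g", CLOBBERS names `cc' (skipped above since it is not a
   register), and VOL is nonzero because of `volatile'.  */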
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
        abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
        ;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         BLKmode, 0,
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1730
1731 /* Warn if EXP contains any computations whose results are not used.
1732 Return 1 if a warning is printed; 0 otherwise. */
1733
1734 int
1735 warn_if_unused_value (exp)
1736 tree exp;
1737 {
1738 if (TREE_USED (exp))
1739 return 0;
1740
1741 switch (TREE_CODE (exp))
1742 {
1743 case PREINCREMENT_EXPR:
1744 case POSTINCREMENT_EXPR:
1745 case PREDECREMENT_EXPR:
1746 case POSTDECREMENT_EXPR:
1747 case MODIFY_EXPR:
1748 case INIT_EXPR:
1749 case TARGET_EXPR:
1750 case CALL_EXPR:
1751 case METHOD_CALL_EXPR:
1752 case RTL_EXPR:
1753 case WITH_CLEANUP_EXPR:
1754 case EXIT_EXPR:
1755 /* We don't warn about COND_EXPR because it may be a useful
1756 construct if either arm contains a side effect. */
1757 case COND_EXPR:
1758 return 0;
1759
1760 case BIND_EXPR:
1761 /* For a binding, warn if no side effect within it. */
1762 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1763
1764 case SAVE_EXPR:
1765 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1766
1767 case TRUTH_ORIF_EXPR:
1768 case TRUTH_ANDIF_EXPR:
1769 /* In && or ||, warn if 2nd operand has no side effect. */
1770 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1771
1772 case COMPOUND_EXPR:
1773 if (TREE_NO_UNUSED_WARNING (exp))
1774 return 0;
1775 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1776 return 1;
1777 /* Let people do `(foo (), 0)' without a warning. */
1778 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1779 return 0;
1780 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1781
1782 case NOP_EXPR:
1783 case CONVERT_EXPR:
1784 case NON_LVALUE_EXPR:
1785 /* Don't warn about values cast to void. */
1786 if (TREE_TYPE (exp) == void_type_node)
1787 return 0;
1788 /* Don't warn about conversions not explicit in the user's program. */
1789 if (TREE_NO_UNUSED_WARNING (exp))
1790 return 0;
1791 /* Assignment to a cast usually results in a cast of a modify.
1792 Don't complain about that. There can be an arbitrary number of
1793 casts before the modify, so we must loop until we find the first
1794 non-cast expression and then test to see if that is a modify. */
1795 {
1796 tree tem = TREE_OPERAND (exp, 0);
1797
1798 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1799 tem = TREE_OPERAND (tem, 0);
1800
1801 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1802 || TREE_CODE (tem) == CALL_EXPR)
1803 return 0;
1804 }
1805 goto warn;
1806
1807 case INDIRECT_REF:
1808 /* Don't warn about automatic dereferencing of references, since
1809 the user cannot control it. */
1810 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1811 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1812 /* ... fall through ... */
1813
1814 default:
1815 /* Referencing a volatile value is a side effect, so don't warn. */
1816 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1817 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1818 && TREE_THIS_VOLATILE (exp))
1819 return 0;
1820 warn:
1821 warning_with_file_and_line (emit_filename, emit_lineno,
1822 "value computed is not used");
1823 return 1;
1824 }
1825 }
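
/* Illustrative cases for the checks above (sketches, not real tests):

	(f (), x + 1);		warns: `x + 1' is computed but not used
	(f (), 0);		no warning: constant second operand
	(void) (x + 1);		no warning: explicit cast to void
	b ? f () : g ();	no warning: COND_EXPR is exempt  */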
1826
1827 /* Clear out the memory of the last expression evaluated. */
1828
1829 void
1830 clear_last_expr ()
1831 {
1832 last_expr_type = 0;
1833 }
1834
1835 /* Begin a statement which will return a value.
1836 Return the RTL_EXPR for this statement expr.
1837 The caller must save that value and pass it to expand_end_stmt_expr. */
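
/* A sketch of the intended calling sequence (the real caller is in
   the front end):

	tree t = expand_start_stmt_expr ();
	... expand each statement of the ({...}) body ...
	t = expand_end_stmt_expr (t);

   The RTL_EXPR returned by expand_end_stmt_expr carries the
   construct's type, RTL value and insns.  */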
1838
1839 tree
1840 expand_start_stmt_expr ()
1841 {
1842 int momentary;
1843 tree t;
1844
1845 /* When generating bytecode, just note down the stack depth. */
1846 if (output_bytecode)
1847 return (build_int_2 (stack_depth, 0));
1848
1849 /* Make the RTL_EXPR node temporary, not momentary,
1850 so that rtl_expr_chain doesn't become garbage. */
1851 momentary = suspend_momentary ();
1852 t = make_node (RTL_EXPR);
1853 resume_momentary (momentary);
1854 start_sequence_for_rtl_expr (t);
1855 NO_DEFER_POP;
1856 expr_stmts_for_value++;
1857 return t;
1858 }
1859
1860 /* Restore the previous state at the end of a statement that returns a value.
1861 Returns a tree node representing the statement's value and the
1862 insns to compute the value.
1863
1864 The nodes of that expression have been freed by now, so we cannot use them.
1865 But we don't want to do that anyway; the expression has already been
1866 evaluated, and now we just want to use the value. So generate an RTL_EXPR
1867 with the proper type and RTL value.
1868
1869 If the last substatement was not an expression,
1870 return something with type `void'. */
1871
1872 tree
1873 expand_end_stmt_expr (t)
1874 tree t;
1875 {
1876 if (output_bytecode)
1877 {
1878 int i;
1879 tree t;
1880
1881
1882 /* At this point, all expressions have been evaluated in order.
1883 However, all expression values have been popped when evaluated,
1884 which means we have to recover the last expression value. This is
1885 the last value removed by means of a `drop' instruction. Instead
1886 of adding code to inhibit dropping the last expression value, it
1887 is here recovered by undoing the `drop'. Since `drop' is
1888 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1889 [-1]'. */
1890
1891 bc_adjust_stack (-1);
1892
1893 if (!last_expr_type)
1894 last_expr_type = void_type_node;
1895
1896 t = make_node (RTL_EXPR);
1897 TREE_TYPE (t) = last_expr_type;
1898 RTL_EXPR_RTL (t) = NULL;
1899 RTL_EXPR_SEQUENCE (t) = NULL;
1900
1901 /* Don't consider deleting this expr or containing exprs at tree level. */
1902 TREE_THIS_VOLATILE (t) = 1;
1903
1904 last_expr_type = 0;
1905 return t;
1906 }
1907
1908 OK_DEFER_POP;
1909
1910 if (last_expr_type == 0)
1911 {
1912 last_expr_type = void_type_node;
1913 last_expr_value = const0_rtx;
1914 }
1915 else if (last_expr_value == 0)
1916 /* There are some cases where this can happen, such as when the
1917 statement is of void type. */
1918 last_expr_value = const0_rtx;
1919 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1920 /* Remove any possible QUEUED. */
1921 last_expr_value = protect_from_queue (last_expr_value, 0);
1922
1923 emit_queue ();
1924
1925 TREE_TYPE (t) = last_expr_type;
1926 RTL_EXPR_RTL (t) = last_expr_value;
1927 RTL_EXPR_SEQUENCE (t) = get_insns ();
1928
1929 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1930
1931 end_sequence ();
1932
1933 /* Don't consider deleting this expr or containing exprs at tree level. */
1934 TREE_SIDE_EFFECTS (t) = 1;
1935 /* Propagate volatility of the actual RTL expr. */
1936 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1937
1938 last_expr_type = 0;
1939 expr_stmts_for_value--;
1940
1941 return t;
1942 }
1943 \f
1944 /* Generate RTL for the start of an if-then. COND is the expression
1945 whose truth should be tested.
1946
1947 If EXITFLAG is nonzero, this conditional is visible to
1948 `exit_something'. */
1949
1950 void
1951 expand_start_cond (cond, exitflag)
1952 tree cond;
1953 int exitflag;
1954 {
1955 struct nesting *thiscond = ALLOC_NESTING ();
1956
1957 /* Make an entry on cond_stack for the cond we are entering. */
1958
1959 thiscond->next = cond_stack;
1960 thiscond->all = nesting_stack;
1961 thiscond->depth = ++nesting_depth;
1962 thiscond->data.cond.next_label = gen_label_rtx ();
1963 /* Before we encounter an `else', we don't need a separate exit label
1964 unless there are supposed to be exit statements
1965 to exit this conditional. */
1966 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1967 thiscond->data.cond.endif_label = thiscond->exit_label;
1968 cond_stack = thiscond;
1969 nesting_stack = thiscond;
1970
1971 if (output_bytecode)
1972 bc_expand_start_cond (cond, exitflag);
1973 else
1974 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1975 }
1976
1977 /* Generate RTL between the then-clause and the elseif-clause
1978 of an if-then-elseif-.... */
1979
1980 void
1981 expand_start_elseif (cond)
1982 tree cond;
1983 {
1984 if (cond_stack->data.cond.endif_label == 0)
1985 cond_stack->data.cond.endif_label = gen_label_rtx ();
1986 emit_jump (cond_stack->data.cond.endif_label);
1987 emit_label (cond_stack->data.cond.next_label);
1988 cond_stack->data.cond.next_label = gen_label_rtx ();
1989 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1990 }
1991
1992 /* Generate RTL between the then-clause and the else-clause
1993 of an if-then-else. */
1994
1995 void
1996 expand_start_else ()
1997 {
1998 if (cond_stack->data.cond.endif_label == 0)
1999 cond_stack->data.cond.endif_label = gen_label_rtx ();
2000
2001 if (output_bytecode)
2002 {
2003 bc_expand_start_else ();
2004 return;
2005 }
2006
2007 emit_jump (cond_stack->data.cond.endif_label);
2008 emit_label (cond_stack->data.cond.next_label);
2009 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2010 }
2011
2012 /* After calling expand_start_else, turn this "else" into an "else if"
2013 by providing another condition. */
2014
2015 void
2016 expand_elseif (cond)
2017 tree cond;
2018 {
2019 cond_stack->data.cond.next_label = gen_label_rtx ();
2020 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2021 }
2022
2023 /* Generate RTL for the end of an if-then.
2024 Pop the record for it off of cond_stack. */
2025
2026 void
2027 expand_end_cond ()
2028 {
2029 struct nesting *thiscond = cond_stack;
2030
2031 if (output_bytecode)
2032 bc_expand_end_cond ();
2033 else
2034 {
2035 do_pending_stack_adjust ();
2036 if (thiscond->data.cond.next_label)
2037 emit_label (thiscond->data.cond.next_label);
2038 if (thiscond->data.cond.endif_label)
2039 emit_label (thiscond->data.cond.endif_label);
2040 }
2041
2042 POPSTACK (cond_stack);
2043 last_expr_type = 0;
2044 }
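
/* A sketch of the insns the functions above produce for
   `if (c) A; else B;' (illustrative only):

	(jump to next_label if C is false)
	A
	(jump to endif_label)
     next_label:
	B
     endif_label:

   When there is no `else', next_label itself marks the end of the
   construct.  */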
2045
2046
2047 /* Generate code for the start of an if-then. COND is the expression
2048 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2049 is to be visible to exit_something. It is assumed that the caller
2050 has pushed the previous context on the cond stack. */
2051
2052 static void
2053 bc_expand_start_cond (cond, exitflag)
2054 tree cond;
2055 int exitflag;
2056 {
2057 struct nesting *thiscond = cond_stack;
2058
2059 thiscond->data.case_stmt.nominal_type = cond;
2060 if (! exitflag)
2061 thiscond->exit_label = gen_label_rtx ();
2062 bc_expand_expr (cond);
2063 bc_emit_bytecode (xjumpifnot);
2064 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2065
2066 #ifdef DEBUG_PRINT_CODE
2067 fputc ('\n', stderr);
2068 #endif
2069 }
2070
2071 /* Generate the label for the end of an if with
2072 no else-clause. */
2073
2074 static void
2075 bc_expand_end_cond ()
2076 {
2077 struct nesting *thiscond = cond_stack;
2078
2079 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2080 }
2081
2082 /* Generate code for the start of the else-clause of
2083 an if-then-else. */
2084
2085 static void
2086 bc_expand_start_else ()
2087 {
2088 struct nesting *thiscond = cond_stack;
2089
2090 thiscond->data.cond.endif_label = thiscond->exit_label;
2091 thiscond->exit_label = gen_label_rtx ();
2092 bc_emit_bytecode (jump);
2093 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2094
2095 #ifdef DEBUG_PRINT_CODE
2096 fputc ('\n', stderr);
2097 #endif
2098
2099 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2100 }
2101 \f
2102 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2103 loop should be exited by `exit_something'. This is a loop for which
2104 `expand_continue_loop' will jump to the top of the loop.
2105
2106 Make an entry on loop_stack to record the labels associated with
2107 this loop. */
2108
2109 struct nesting *
2110 expand_start_loop (exit_flag)
2111 int exit_flag;
2112 {
2113 register struct nesting *thisloop = ALLOC_NESTING ();
2114
2115 /* Make an entry on loop_stack for the loop we are entering. */
2116
2117 thisloop->next = loop_stack;
2118 thisloop->all = nesting_stack;
2119 thisloop->depth = ++nesting_depth;
2120 thisloop->data.loop.start_label = gen_label_rtx ();
2121 thisloop->data.loop.end_label = gen_label_rtx ();
2122 thisloop->data.loop.alt_end_label = 0;
2123 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2124 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2125 loop_stack = thisloop;
2126 nesting_stack = thisloop;
2127
2128 if (output_bytecode)
2129 {
2130 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2131 return thisloop;
2132 }
2133
2134 do_pending_stack_adjust ();
2135 emit_queue ();
2136 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2137 emit_label (thisloop->data.loop.start_label);
2138
2139 return thisloop;
2140 }
2141
2142 /* Like expand_start_loop but for a loop where the continuation point
2143 (for expand_continue_loop) will be specified explicitly. */
2144
2145 struct nesting *
2146 expand_start_loop_continue_elsewhere (exit_flag)
2147 int exit_flag;
2148 {
2149 struct nesting *thisloop = expand_start_loop (exit_flag);
2150 loop_stack->data.loop.continue_label = gen_label_rtx ();
2151 return thisloop;
2152 }
2153
2154 /* Specify the continuation point for a loop started with
2155 expand_start_loop_continue_elsewhere.
2156 Use this at the point in the code to which a continue statement
2157 should jump. */
2158
2159 void
2160 expand_loop_continue_here ()
2161 {
2162 if (output_bytecode)
2163 {
2164 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2165 return;
2166 }
2167 do_pending_stack_adjust ();
2168 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2169 emit_label (loop_stack->data.loop.continue_label);
2170 }
2171
2172 /* End a loop. */
2173
2174 static void
2175 bc_expand_end_loop ()
2176 {
2177 struct nesting *thisloop = loop_stack;
2178
2179 bc_emit_bytecode (jump);
2180 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2181
2182 #ifdef DEBUG_PRINT_CODE
2183 fputc ('\n', stderr);
2184 #endif
2185
2186 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2187 POPSTACK (loop_stack);
2188 last_expr_type = 0;
2189 }
2190
2191
2192 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2193 Pop the block off of loop_stack. */
2194
2195 void
2196 expand_end_loop ()
2197 {
2198 register rtx insn;
2199 register rtx start_label;
2200 rtx last_test_insn = 0;
2201 int num_insns = 0;
2202
2203 if (output_bytecode)
2204 {
2205 bc_expand_end_loop ();
2206 return;
2207 }
2208
2209 insn = get_last_insn ();
2210 start_label = loop_stack->data.loop.start_label;
2211
2212 /* Mark the continue-point at the top of the loop if none was set elsewhere. */
2213 if (start_label == loop_stack->data.loop.continue_label)
2214 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2215
2216 do_pending_stack_adjust ();
2217
2218 /* If optimizing, perhaps reorder the loop. If the loop
2219 starts with a conditional exit, roll that to the end
2220 where it will optimize together with the jump back.
2221
2222 We look for the last conditional branch to the exit that we encounter
2223 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2224 branch to the exit first, use it.
2225
2226 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2227 because moving them is not valid. */
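
/* An illustrative sketch of the rearrangement: a loop emitted as

	start:	if (!c) goto end;
		body
		goto start;
	end:

   becomes

		goto test;
	newstart:
		body
	test:	if (!c) goto end;
		goto newstart;
	end:

   after which jump optimization can fuse the trailing test and jump
   into a single `if (c) goto newstart;'.  */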
2228
2229 if (optimize
2230 &&
2231 ! (GET_CODE (insn) == JUMP_INSN
2232 && GET_CODE (PATTERN (insn)) == SET
2233 && SET_DEST (PATTERN (insn)) == pc_rtx
2234 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2235 {
2236 /* Scan insns from the top of the loop looking for a qualified
2237 conditional exit. */
2238 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2239 insn = NEXT_INSN (insn))
2240 {
2241 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2242 break;
2243
2244 if (GET_CODE (insn) == NOTE
2245 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2246 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2247 break;
2248
2249 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2250 num_insns++;
2251
2252 if (last_test_insn && num_insns > 30)
2253 break;
2254
2255 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2256 && SET_DEST (PATTERN (insn)) == pc_rtx
2257 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2258 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2259 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2260 == loop_stack->data.loop.end_label)
2261 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2262 == loop_stack->data.loop.alt_end_label)))
2263 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2264 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2265 == loop_stack->data.loop.end_label)
2266 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2267 == loop_stack->data.loop.alt_end_label)))))
2268 last_test_insn = insn;
2269
2270 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2271 && GET_CODE (PATTERN (insn)) == SET
2272 && SET_DEST (PATTERN (insn)) == pc_rtx
2273 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2274 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2275 == loop_stack->data.loop.end_label)
2276 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2277 == loop_stack->data.loop.alt_end_label)))
2278 /* Include BARRIER. */
2279 last_test_insn = NEXT_INSN (insn);
2280 }
2281
2282 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2283 {
2284 /* We found one. Move everything from there up
2285 to the end of the loop, and add a jump into the loop
2286 to jump there. */
2287 register rtx newstart_label = gen_label_rtx ();
2288 register rtx start_move = start_label;
2289
2290 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2291 then we want to move this note also. */
2292 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2293 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2294 == NOTE_INSN_LOOP_CONT))
2295 start_move = PREV_INSN (start_move);
2296
2297 emit_label_after (newstart_label, PREV_INSN (start_move));
2298 reorder_insns (start_move, last_test_insn, get_last_insn ());
2299 emit_jump_insn_after (gen_jump (start_label),
2300 PREV_INSN (newstart_label));
2301 emit_barrier_after (PREV_INSN (newstart_label));
2302 start_label = newstart_label;
2303 }
2304 }
2305
2306 emit_jump (start_label);
2307 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2308 emit_label (loop_stack->data.loop.end_label);
2309
2310 POPSTACK (loop_stack);
2311
2312 last_expr_type = 0;
2313 }
2314
2315 /* Generate a jump to the current loop's continue-point.
2316 This is usually the top of the loop, but may be specified
2317 explicitly elsewhere. If not currently inside a loop,
2318 return 0 and do nothing; caller will print an error message. */
2319
2320 int
2321 expand_continue_loop (whichloop)
2322 struct nesting *whichloop;
2323 {
2324 last_expr_type = 0;
2325 if (whichloop == 0)
2326 whichloop = loop_stack;
2327 if (whichloop == 0)
2328 return 0;
2329 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2330 NULL_RTX);
2331 return 1;
2332 }
2333
2334 /* Generate a jump to exit the current loop. If not currently inside a loop,
2335 return 0 and do nothing; caller will print an error message. */
2336
2337 int
2338 expand_exit_loop (whichloop)
2339 struct nesting *whichloop;
2340 {
2341 last_expr_type = 0;
2342 if (whichloop == 0)
2343 whichloop = loop_stack;
2344 if (whichloop == 0)
2345 return 0;
2346 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2347 return 1;
2348 }
2349
2350 /* Generate a conditional jump to exit the current loop if COND
2351 evaluates to zero. If not currently inside a loop,
2352 return 0 and do nothing; caller will print an error message. */
2353
2354 int
2355 expand_exit_loop_if_false (whichloop, cond)
2356 struct nesting *whichloop;
2357 tree cond;
2358 {
2359 last_expr_type = 0;
2360 if (whichloop == 0)
2361 whichloop = loop_stack;
2362 if (whichloop == 0)
2363 return 0;
2364 if (output_bytecode)
2365 {
2366 bc_expand_expr (cond);
2367 bc_expand_goto_internal (xjumpifnot,
2368 BYTECODE_BC_LABEL (whichloop->exit_label),
2369 NULL_TREE);
2370 }
2371 else
2372 {
2373 /* In order to handle fixups, we actually create a conditional jump
2374 around an unconditional branch to exit the loop. If fixups are
2375 necessary, they go before the unconditional branch. */
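
/* I.e. (a sketch), `exit unless COND' comes out as:

	  if (COND) goto label;
	  ... fixups, if any ...
	  goto end_label;
	label:  */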
2376
2377 rtx label = gen_label_rtx ();
2378 rtx last_insn;
2379
2380 do_jump (cond, NULL_RTX, label);
2381 last_insn = get_last_insn ();
2382 if (GET_CODE (last_insn) == CODE_LABEL)
2383 whichloop->data.loop.alt_end_label = last_insn;
2384 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2385 NULL_RTX);
2386 emit_label (label);
2387 }
2388
2389 return 1;
2390 }
2391
2392 /* Return non-zero if we should preserve sub-expressions as separate
2393 pseudos. We never do so if we aren't optimizing. We always do so
2394 if -fexpensive-optimizations.
2395
2396 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2397 the loop may still be a small one. */
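
/* The test below approximates "early" by insn count: the distance in
   INSN_UIDs from the loop's start label must be less than three times
   the number of allocatable hard registers.  */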
2398
2399 int
2400 preserve_subexpressions_p ()
2401 {
2402 rtx insn;
2403
2404 if (flag_expensive_optimizations)
2405 return 1;
2406
2407 if (optimize == 0 || loop_stack == 0)
2408 return 0;
2409
2410 insn = get_last_insn_anywhere ();
2411
2412 return (insn
2413 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2414 < n_non_fixed_regs * 3));
2415
2416 }
2417
2418 /* Generate a jump to exit the current loop, conditional, binding contour
2419 or case statement. Not all such constructs are visible to this function,
2420 only those started with EXIT_FLAG nonzero. Individual languages use
2421 the EXIT_FLAG parameter to control which kinds of constructs you can
2422 exit this way.
2423
2424 If not currently inside anything that can be exited,
2425 return 0 and do nothing; caller will print an error message. */
2426
2427 int
2428 expand_exit_something ()
2429 {
2430 struct nesting *n;
2431 last_expr_type = 0;
2432 for (n = nesting_stack; n; n = n->all)
2433 if (n->exit_label != 0)
2434 {
2435 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2436 return 1;
2437 }
2438
2439 return 0;
2440 }
2441 \f
2442 /* Generate RTL to return from the current function, with no value.
2443 (That is, we do not do anything about returning any value.) */
2444
2445 void
2446 expand_null_return ()
2447 {
2448 struct nesting *block = block_stack;
2449 rtx last_insn = 0;
2450
2451 if (output_bytecode)
2452 {
2453 bc_emit_instruction (ret);
2454 return;
2455 }
2456
2457 /* Does any pending block have cleanups? */
2458
2459 while (block && block->data.block.cleanups == 0)
2460 block = block->next;
2461
2462 /* If yes, use a goto to return, since that runs cleanups. */
2463
2464 expand_null_return_1 (last_insn, block != 0);
2465 }
2466
2467 /* Generate RTL to return from the current function, with value VAL. */
2468
2469 static void
2470 expand_value_return (val)
2471 rtx val;
2472 {
2473 struct nesting *block = block_stack;
2474 rtx last_insn = get_last_insn ();
2475 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2476
2477 /* Copy the value to the return location
2478 unless it's already there. */
2479
2480 if (return_reg != val)
2481 {
2482 #ifdef PROMOTE_FUNCTION_RETURN
2483 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2484 int unsignedp = TREE_UNSIGNED (type);
2485 enum machine_mode mode
2486 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2487 &unsignedp, 1);
2488
2489 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2490 convert_move (return_reg, val, unsignedp);
2491 else
2492 #endif
2493 emit_move_insn (return_reg, val);
2494 }
2495 if (GET_CODE (return_reg) == REG
2496 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2497 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2498
2499 /* Does any pending block have cleanups? */
2500
2501 while (block && block->data.block.cleanups == 0)
2502 block = block->next;
2503
2504 /* If yes, use a goto to return, since that runs cleanups.
2505 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2506
2507 expand_null_return_1 (last_insn, block != 0);
2508 }
2509
2510 /* Output a return with no value. If LAST_INSN is nonzero,
2511 pretend that the return takes place after LAST_INSN.
2512 If USE_GOTO is nonzero then don't use a return instruction;
2513 go to the return label instead. This causes any cleanups
2514 of pending blocks to be executed normally. */
2515
2516 static void
2517 expand_null_return_1 (last_insn, use_goto)
2518 rtx last_insn;
2519 int use_goto;
2520 {
2521 rtx end_label = cleanup_label ? cleanup_label : return_label;
2522
2523 clear_pending_stack_adjust ();
2524 do_pending_stack_adjust ();
2525 last_expr_type = 0;
2526
2527 /* PCC-struct return always uses an epilogue. */
2528 if (current_function_returns_pcc_struct || use_goto)
2529 {
2530 if (end_label == 0)
2531 end_label = return_label = gen_label_rtx ();
2532 expand_goto_internal (NULL_TREE, end_label, last_insn);
2533 return;
2534 }
2535
2536 /* Otherwise output a simple return-insn if one is available,
2537 unless it won't do the job. */
2538 #ifdef HAVE_return
2539 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2540 {
2541 emit_jump_insn (gen_return ());
2542 emit_barrier ();
2543 return;
2544 }
2545 #endif
2546
2547 /* Otherwise jump to the epilogue. */
2548 expand_goto_internal (NULL_TREE, end_label, last_insn);
2549 }
2550 \f
2551 /* Generate RTL to evaluate the expression RETVAL and return it
2552 from the current function. */
2553
2554 void
2555 expand_return (retval)
2556 tree retval;
2557 {
2558 /* If there are any cleanups to be performed, then they will
2559 be inserted following LAST_INSN. It is desirable
2560 that the last_insn, for such purposes, should be the
2561 last insn before computing the return value. Otherwise, cleanups
2562 which call functions can clobber the return value. */
2563 /* ??? rms: I think that is erroneous, because in C++ it would
2564 run destructors on variables that might be used in the subsequent
2565 computation of the return value. */
2566 rtx last_insn = 0;
2567 register rtx val = 0;
2568 register rtx op0;
2569 tree retval_rhs;
2570 int cleanups;
2571 struct nesting *block;
2572
2573 /* Bytecode returns are quite simple: just leave the result on the
2574 arithmetic stack. */
2575 if (output_bytecode)
2576 {
2577 bc_expand_expr (retval);
2578 bc_emit_instruction (ret);
2579 return;
2580 }
2581
2582 /* If function wants no value, give it none. */
2583 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2584 {
2585 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2586 emit_queue ();
2587 expand_null_return ();
2588 return;
2589 }
2590
2591 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2592 /* This is not sufficient. We also need to watch for cleanups of the
2593 expression we are about to expand. Unfortunately, we cannot know
2594 if it has cleanups until we expand it, and we want to change how we
2595 expand it depending upon if we need cleanups. We can't win. */
2596 #if 0
2597 cleanups = any_pending_cleanups (1);
2598 #else
2599 cleanups = 1;
2600 #endif
2601
2602 if (TREE_CODE (retval) == RESULT_DECL)
2603 retval_rhs = retval;
2604 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2605 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2606 retval_rhs = TREE_OPERAND (retval, 1);
2607 else if (TREE_TYPE (retval) == void_type_node)
2608 /* Recognize tail-recursive call to void function. */
2609 retval_rhs = retval;
2610 else
2611 retval_rhs = NULL_TREE;
2612
2613 /* Only use `last_insn' if there are cleanups which must be run. */
2614 if (cleanups || cleanup_label != 0)
2615 last_insn = get_last_insn ();
2616
2617 /* Distribute return down conditional expr if either of the sides
2618 may involve tail recursion (see test below). This enhances the number
2619 of tail recursions we see. Don't do this always since it can produce
2620 sub-optimal code in some cases and we distribute assignments into
2621 conditional expressions when it would help. */
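
/* For example (a sketch), `return c ? f (x) : 0;' is expanded as if
   written `if (c) return f (x); else return 0;', which lets the
   tail-recursion test below see the CALL_EXPR directly.  */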
2622
2623 if (optimize && retval_rhs != 0
2624 && frame_offset == 0
2625 && TREE_CODE (retval_rhs) == COND_EXPR
2626 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2627 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2628 {
2629 rtx label = gen_label_rtx ();
2630 tree expr;
2631
2632 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2633 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2634 DECL_RESULT (current_function_decl),
2635 TREE_OPERAND (retval_rhs, 1));
2636 TREE_SIDE_EFFECTS (expr) = 1;
2637 expand_return (expr);
2638 emit_label (label);
2639
2640 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2641 DECL_RESULT (current_function_decl),
2642 TREE_OPERAND (retval_rhs, 2));
2643 TREE_SIDE_EFFECTS (expr) = 1;
2644 expand_return (expr);
2645 return;
2646 }
2647
2648 /* For tail-recursive call to current function,
2649 just jump back to the beginning.
2650 It's unsafe if any auto variable in this function
2651 has its address taken; for simplicity,
2652 require stack frame to be empty. */
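/* For instance (illustrative), given

	int f (n) int n; { ... return f (n - 1); ... }

   the recursive return compiles to a store of n - 1 into n's rtl
   followed by a jump to tail_recursion_label near the top of the
   function.  */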
2653 if (optimize && retval_rhs != 0
2654 && frame_offset == 0
2655 && TREE_CODE (retval_rhs) == CALL_EXPR
2656 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2657 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2658 /* Finish checking validity, and if valid emit code
2659 to set the argument variables for the new call. */
2660 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2661 DECL_ARGUMENTS (current_function_decl)))
2662 {
2663 if (tail_recursion_label == 0)
2664 {
2665 tail_recursion_label = gen_label_rtx ();
2666 emit_label_after (tail_recursion_label,
2667 tail_recursion_reentry);
2668 }
2669 emit_queue ();
2670 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2671 emit_barrier ();
2672 return;
2673 }
2674 #ifdef HAVE_return
2675 /* This optimization is safe if there are local cleanups
2676 because expand_null_return takes care of them.
2677 ??? I think it should also be safe when there is a cleanup label,
2678 because expand_null_return takes care of them, too.
2679 Any reason why not? */
2680 if (HAVE_return && cleanup_label == 0
2681 && ! current_function_returns_pcc_struct
2682 && BRANCH_COST <= 1)
2683 {
2684 /* If this is return x == y; then generate
2685 if (x == y) return 1; else return 0;
2686 if we can do it with explicit return insns and
2687 branches are cheap. */
2688 if (retval_rhs)
2689 switch (TREE_CODE (retval_rhs))
2690 {
2691 case EQ_EXPR:
2692 case NE_EXPR:
2693 case GT_EXPR:
2694 case GE_EXPR:
2695 case LT_EXPR:
2696 case LE_EXPR:
2697 case TRUTH_ANDIF_EXPR:
2698 case TRUTH_ORIF_EXPR:
2699 case TRUTH_AND_EXPR:
2700 case TRUTH_OR_EXPR:
2701 case TRUTH_NOT_EXPR:
2702 case TRUTH_XOR_EXPR:
2703 op0 = gen_label_rtx ();
2704 jumpifnot (retval_rhs, op0);
2705 expand_value_return (const1_rtx);
2706 emit_label (op0);
2707 expand_value_return (const0_rtx);
2708 return;
2709 }
2710 }
2711 #endif /* HAVE_return */
2712
2713 /* If the result is an aggregate that is being returned in one (or more)
2714 registers, load the registers here. The compiler currently can't handle
2715 copying a BLKmode value into registers. We could put this code in a
2716 more general area (for use by everyone instead of just function
2717 call/return), but until this feature is generally usable it is kept here
2718 (and in expand_call). The value must go into a pseudo in case there
2719 are cleanups that will clobber the real return register. */
2720
2721 if (retval_rhs != 0
2722 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2723 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2724 {
2725 int i;
2726 int big_endian_correction = 0;
2727 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2728 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2729 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2730 rtx result_reg;
2731 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2732 enum machine_mode tmpmode, result_reg_mode;
2733
2734 /* Structures smaller than a word are aligned to the least significant
2735 byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
2736 must skip the empty high order bytes when calculating the bit
2737 offset. */
2738 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
2739 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
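
/* E.g. (illustrative), a 3-byte structure on a 32-bit big-endian
   target gets big_endian_correction = 32 - 3 * 8 = 8, skipping the
   one empty high-order byte.  */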
2740
2741 for (i = 0; i < n_regs; i++)
2742 {
2743 rtx reg = gen_reg_rtx (word_mode);
2744 rtx word = operand_subword_force (result_val, i, BLKmode);
2745 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2746 int bitpos;
2747
2748 result_pseudos[i] = reg;
2749
2750 /* Clobber REG and move each partword into it. Ensure we don't
2751 go past the end of the structure. Note that the loop below
2752 works because we've already verified that padding and
2753 endianness are compatible. */
2754 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2755
2756 for (bitpos = 0;
2757 bitpos < BITS_PER_WORD && bytes > 0;
2758 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
2759 {
2760 int xbitpos = bitpos + big_endian_correction;
2761
2762 store_bit_field (reg, bitsize, xbitpos, word_mode,
2763 extract_bit_field (word, bitsize, bitpos, 1,
2764 NULL_RTX, word_mode,
2765 word_mode,
2766 bitsize / BITS_PER_UNIT,
2767 BITS_PER_WORD),
2768 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2769 }
2770 }
2771
2772 /* Find the smallest integer mode large enough to hold the
2773 entire structure and use that mode instead of BLKmode
2774 on the USE insn for the return register. */
2775 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2776 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2777 tmpmode != MAX_MACHINE_MODE;
2778 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2779 {
2780 /* Have we found a large enough mode? */
2781 if (GET_MODE_SIZE (tmpmode) >= bytes)
2782 break;
2783 }
2784
2785 /* No suitable mode found. */
2786 if (tmpmode == MAX_MACHINE_MODE)
2787 abort ();
2788
2789 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2790
2791 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2792 result_reg_mode = word_mode;
2793 else
2794 result_reg_mode = tmpmode;
2795 result_reg = gen_reg_rtx (result_reg_mode);
2796
2797 /* Now that the value is in pseudos, copy it to the result reg(s). */
2798 emit_queue ();
2799 free_temp_slots ();
2800 for (i = 0; i < n_regs; i++)
2801 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2802 result_pseudos[i]);
2803
2804 if (tmpmode != result_reg_mode)
2805 result_reg = gen_lowpart (tmpmode, result_reg);
2806
2807 expand_value_return (result_reg);
2808 }
2809 else if (cleanups
2810 && retval_rhs != 0
2811 && TREE_TYPE (retval_rhs) != void_type_node
2812 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2813 {
2814 /* Calculate the return value into a pseudo reg. */
2815 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2816 emit_queue ();
2817 /* All temporaries have now been used. */
2818 free_temp_slots ();
2819 /* Return the calculated value, doing cleanups first. */
2820 expand_value_return (val);
2821 }
2822 else
2823 {
2824 /* No cleanups or no hard reg used;
2825 calculate value into hard return reg. */
2826 expand_expr (retval, const0_rtx, VOIDmode, 0);
2827 emit_queue ();
2828 free_temp_slots ();
2829 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2830 }
2831 }
2832
2833 /* Return 1 if the end of the generated RTX is not a barrier.
2834 This means code already compiled can drop through. */
2835
2836 int
2837 drop_through_at_end_p ()
2838 {
2839 rtx insn = get_last_insn ();
2840 while (insn && GET_CODE (insn) == NOTE)
2841 insn = PREV_INSN (insn);
2842 return insn && GET_CODE (insn) != BARRIER;
2843 }
2844 \f
2845 /* Emit code to alter this function's formal parms for a tail-recursive call.
2846 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2847 FORMALS is the chain of decls of formals.
2848 Return 1 if this can be done;
2849 otherwise return 0 and do not emit any code. */
2850
2851 static int
2852 tail_recursion_args (actuals, formals)
2853 tree actuals, formals;
2854 {
2855 register tree a = actuals, f = formals;
2856 register int i;
2857 register rtx *argvec;
2858
2859 /* Check that number and types of actuals are compatible
2860 with the formals. This is not always true in valid C code.
2861 Also check that no formal needs to be addressable
2862 and that all formals are scalars. */
2863
2864 /* Also count the args. */
2865
2866 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2867 {
2868 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2869 return 0;
2870 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2871 return 0;
2872 }
2873 if (a != 0 || f != 0)
2874 return 0;
2875
2876 /* Compute all the actuals. */
2877
2878 argvec = (rtx *) alloca (i * sizeof (rtx));
2879
2880 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2881 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2882
2883 /* Find which actual values refer to current values of previous formals.
2884 Copy each of them now, before any formal is changed. */
2885
2886 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2887 {
2888 int copy = 0;
2889 register int j;
2890 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2891 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2892 { copy = 1; break; }
2893 if (copy)
2894 argvec[i] = copy_to_reg (argvec[i]);
2895 }
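
/* The copying above matters for a call like `return f (b, a);' inside
   f (a, b): without first copying actual `a' into a fresh register,
   the store into formal `a' below would clobber the value still
   needed for formal `b'. (Illustrative example.)  */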
2896
2897 /* Store the values of the actuals into the formals. */
2898
2899 for (f = formals, a = actuals, i = 0; f;
2900 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2901 {
2902 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2903 emit_move_insn (DECL_RTL (f), argvec[i]);
2904 else
2905 convert_move (DECL_RTL (f), argvec[i],
2906 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2907 }
2908
2909 free_temp_slots ();
2910 return 1;
2911 }
2912 \f
2913 /* Generate the RTL code for entering a binding contour.
2914 The variables are declared one by one, by calls to `expand_decl'.
2915
2916 EXIT_FLAG is nonzero if this construct should be visible to
2917 `exit_something'. */
2918
2919 void
2920 expand_start_bindings (exit_flag)
2921 int exit_flag;
2922 {
2923 struct nesting *thisblock = ALLOC_NESTING ();
2924 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2925
2926 /* Make an entry on block_stack for the block we are entering. */
2927
2928 thisblock->next = block_stack;
2929 thisblock->all = nesting_stack;
2930 thisblock->depth = ++nesting_depth;
2931 thisblock->data.block.stack_level = 0;
2932 thisblock->data.block.cleanups = 0;
2933 thisblock->data.block.function_call_count = 0;
2934 #if 0
2935 if (block_stack)
2936 {
2937 if (block_stack->data.block.cleanups == NULL_TREE
2938 && (block_stack->data.block.outer_cleanups == NULL_TREE
2939 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2940 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2941 else
2942 thisblock->data.block.outer_cleanups
2943 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2944 block_stack->data.block.outer_cleanups);
2945 }
2946 else
2947 thisblock->data.block.outer_cleanups = 0;
2948 #endif
2949 #if 1
2950 if (block_stack
2951 && !(block_stack->data.block.cleanups == NULL_TREE
2952 && block_stack->data.block.outer_cleanups == NULL_TREE))
2953 thisblock->data.block.outer_cleanups
2954 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2955 block_stack->data.block.outer_cleanups);
2956 else
2957 thisblock->data.block.outer_cleanups = 0;
2958 #endif
2959 thisblock->data.block.label_chain = 0;
2960 thisblock->data.block.innermost_stack_block = stack_block_stack;
2961 thisblock->data.block.first_insn = note;
2962 thisblock->data.block.block_start_count = ++block_start_count;
2963 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2964 block_stack = thisblock;
2965 nesting_stack = thisblock;
2966
2967 if (!output_bytecode)
2968 {
2969 /* Make a new level for allocating stack slots. */
2970 push_temp_slots ();
2971 }
2972 }
2973
2974 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2975 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2976 BLOCK node. */
2977
2978 void
2979 remember_end_note (block)
2980 register tree block;
2981 {
2982 BLOCK_END_NOTE (block) = last_block_end_note;
2983 last_block_end_note = NULL_RTX;
2984 }
2985
2986 /* Generate RTL code to terminate a binding contour.
2987 VARS is the chain of VAR_DECL nodes
2988 for the variables bound in this contour.
2989 MARK_ENDS is nonzero if we should put a note at the beginning
2990 and end of this binding contour.
2991
2992 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2993 (That is true automatically if the contour has a saved stack level.) */
2994
2995 void
2996 expand_end_bindings (vars, mark_ends, dont_jump_in)
2997 tree vars;
2998 int mark_ends;
2999 int dont_jump_in;
3000 {
3001 register struct nesting *thisblock = block_stack;
3002 register tree decl;
3003
3004 if (output_bytecode)
3005 {
3006 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3007 return;
3008 }
3009
3010 if (warn_unused)
3011 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3012 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3013 && ! DECL_IN_SYSTEM_HEADER (decl))
3014 warning_with_decl (decl, "unused variable `%s'");
3015
3016 if (thisblock->exit_label)
3017 {
3018 do_pending_stack_adjust ();
3019 emit_label (thisblock->exit_label);
3020 }
3021
3022 /* If necessary, make a handler for nonlocal gotos taking
3023 place in the function calls in this block. */
3024 if (function_call_count != thisblock->data.block.function_call_count
3025 && nonlocal_labels
3026 /* Make handler for outermost block
3027 if there were any nonlocal gotos to this function. */
3028 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3029 /* Make handler for inner block if it has something
3030 special to do when you jump out of it. */
3031 : (thisblock->data.block.cleanups != 0
3032 || thisblock->data.block.stack_level != 0)))
3033 {
3034 tree link;
3035 rtx afterward = gen_label_rtx ();
3036 rtx handler_label = gen_label_rtx ();
3037 rtx save_receiver = gen_reg_rtx (Pmode);
3038 rtx insns;
3039
3040 /* Don't let jump_optimize delete the handler. */
3041 LABEL_PRESERVE_P (handler_label) = 1;
3042
3043 /* Record the handler address in the stack slot for that purpose,
3044 during this block, saving and restoring the outer value. */
3045 if (thisblock->next != 0)
3046 {
3047 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3048
3049 start_sequence ();
3050 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3051 insns = get_insns ();
3052 end_sequence ();
3053 emit_insns_before (insns, thisblock->data.block.first_insn);
3054 }
3055
3056 start_sequence ();
3057 emit_move_insn (nonlocal_goto_handler_slot,
3058 gen_rtx (LABEL_REF, Pmode, handler_label));
3059 insns = get_insns ();
3060 end_sequence ();
3061 emit_insns_before (insns, thisblock->data.block.first_insn);
3062
3063 /* Jump around the handler; it runs only when specially invoked. */
3064 emit_jump (afterward);
3065 emit_label (handler_label);
3066
3067 #ifdef HAVE_nonlocal_goto
3068 if (! HAVE_nonlocal_goto)
3069 #endif
3070 /* First adjust our frame pointer to its actual value. It was
3071 previously set to the start of the virtual area corresponding to
3072 the stacked variables when we branched here, and it now needs to be
3073 adjusted to the actual hardware fp value.
3074
3075 Assignments to virtual registers are converted by
3076 instantiate_virtual_regs into the corresponding assignment
3077 to the underlying register (fp in this case) that makes
3078 the original assignment true.
3079 So the following insn will actually be
3080 decrementing fp by STARTING_FRAME_OFFSET. */
3081 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3082
3083 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3084 if (fixed_regs[ARG_POINTER_REGNUM])
3085 {
3086 #ifdef ELIMINABLE_REGS
3087 /* If the argument pointer can be eliminated in favor of the
3088 frame pointer, we don't need to restore it. We assume here
3089 that if such an elimination is present, it can always be used.
3090 This is the case on all known machines; if we don't make this
3091 assumption, we do unnecessary saving on many machines. */
3092 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3093 int i;
3094
3095 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3096 if (elim_regs[i].from == ARG_POINTER_REGNUM
3097 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3098 break;
3099
3100 if (i == sizeof elim_regs / sizeof elim_regs [0])
3101 #endif
3102 {
3103 /* Now restore our arg pointer from the address at which it
3104 was saved in our stack frame.
3105 If there hasn't been space allocated for it yet, make
3106 some now. */
3107 if (arg_pointer_save_area == 0)
3108 arg_pointer_save_area
3109 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3110 emit_move_insn (virtual_incoming_args_rtx,
3111 /* We need a pseudo here, or else
3112 instantiate_virtual_regs_1 complains. */
3113 copy_to_reg (arg_pointer_save_area));
3114 }
3115 }
3116 #endif
3117
3118 /* The handler expects the desired label address in the static chain
3119 register. It tests the address and does an appropriate jump
3120 to whatever label is desired. */
3121 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3122 /* Skip any labels we shouldn't be able to jump to from here. */
3123 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3124 {
3125 rtx not_this = gen_label_rtx ();
3126 rtx this = gen_label_rtx ();
3127 do_jump_if_equal (static_chain_rtx,
3128 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3129 this, 0);
3130 emit_jump (not_this);
3131 emit_label (this);
3132 expand_goto (TREE_VALUE (link));
3133 emit_label (not_this);
3134 }
3135 /* If label is not recognized, abort. */
3136 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3137 VOIDmode, 0);
3138 emit_barrier ();
3139 emit_label (afterward);
3140 }
3141
3142 /* Don't allow jumping into a block that has cleanups or a stack level. */
3143 if (dont_jump_in
3144 || thisblock->data.block.stack_level != 0
3145 || thisblock->data.block.cleanups != 0)
3146 {
3147 struct label_chain *chain;
3148
3149 /* Any labels in this block are no longer valid to go to.
3150 Mark them to cause an error message. */
3151 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3152 {
3153 DECL_TOO_LATE (chain->label) = 1;
3154 /* If any goto without a fixup came to this label,
3155 that must be an error, because gotos without fixups
3156 come from outside all saved stack-levels and all cleanups. */
3157 if (TREE_ADDRESSABLE (chain->label))
3158 error_with_decl (chain->label,
3159 "label `%s' used before containing binding contour");
3160 }
3161 }
3162
3163 /* Restore stack level in effect before the block
3164 (only if variable-size objects allocated). */
3165 /* Perform any cleanups associated with the block. */
3166
3167 if (thisblock->data.block.stack_level != 0
3168 || thisblock->data.block.cleanups != 0)
3169 {
3170 /* Only clean up here if this point can actually be reached. */
3171 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3172
3173 /* Don't let cleanups affect ({...}) constructs. */
3174 int old_expr_stmts_for_value = expr_stmts_for_value;
3175 rtx old_last_expr_value = last_expr_value;
3176 tree old_last_expr_type = last_expr_type;
3177 expr_stmts_for_value = 0;
3178
3179 /* Do the cleanups. */
3180 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3181 if (reachable)
3182 do_pending_stack_adjust ();
3183
3184 expr_stmts_for_value = old_expr_stmts_for_value;
3185 last_expr_value = old_last_expr_value;
3186 last_expr_type = old_last_expr_type;
3187
3188 /* Restore the stack level. */
3189
3190 if (reachable && thisblock->data.block.stack_level != 0)
3191 {
3192 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3193 thisblock->data.block.stack_level, NULL_RTX);
3194 if (nonlocal_goto_handler_slot != 0)
3195 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3196 NULL_RTX);
3197 }
3198
3199 /* Any gotos out of this block must also do these things.
3200 Also report any gotos with fixups that came to labels in this
3201 level. */
3202 fixup_gotos (thisblock,
3203 thisblock->data.block.stack_level,
3204 thisblock->data.block.cleanups,
3205 thisblock->data.block.first_insn,
3206 dont_jump_in);
3207 }
3208
3209 /* Mark the beginning and end of the scope if requested.
3210 We do this now, after running cleanups on the variables
3211 just going out of scope, so they are in scope for their cleanups. */
3212
3213 if (mark_ends)
3214 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3215 else
3216 /* Get rid of the beginning-mark if we don't make an end-mark. */
3217 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3218
3219 /* If doing stupid register allocation, make sure lives of all
3220 register variables declared here extend thru end of scope. */
3221
3222 if (obey_regdecls)
3223 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3224 {
3225 rtx rtl = DECL_RTL (decl);
3226 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3227 use_variable (rtl);
3228 }
3229
3230 /* Restore block_stack level for containing block. */
3231
3232 stack_block_stack = thisblock->data.block.innermost_stack_block;
3233 POPSTACK (block_stack);
3234
3235 /* Pop the stack slot nesting and free any slots at this level. */
3236 pop_temp_slots ();
3237 }
3238
3239
3240 /* End a binding contour.
3241 VARS is the chain of VAR_DECL nodes for the variables bound
3242 in this contour. MARK_ENDS is nonzero if we should put a note
3243 at the beginning and end of this binding contour.
3244 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3245 contour. */
3246
3247 static void
3248 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3249 tree vars;
3250 int mark_ends;
3251 int dont_jump_in;
3252 {
3253 struct nesting *thisbind = nesting_stack;
3254 tree decl;
3255
3256 if (warn_unused)
3257 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3258 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3259 warning_with_decl (decl, "unused variable `%s'");
3260
3261 if (thisbind->exit_label)
3262 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3263
3264 /* Pop block/bindings off the stack. */
3265 POPSTACK (block_stack);
3266 }
3267 \f
3268 /* Generate RTL for the automatic variable declaration DECL.
3269 (Other kinds of declarations are simply ignored if seen here.)
3270 CLEANUP is an expression to be executed at exit from this binding contour;
3271 for example, in C++, it might call the destructor for this variable.
3272
3273 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3274 either before or after calling `expand_decl' but before compiling
3275 any subsequent expressions. This is because CLEANUP may be expanded
3276 more than once, on different branches of execution.
3277 For the same reason, CLEANUP may not contain a CALL_EXPR
3278 except as its topmost node--else `preexpand_calls' would get confused.
3279
3280 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3281 that is not associated with any particular variable.
3282
3283 There is no special support here for C++ constructors.
3284 They should be handled by the proper code in DECL_INITIAL. */
3285
3286 void
3287 expand_decl (decl)
3288 register tree decl;
3289 {
3290 struct nesting *thisblock = block_stack;
3291 tree type;
3292
3293 if (output_bytecode)
3294 {
3295 bc_expand_decl (decl, 0);
3296 return;
3297 }
3298
3299 type = TREE_TYPE (decl);
3300
3301 /* Only automatic variables need any expansion done.
3302 Static and external variables, and external functions,
3303 will be handled by `assemble_variable' (called from finish_decl).
3304 TYPE_DECL and CONST_DECL require nothing.
3305 PARM_DECLs are handled in `assign_parms'. */
3306
3307 if (TREE_CODE (decl) != VAR_DECL)
3308 return;
3309 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3310 return;
3311
3312 /* Create the RTL representation for the variable. */
3313
3314 if (type == error_mark_node)
3315 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3316 else if (DECL_SIZE (decl) == 0)
3317 /* Variable with incomplete type. */
3318 {
3319 if (DECL_INITIAL (decl) == 0)
3320 /* Error message was already done; now avoid a crash. */
3321 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3322 else
3323 /* An initializer is going to decide the size of this array.
3324 Until we know the size, represent its address with a reg. */
3325 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3326 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3327 }
3328 else if (DECL_MODE (decl) != BLKmode
3329 /* If -ffloat-store, don't put explicit float vars
3330 into regs. */
3331 && !(flag_float_store
3332 && TREE_CODE (type) == REAL_TYPE)
3333 && ! TREE_THIS_VOLATILE (decl)
3334 && ! TREE_ADDRESSABLE (decl)
3335 && (DECL_REGISTER (decl) || ! obey_regdecls))
3336 {
3337 /* Automatic variable that can go in a register. */
3338 int unsignedp = TREE_UNSIGNED (type);
3339 enum machine_mode reg_mode
3340 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3341
3342 if (TREE_CODE (type) == COMPLEX_TYPE)
3343 {
3344 rtx realpart, imagpart;
3345 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3346
3347 /* For a complex type variable, make a CONCAT of two pseudos
3348 so that the real and imaginary parts
3349 can be allocated separately. */
3350 realpart = gen_reg_rtx (partmode);
3351 REG_USERVAR_P (realpart) = 1;
3352 imagpart = gen_reg_rtx (partmode);
3353 REG_USERVAR_P (imagpart) = 1;
3354 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3355 }
3356 else
3357 {
3358 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3359 if (TREE_CODE (type) == POINTER_TYPE)
3360 mark_reg_pointer (DECL_RTL (decl));
3361 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3362 }
3363 }
3364 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3365 {
3366 /* Variable of fixed size that goes on the stack. */
3367 rtx oldaddr = 0;
3368 rtx addr;
3369
3370 /* If we previously made RTL for this decl, it must be an array
3371 whose size was determined by the initializer.
3372 The old address was a register; set that register now
3373 to the proper address. */
3374 if (DECL_RTL (decl) != 0)
3375 {
3376 if (GET_CODE (DECL_RTL (decl)) != MEM
3377 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3378 abort ();
3379 oldaddr = XEXP (DECL_RTL (decl), 0);
3380 }
3381
3382 DECL_RTL (decl)
3383 = assign_stack_temp (DECL_MODE (decl),
3384 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3385 + BITS_PER_UNIT - 1)
3386 / BITS_PER_UNIT),
3387 1);
3388 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3389
3390 /* Set alignment we actually gave this decl. */
3391 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3392 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3393
3394 if (oldaddr)
3395 {
3396 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3397 if (addr != oldaddr)
3398 emit_move_insn (oldaddr, addr);
3399 }
3400
3401 /* If this is a memory ref that contains aggregate components,
3402 mark it as such for cse and loop optimize. */
3403 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3404 #if 0
3405 /* If this is in memory because of -ffloat-store,
3406 set the volatile bit, to prevent optimizations from
3407 undoing the effects. */
3408 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3409 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3410 #endif
3411 }
3412 else
3413 /* Dynamic-size object: must push space on the stack. */
3414 {
3415 rtx address, size;
3416
3417 /* Record the stack pointer on entry to the block, if we have
3418 not already done so. */
3419 if (thisblock->data.block.stack_level == 0)
3420 {
3421 do_pending_stack_adjust ();
3422 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3423 &thisblock->data.block.stack_level,
3424 thisblock->data.block.first_insn);
3425 stack_block_stack = thisblock;
3426 }
3427
3428 /* Compute the variable's size, in bytes. */
3429 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3430 DECL_SIZE (decl),
3431 size_int (BITS_PER_UNIT)),
3432 NULL_RTX, VOIDmode, 0);
3433 free_temp_slots ();
3434
3435 /* Allocate space on the stack for the variable. */
3436 address = allocate_dynamic_stack_space (size, NULL_RTX,
3437 DECL_ALIGN (decl));
3438
3439 /* Reference the variable indirectly through that rtx. */
3440 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3441
3442 /* If this is a memory ref that contains aggregate components,
3443 mark it as such for cse and loop optimize. */
3444 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3445
3446 /* Indicate the alignment we actually gave this variable. */
3447 #ifdef STACK_BOUNDARY
3448 DECL_ALIGN (decl) = STACK_BOUNDARY;
3449 #else
3450 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3451 #endif
3452 }
3453
3454 if (TREE_THIS_VOLATILE (decl))
3455 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3456 #if 0 /* A variable is not necessarily unchanging
3457 just because it is const. RTX_UNCHANGING_P
3458 means no change in the function,
3459 not merely no change in the variable's scope.
3460 It is correct to set RTX_UNCHANGING_P if the variable's scope
3461 is the whole function. There's no convenient way to test that. */
3462 if (TREE_READONLY (decl))
3463 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3464 #endif
3465
3466 /* If doing stupid register allocation, make sure life of any
3467 register variable starts here, at the start of its scope. */
3468
3469 if (obey_regdecls)
3470 use_variable (DECL_RTL (decl));
3471 }
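/* An illustrative sketch (not part of the compiler): roughly how a
   front end drives the routine above for a local declaration such as
   `int i = 0;'.  MAKE_INT_VAR_DECL is a hypothetical helper that
   builds the VAR_DECL with DECL_INITIAL set to integer_zero_node.  */
#if 0
{
  tree decl = make_int_var_decl ("i");	/* hypothetical helper */

  expand_decl (decl);		/* allocate a pseudo reg or stack slot */
  expand_decl_init (decl);	/* compute and store the initial value */
}
#endif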
3472
3473
3474 /* Generate code for the automatic variable declaration DECL. For
3475 most variables this just means we give it a stack offset. The
3476 compiler sometimes emits cleanups without variables and we will
3477 have to deal with those too. */
3478
3479 static void
3480 bc_expand_decl (decl, cleanup)
3481 tree decl;
3482 tree cleanup;
3483 {
3484 tree type;
3485
3486 if (!decl)
3487 {
3488 /* A cleanup with no variable. */
3489 if (!cleanup)
3490 abort ();
3491
3492 return;
3493 }
3494
3495 /* Only auto variables need any work. */
3496 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3497 return;
3498
3499 type = TREE_TYPE (decl);
3500
3501 if (type == error_mark_node)
3502 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3503
3504 else if (DECL_SIZE (decl) == 0)
3505
3506 /* Variable with incomplete type. The stack offset herein will be
3507 fixed later in expand_decl_init (). */
3508 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3509
3510 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3511 {
3512 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3513 DECL_ALIGN (decl));
3514 }
3515 else
3516 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3517 }
3518 \f
3519 /* Emit code to perform the initialization of a declaration DECL. */
3520
3521 void
3522 expand_decl_init (decl)
3523 tree decl;
3524 {
3525 int was_used = TREE_USED (decl);
3526
3527 if (output_bytecode)
3528 {
3529 bc_expand_decl_init (decl);
3530 return;
3531 }
3532
3533 /* If this is a CONST_DECL, we don't have to generate any code, but
3534 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3535 to be set while in the obstack containing the constant. If we don't
3536 do this, we can lose if we have functions nested three deep and the middle
3537 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3538 the innermost function is the first to expand that STRING_CST. */
3539 if (TREE_CODE (decl) == CONST_DECL)
3540 {
3541 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3542 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3543 EXPAND_INITIALIZER);
3544 return;
3545 }
3546
3547 if (TREE_STATIC (decl))
3548 return;
3549
3550 /* Compute and store the initial value now. */
3551
3552 if (DECL_INITIAL (decl) == error_mark_node)
3553 {
3554 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3555 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3556 || code == POINTER_TYPE)
3557 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3558 0, 0);
3559 emit_queue ();
3560 }
3561 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3562 {
3563 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3564 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3565 emit_queue ();
3566 }
3567
3568 /* Don't let the initialization count as "using" the variable. */
3569 TREE_USED (decl) = was_used;
3570
3571 /* Free any temporaries we made while initializing the decl. */
3572 preserve_temp_slots (NULL_RTX);
3573 free_temp_slots ();
3574 }
3575
3576 /* Expand initialization for variable-sized types. Allocate the
3577 array using newlocalSI and set the local variable, which is a
3578 pointer to the storage. */
3579
3580 static void
3581 bc_expand_variable_local_init (decl)
3582 tree decl;
3583 {
3584 /* Evaluate size expression and coerce to SI */
3585 bc_expand_expr (DECL_SIZE (decl));
3586
3587 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3588 no coercion is necessary (?) */
3589
3590 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3591 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3592
3593 /* Emit code to allocate array */
3594 bc_emit_instruction (newlocalSI);
3595
3596 /* Store array pointer in local variable. This is the only instance
3597 where we actually want the address of the pointer to the
3598 variable-size block, rather than the pointer itself. We avoid
3599 using expand_address() since that would cause the pointer to be
3600 pushed rather than its address. Hence the hard-coded reference;
3601 notice also that the variable is always local (no global
3602 variable-size type variables). */
3603
3604 bc_load_localaddr (DECL_RTL (decl));
3605 bc_emit_instruction (storeP);
3606 }
3607
3608
3609 /* Emit code to initialize a declaration. */
3610
3611 static void
3612 bc_expand_decl_init (decl)
3613 tree decl;
3614 {
3615 int org_stack_depth;
3616
3617 /* Static initializers are handled elsewhere. */
3618
3619 if (TREE_STATIC (decl))
3620 return;
3621
3622 /* Remember the original stack depth. */
3623 org_stack_depth = stack_depth;
3624
3625 /* If the type is variable-size, we first create its space (we ASSUME
3626 it CAN'T be static). We do this regardless of whether there's an
3627 initializer assignment or not. */
3628
3629 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3630 bc_expand_variable_local_init (decl);
3631
3632 /* Expand initializer assignment */
3633 if (DECL_INITIAL (decl) == error_mark_node)
3634 {
3635 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3636
3637 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3638 || code == POINTER_TYPE)
3639
3640 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3641 }
3642 else if (DECL_INITIAL (decl))
3643 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3644
3645 /* Restore stack depth */
3646 if (org_stack_depth > stack_depth)
3647 abort ();
3648
3649 bc_adjust_stack (stack_depth - org_stack_depth);
3650 }
3651
3652
3653 /* CLEANUP is an expression to be executed at exit from this binding contour;
3654 for example, in C++, it might call the destructor for this variable.
3655
3656 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3657 either before or after calling `expand_decl' but before compiling
3658 any subsequent expressions. This is because CLEANUP may be expanded
3659 more than once, on different branches of execution.
3660 For the same reason, CLEANUP may not contain a CALL_EXPR
3661 except as its topmost node--else `preexpand_calls' would get confused.
3662
3663 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3664 that is not associated with any particular variable. */
3665
3666 int
3667 expand_decl_cleanup (decl, cleanup)
3668 tree decl, cleanup;
3669 {
3670 struct nesting *thisblock = block_stack;
3671
3672 /* Error if we are not in any block. */
3673 if (thisblock == 0)
3674 return 0;
3675
3676 /* Record the cleanup if there is one. */
3677
3678 if (cleanup != 0)
3679 {
3680 thisblock->data.block.cleanups
3681 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3682 /* If this block has a cleanup, it belongs in stack_block_stack. */
3683 stack_block_stack = thisblock;
3684 (*interim_eh_hook) (NULL_TREE);
3685 }
3686 return 1;
3687 }
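/* An illustrative sketch (not part of the compiler): a C++-like
   front end could register a destructor call as the cleanup for a
   local object.  BUILD_DESTRUCTOR_CALL and the message text are
   hypothetical.  */
#if 0
{
  tree cleanup = build_destructor_call (decl);	/* hypothetical */

  if (! expand_decl_cleanup (decl, cleanup))
    error ("destructor needed but not inside any binding contour");
}
#endif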
3688 \f
3689 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3690 DECL_ELTS is the list of elements that belong to DECL's type.
3691 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3692
3693 void
3694 expand_anon_union_decl (decl, cleanup, decl_elts)
3695 tree decl, cleanup, decl_elts;
3696 {
3697 struct nesting *thisblock = block_stack;
3698 rtx x;
3699
3700 expand_decl (decl);
3701 x = DECL_RTL (decl);
3702
3703 while (decl_elts)
3704 {
3705 tree decl_elt = TREE_VALUE (decl_elts);
3706 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3707 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3708
3709 /* Propagate the union's alignment to the elements. */
3710 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3711
3712 /* If the element has BLKmode and the union doesn't, the union is
3713 aligned such that the element doesn't need to have BLKmode, so
3714 change the element's mode to the appropriate one for its size. */
3715 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3716 DECL_MODE (decl_elt) = mode
3717 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3718 MODE_INT, 1);
3719
3720 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3721 instead create a new MEM rtx with the proper mode. */
3722 if (GET_CODE (x) == MEM)
3723 {
3724 if (mode == GET_MODE (x))
3725 DECL_RTL (decl_elt) = x;
3726 else
3727 {
3728 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3729 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3730 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3731 }
3732 }
3733 else if (GET_CODE (x) == REG)
3734 {
3735 if (mode == GET_MODE (x))
3736 DECL_RTL (decl_elt) = x;
3737 else
3738 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3739 }
3740 else
3741 abort ();
3742
3743 /* Record the cleanup if there is one. */
3744
3745 if (cleanup != 0)
3746 thisblock->data.block.cleanups
3747 = temp_tree_cons (decl_elt, cleanup_elt,
3748 thisblock->data.block.cleanups);
3749
3750 decl_elts = TREE_CHAIN (decl_elts);
3751 }
3752 }
3753 \f
3754 /* Expand a list of cleanups LIST.
3755 Elements may be expressions or may be nested lists.
3756
3757 If DONT_DO is nonnull, then any list-element
3758 whose TREE_PURPOSE matches DONT_DO is omitted.
3759 This is sometimes used to avoid a cleanup associated with
3760 a value that is being returned out of the scope.
3761
3762 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3763 goto and handle protection regions specially in that case.
3764
3765 If REACHABLE, we emit code, otherwise just inform the exception handling
3766 code about this finalization. */
3767
3768 static void
3769 expand_cleanups (list, dont_do, in_fixup, reachable)
3770 tree list;
3771 tree dont_do;
3772 int in_fixup;
3773 int reachable;
3774 {
3775 tree tail;
3776 for (tail = list; tail; tail = TREE_CHAIN (tail))
3777 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3778 {
3779 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3780 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3781 else
3782 {
3783 if (! in_fixup)
3784 (*interim_eh_hook) (TREE_VALUE (tail));
3785
3786 if (reachable)
3787 {
3788 /* Cleanups may be run multiple times. For example,
3789 when exiting a binding contour, we expand the
3790 cleanups associated with that contour. When a goto
3791 within that binding contour has a target outside that
3792 contour, it will expand all cleanups from its scope to
3793 the target. Though the cleanups are expanded multiple
3794 times, the control paths are non-overlapping so the
3795 cleanups will not be executed twice. */
3796 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3797 free_temp_slots ();
3798 }
3799 }
3800 }
3801 }
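/* An illustrative source fragment (user code, not compiler code)
   showing why a cleanup may be expanded more than once: the cleanup
   for OBJ is expanded on the `goto out' path and again at the normal
   exit from the block.  The two copies lie on non-overlapping
   control paths, so the cleanup still runs at most once per
   execution.  */
#if 0
{
  struct T obj;		/* assume the front end gave OBJ a cleanup */
  if (cond)
    goto out;		/* cleanup expanded along this path */
  use (&obj);
}			/* cleanup expanded again at block exit */
out: ;
#endif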
3802
3803 /* Move all cleanups from the current block_stack
3804 to the containing block_stack, where they are assumed to
3805 have been created. If anything can cause a temporary to
3806 be created, but not expanded for more than one level of
3807 block_stacks, then this code will have to change. */
3808
3809 void
3810 move_cleanups_up ()
3811 {
3812 struct nesting *block = block_stack;
3813 struct nesting *outer = block->next;
3814
3815 outer->data.block.cleanups
3816 = chainon (block->data.block.cleanups,
3817 outer->data.block.cleanups);
3818 block->data.block.cleanups = 0;
3819 }
3820
3821 tree
3822 last_cleanup_this_contour ()
3823 {
3824 if (block_stack == 0)
3825 return 0;
3826
3827 return block_stack->data.block.cleanups;
3828 }
3829
3830 /* Return 1 if there are any pending cleanups at this point.
3831 If THIS_CONTOUR is nonzero, check the current contour as well.
3832 Otherwise, look only at the contours that enclose this one. */
3833
3834 int
3835 any_pending_cleanups (this_contour)
3836 int this_contour;
3837 {
3838 struct nesting *block;
3839
3840 if (block_stack == 0)
3841 return 0;
3842
3843 if (this_contour && block_stack->data.block.cleanups != NULL)
3844 return 1;
3845 if (block_stack->data.block.cleanups == 0
3846 && (block_stack->data.block.outer_cleanups == 0
3847 #if 0
3848 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3849 #endif
3850 ))
3851 return 0;
3852
3853 for (block = block_stack->next; block; block = block->next)
3854 if (block->data.block.cleanups != 0)
3855 return 1;
3856
3857 return 0;
3858 }
3859 \f
3860 /* Enter a case (Pascal) or switch (C) statement.
3861 Push a block onto case_stack and nesting_stack
3862 to accumulate the case-labels that are seen
3863 and to record the labels generated for the statement.
3864
3865 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3866 Otherwise, this construct is transparent for `exit_something'.
3867
3868 EXPR is the index-expression to be dispatched on.
3869 TYPE is its nominal type. We could simply convert EXPR to this type,
3870 but instead we take short cuts. */
3871
3872 void
3873 expand_start_case (exit_flag, expr, type, printname)
3874 int exit_flag;
3875 tree expr;
3876 tree type;
3877 char *printname;
3878 {
3879 register struct nesting *thiscase = ALLOC_NESTING ();
3880
3881 /* Make an entry on case_stack for the case we are entering. */
3882
3883 thiscase->next = case_stack;
3884 thiscase->all = nesting_stack;
3885 thiscase->depth = ++nesting_depth;
3886 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3887 thiscase->data.case_stmt.case_list = 0;
3888 thiscase->data.case_stmt.index_expr = expr;
3889 thiscase->data.case_stmt.nominal_type = type;
3890 thiscase->data.case_stmt.default_label = 0;
3891 thiscase->data.case_stmt.num_ranges = 0;
3892 thiscase->data.case_stmt.printname = printname;
3893 thiscase->data.case_stmt.seenlabel = 0;
3894 case_stack = thiscase;
3895 nesting_stack = thiscase;
3896
3897 if (output_bytecode)
3898 {
3899 bc_expand_start_case (thiscase, expr, type, printname);
3900 return;
3901 }
3902
3903 do_pending_stack_adjust ();
3904
3905 /* Make sure case_stmt.start points to something that won't
3906 need any transformation before expand_end_case. */
3907 if (GET_CODE (get_last_insn ()) != NOTE)
3908 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3909
3910 thiscase->data.case_stmt.start = get_last_insn ();
3911 }
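/* An illustrative sketch (not part of the compiler): roughly the
   calling sequence a front end follows for
   `switch (e) { case 1: ... default: ... }'.  LABEL_1 and
   DEFAULT_LABEL stand for LABEL_DECL nodes built by the parser; the
   expansion of the case bodies is elided.  */
#if 0
{
  tree duplicate;

  expand_start_case (1, e, TREE_TYPE (e), "switch statement");
  pushcase (build_int_2 (1, 0), convert, label_1, &duplicate);
  /* ... expand the statements under `case 1:' ... */
  pushcase (NULL_TREE, convert, default_label, &duplicate);
  /* ... expand the statements under `default:' ... */
  expand_end_case (e);
}
#endif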
3912
3913
3914 /* Enter a case statement. It is assumed that the caller has pushed
3915 the current context onto the case stack. */
3916
3917 static void
3918 bc_expand_start_case (thiscase, expr, type, printname)
3919 struct nesting *thiscase;
3920 tree expr;
3921 tree type;
3922 char *printname;
3923 {
3924 bc_expand_expr (expr);
3925 bc_expand_conversion (TREE_TYPE (expr), type);
3926
3927 /* For case statements, the skip label is a jump target that is
3928 emitted once the size of the jump table is known. */
3929
3930 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3931 bc_emit_bytecode (jump);
3932 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3933
3934 #ifdef DEBUG_PRINT_CODE
3935 fputc ('\n', stderr);
3936 #endif
3937 }
3938
3939
3940 /* Start a "dummy case statement" within which case labels are invalid
3941 and are not connected to any larger real case statement.
3942 This can be used if you don't want to let a case statement jump
3943 into the middle of certain kinds of constructs. */
3944
3945 void
3946 expand_start_case_dummy ()
3947 {
3948 register struct nesting *thiscase = ALLOC_NESTING ();
3949
3950 /* Make an entry on case_stack for the dummy. */
3951
3952 thiscase->next = case_stack;
3953 thiscase->all = nesting_stack;
3954 thiscase->depth = ++nesting_depth;
3955 thiscase->exit_label = 0;
3956 thiscase->data.case_stmt.case_list = 0;
3957 thiscase->data.case_stmt.start = 0;
3958 thiscase->data.case_stmt.nominal_type = 0;
3959 thiscase->data.case_stmt.default_label = 0;
3960 thiscase->data.case_stmt.num_ranges = 0;
3961 case_stack = thiscase;
3962 nesting_stack = thiscase;
3963 }
3964
3965 /* End a dummy case statement. */
3966
3967 void
3968 expand_end_case_dummy ()
3969 {
3970 POPSTACK (case_stack);
3971 }
3972
3973 /* Return the data type of the index-expression
3974 of the innermost case statement, or null if none. */
3975
3976 tree
3977 case_index_expr_type ()
3978 {
3979 if (case_stack)
3980 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3981 return 0;
3982 }
3983 \f
3984 /* Accumulate one case or default label inside a case or switch statement.
3985 VALUE is the value of the case (a null pointer, for a default label).
3986 The function CONVERTER, when applied to arguments T and V,
3987 converts the value V to the type T.
3988
3989 If not currently inside a case or switch statement, return 1 and do
3990 nothing. The caller will print a language-specific error message.
3991 If VALUE is a duplicate or overlaps, return 2 and do nothing
3992 except store the (first) duplicate node in *DUPLICATE.
3993 If VALUE is out of range, return 3 and do nothing.
3994 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3995 Return 0 on success.
3996
3997 Extended to handle range statements. */
3998
3999 int
4000 pushcase (value, converter, label, duplicate)
4001 register tree value;
4002 tree (*converter) PROTO((tree, tree));
4003 register tree label;
4004 tree *duplicate;
4005 {
4006 register struct case_node **l;
4007 register struct case_node *n;
4008 tree index_type;
4009 tree nominal_type;
4010
4011 if (output_bytecode)
4012 return bc_pushcase (value, label);
4013
4014 /* Fail if not inside a real case statement. */
4015 if (! (case_stack && case_stack->data.case_stmt.start))
4016 return 1;
4017
4018 if (stack_block_stack
4019 && stack_block_stack->depth > case_stack->depth)
4020 return 5;
4021
4022 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4023 nominal_type = case_stack->data.case_stmt.nominal_type;
4024
4025 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4026 if (index_type == error_mark_node)
4027 return 0;
4028
4029 /* Convert VALUE to the type in which the comparisons are nominally done. */
4030 if (value != 0)
4031 value = (*converter) (nominal_type, value);
4032
4033 /* If this is the first label, warn if any insns have been emitted. */
4034 if (case_stack->data.case_stmt.seenlabel == 0)
4035 {
4036 rtx insn;
4037 for (insn = case_stack->data.case_stmt.start;
4038 insn;
4039 insn = NEXT_INSN (insn))
4040 {
4041 if (GET_CODE (insn) == CODE_LABEL)
4042 break;
4043 if (GET_CODE (insn) != NOTE
4044 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4045 {
4046 warning ("unreachable code at beginning of %s",
4047 case_stack->data.case_stmt.printname);
4048 break;
4049 }
4050 }
4051 }
4052 case_stack->data.case_stmt.seenlabel = 1;
4053
4054 /* Fail if this value is out of range for the actual type of the index
4055 (which may be narrower than NOMINAL_TYPE). */
4056 if (value != 0 && ! int_fits_type_p (value, index_type))
4057 return 3;
4058
4059 /* Fail if this is a duplicate or overlaps another entry. */
4060 if (value == 0)
4061 {
4062 if (case_stack->data.case_stmt.default_label != 0)
4063 {
4064 *duplicate = case_stack->data.case_stmt.default_label;
4065 return 2;
4066 }
4067 case_stack->data.case_stmt.default_label = label;
4068 }
4069 else
4070 {
4071 /* Find the elt in the chain before which to insert the new value,
4072 to keep the chain sorted in increasing order.
4073 But report an error if this element is a duplicate. */
4074 for (l = &case_stack->data.case_stmt.case_list;
4075 /* Keep going past elements distinctly less than VALUE. */
4076 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4077 l = &(*l)->right)
4078 ;
4079 if (*l)
4080 {
4081 /* Element we will insert before must be distinctly greater;
4082 overlap means error. */
4083 if (! tree_int_cst_lt (value, (*l)->low))
4084 {
4085 *duplicate = (*l)->code_label;
4086 return 2;
4087 }
4088 }
4089
4090 /* Add this label to the chain, and succeed.
4091 Copy VALUE so it is on temporary rather than momentary
4092 obstack and will thus survive till the end of the case statement. */
4093 n = (struct case_node *) oballoc (sizeof (struct case_node));
4094 n->left = 0;
4095 n->right = *l;
4096 n->high = n->low = copy_node (value);
4097 n->code_label = label;
4098 *l = n;
4099 }
4100
4101 expand_label (label);
4102 return 0;
4103 }
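/* An illustrative sketch (not part of the compiler): how a caller
   might map the return codes above onto diagnostics.  The message
   texts are hypothetical.  */
#if 0
{
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 0: break;		/* success */
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;
    case 3: error ("case value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
}
#endif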
4104
4105 /* Like pushcase but this case applies to all values
4106 between VALUE1 and VALUE2 (inclusive).
4107 The return value is the same as that of pushcase
4108 but there is one additional error code:
4109 4 means the specified range was empty. */
4110
4111 int
4112 pushcase_range (value1, value2, converter, label, duplicate)
4113 register tree value1, value2;
4114 tree (*converter) PROTO((tree, tree));
4115 register tree label;
4116 tree *duplicate;
4117 {
4118 register struct case_node **l;
4119 register struct case_node *n;
4120 tree index_type;
4121 tree nominal_type;
4122
4123 /* Fail if not inside a real case statement. */
4124 if (! (case_stack && case_stack->data.case_stmt.start))
4125 return 1;
4126
4127 if (stack_block_stack
4128 && stack_block_stack->depth > case_stack->depth)
4129 return 5;
4130
4131 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4132 nominal_type = case_stack->data.case_stmt.nominal_type;
4133
4134 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4135 if (index_type == error_mark_node)
4136 return 0;
4137
4138 /* If this is the first label, warn if any insns have been emitted. */
4139 if (case_stack->data.case_stmt.seenlabel == 0)
4140 {
4141 rtx insn;
4142 for (insn = case_stack->data.case_stmt.start;
4143 insn;
4144 insn = NEXT_INSN (insn))
4145 {
4146 if (GET_CODE (insn) == CODE_LABEL)
4147 break;
4148 if (GET_CODE (insn) != NOTE
4149 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4150 {
4151 warning ("unreachable code at beginning of %s",
4152 case_stack->data.case_stmt.printname);
4153 break;
4154 }
4155 }
4156 }
4157 case_stack->data.case_stmt.seenlabel = 1;
4158
4159 /* Convert VALUEs to type in which the comparisons are nominally done. */
4160 if (value1 == 0) /* Negative infinity. */
4161 value1 = TYPE_MIN_VALUE (index_type);
4162 value1 = (*converter) (nominal_type, value1);
4163
4164 if (value2 == 0) /* Positive infinity. */
4165 value2 = TYPE_MAX_VALUE (index_type);
4166 value2 = (*converter) (nominal_type, value2);
4167
4168 /* Fail if these values are out of range. */
4169 if (! int_fits_type_p (value1, index_type))
4170 return 3;
4171
4172 if (! int_fits_type_p (value2, index_type))
4173 return 3;
4174
4175 /* Fail if the range is empty. */
4176 if (tree_int_cst_lt (value2, value1))
4177 return 4;
4178
4179 /* If the bounds are equal, turn this into the one-value case. */
4180 if (tree_int_cst_equal (value1, value2))
4181 return pushcase (value1, converter, label, duplicate);
4182
4183 /* Find the elt in the chain before which to insert the new value,
4184 to keep the chain sorted in increasing order.
4185 But report an error if this element is a duplicate. */
4186 for (l = &case_stack->data.case_stmt.case_list;
4187 /* Keep going past elements distinctly less than this range. */
4188 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4189 l = &(*l)->right)
4190 ;
4191 if (*l)
4192 {
4193 /* Element we will insert before must be distinctly greater;
4194 overlap means error. */
4195 if (! tree_int_cst_lt (value2, (*l)->low))
4196 {
4197 *duplicate = (*l)->code_label;
4198 return 2;
4199 }
4200 }
4201
4202 /* Add this label to the chain, and succeed.
4203 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4204 obstack and will thus survive till the end of the case statement. */
4205
4206 n = (struct case_node *) oballoc (sizeof (struct case_node));
4207 n->left = 0;
4208 n->right = *l;
4209 n->low = copy_node (value1);
4210 n->high = copy_node (value2);
4211 n->code_label = label;
4212 *l = n;
4213
4214 expand_label (label);
4215
4216 case_stack->data.case_stmt.num_ranges++;
4217
4218 return 0;
4219 }
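/* An illustrative sketch (not part of the compiler): a front end
   supporting the GNU `case LO ... HI:' extension would call
   pushcase_range; return code 4 is the additional "empty range"
   failure.  LO_VALUE, HI_VALUE, and LABEL are built by the parser.  */
#if 0
{
  tree duplicate;

  if (pushcase_range (lo_value, hi_value, convert, label, &duplicate) == 4)
    warning ("empty case range");
}
#endif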
4220
4221
4222 /* Accumulate one case or default label; VALUE is the value of the
4223 case, or nil for a default label. If not currently inside a case,
4224 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4225 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4226 Return 0 on success. This function is a leftover from the earlier
4227 bytecode compiler, which was based on gcc 1.37. It should be
4228 merged into pushcase. */
4229
4230 static int
4231 bc_pushcase (value, label)
4232 tree value;
4233 tree label;
4234 {
4235 struct nesting *thiscase = case_stack;
4236 struct case_node *case_label, *new_label;
4237
4238 if (! thiscase)
4239 return 1;
4240
4241 /* Fail if duplicate, overlap, or out of type range. */
4242 if (value)
4243 {
4244 value = convert (thiscase->data.case_stmt.nominal_type, value);
4245 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4246 return 3;
4247
4248 for (case_label = thiscase->data.case_stmt.case_list;
4249 case_label->left; case_label = case_label->left)
4250 if (! tree_int_cst_lt (case_label->left->high, value))
4251 break;
4252
4253 if ((case_label != thiscase->data.case_stmt.case_list
4254 && ! tree_int_cst_lt (case_label->high, value))
4255 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4256 return 2;
4257
4258 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4259 new_label->low = new_label->high = copy_node (value);
4260 new_label->code_label = label;
4261 new_label->left = case_label->left;
4262
4263 case_label->left = new_label;
4264 thiscase->data.case_stmt.num_ranges++;
4265 }
4266 else
4267 {
4268 if (thiscase->data.case_stmt.default_label)
4269 return 2;
4270 thiscase->data.case_stmt.default_label = label;
4271 }
4272
4273 expand_label (label);
4274 return 0;
4275 }
4276 \f
4277 /* Returns the number of possible values of TYPE.
4278 Returns -1 if the number is unknown or variable.
4279 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4280 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4281 do not increase monotonically (there may be duplicates);
4282 to 1 if the values increase monotonically, but not always by 1;
4283 otherwise sets it to 0. */
4284
4285 HOST_WIDE_INT
4286 all_cases_count (type, sparseness)
4287 tree type;
4288 int *sparseness;
4289 {
4290 HOST_WIDE_INT count, count_high = 0;
4291 *sparseness = 0;
4292
4293 switch (TREE_CODE (type))
4294 {
4295 tree t;
4296 case BOOLEAN_TYPE:
4297 count = 2;
4298 break;
4299 case CHAR_TYPE:
4300 count = 1 << BITS_PER_UNIT;
4301 break;
4302 default:
4303 case INTEGER_TYPE:
4304 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4305 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4306 return -1;
4307 else
4308 {
4309 /* count
4310 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4311 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4312 but with overflow checking. */
4313 tree mint = TYPE_MIN_VALUE (type);
4314 tree maxt = TYPE_MAX_VALUE (type);
4315 HOST_WIDE_INT lo, hi;
4316 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4317 &lo, &hi);
4318 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4319 lo, hi, &lo, &hi);
4320 add_double (lo, hi, 1, 0, &lo, &hi);
4321 if (hi != 0 || lo < 0)
4322 return -2;
4323 count = lo;
4324 }
4325 break;
4326 case ENUMERAL_TYPE:
4327 count = 0;
4328 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4329 {
4330 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4331 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4332 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4333 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4334 *sparseness = 1;
4335 count++;
4336 }
4337 if (*sparseness == 1)
4338 {
4339 tree prev = TREE_VALUE (TYPE_VALUES (type));
4340 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4341 {
4342 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4343 {
4344 *sparseness = 2;
4345 break;
4346 }
4347 prev = TREE_VALUE (t);
4348 }
4349
4350 }
4351 }
4352 return count;
4353 }
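/* An illustrative sketch (not part of the compiler) of the same
   overflow check in single-precision host arithmetic: compute
   MAXVAL - MINVAL + 1 in unsigned arithmetic, where wraparound is
   well defined, and reject any count that does not fit in a signed
   HOST_WIDE_INT, mirroring the `hi != 0 || lo < 0' test above.  */
#if 0
static HOST_WIDE_INT
example_case_count (minval, maxval)
     HOST_WIDE_INT minval, maxval;
{
  unsigned HOST_WIDE_INT ucount
    = (unsigned HOST_WIDE_INT) maxval - (unsigned HOST_WIDE_INT) minval + 1;

  if (ucount == 0 || (HOST_WIDE_INT) ucount < 0)
    return -2;			/* count does not fit */
  return (HOST_WIDE_INT) ucount;
}
#endif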
4354
4355
4356 #define BITARRAY_TEST(ARRAY, INDEX) \
4357 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4358 & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
4359 #define BITARRAY_SET(ARRAY, INDEX) \
4360 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4361 |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
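/* A minimal usage sketch (not part of the compiler) for the macros
   above: one bit per possible case value, packed into chars.  */
#if 0
{
  unsigned char seen[(128 + HOST_BITS_PER_CHAR - 1) / HOST_BITS_PER_CHAR];

  bzero ((char *) seen, sizeof seen);
  BITARRAY_SET (seen, 42);
  if (BITARRAY_TEST (seen, 42))
    ;				/* case value 42 has been marked */
}
#endif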
4362
4363 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4364 with the case values we have seen, assuming the case expression
4365 has the given TYPE.
4366 SPARSENESS is as determined by all_cases_count.
4367
4368 The time needed is proportional to COUNT, unless
4369 SPARSENESS is 2, in which case quadratic time is needed. */
4370
4371 void
4372 mark_seen_cases (type, cases_seen, count, sparseness)
4373 tree type;
4374 unsigned char *cases_seen;
4375 long count;
4376 int sparseness;
4377 {
4378 long i;
4379
4380 tree next_node_to_try = NULL_TREE;
4381 long next_node_offset = 0;
4382
4383 register struct case_node *n;
4384 tree val = make_node (INTEGER_CST);
4385 TREE_TYPE (val) = type;
4386 for (n = case_stack->data.case_stmt.case_list; n;
4387 n = n->right)
4388 {
4389 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4390 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4391 while ( ! tree_int_cst_lt (n->high, val))
4392 {
4393 /* Calculate (into xlo) the "offset" of the integer (val).
4394 The element with lowest value has offset 0, the next smallest
4395 element has offset 1, etc. */
4396
4397 HOST_WIDE_INT xlo, xhi;
4398 tree t;
4399 if (sparseness == 2)
4400 {
4401 /* This less efficient loop is only needed to handle
4402 duplicate case values (multiple enum constants
4403 with the same value). */
4404 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4405 t = TREE_CHAIN (t), xlo++)
4406 {
4407 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4408 BITARRAY_SET (cases_seen, xlo);
4409 }
4410 }
4411 else
4412 {
4413 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4414 {
4415 /* The TYPE_VALUES will be in increasing order, so
4416 start searching where we last ended. */
4417 t = next_node_to_try;
4418 xlo = next_node_offset;
4419 xhi = 0;
4420 for (;;)
4421 {
4422 if (t == NULL_TREE)
4423 {
4424 t = TYPE_VALUES (type);
4425 xlo = 0;
4426 }
4427 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4428 {
4429 next_node_to_try = TREE_CHAIN (t);
4430 next_node_offset = xlo + 1;
4431 break;
4432 }
4433 xlo++;
4434 t = TREE_CHAIN (t);
4435 if (t == next_node_to_try)
4436 break;
4437 }
4438 }
4439 else
4440 {
4441 t = TYPE_MIN_VALUE (type);
4442 if (t)
4443 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4444 &xlo, &xhi);
4445 else
4446 xlo = xhi = 0;
4447 add_double (xlo, xhi,
4448 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4449 &xlo, &xhi);
4450 }
4451
4452 if (xhi == 0 && xlo >= 0 && xlo < count)
4453 BITARRAY_SET (cases_seen, xlo);
4454 }
4455 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4456 1, 0,
4457 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4458 }
4459 }
4460 }
4461
4462 /* Called when the index of a switch statement is an enumerated type
4463 and there is no default label.
4464
4465 Checks that all enumeration literals are covered by the case
4466 expressions of a switch. Also, warn if there are any extra
4467 switch cases that are *not* elements of the enumerated type.
4468
4469 If all enumeration literals were covered by the case expressions,
4470 turn one of the expressions into the default expression since it should
4471 not be possible to fall through such a switch. */
4472
4473 void
4474 check_for_full_enumeration_handling (type)
4475 tree type;
4476 {
4477 register struct case_node *n;
4478 register struct case_node **l;
4479 register tree chain;
4480 int all_values = 1;
4481
4482 /* True iff the selector type is a numbered set mode. */
4483 int sparseness = 0;
4484
4485 /* The number of possible selector values. */
4486 HOST_WIDE_INT size;
4487
4488 /* For each possible selector value, a one iff it has been matched
4489 by a case value alternative. */
4490 unsigned char *cases_seen;
4491
4492 /* The allocated size of cases_seen, in chars. */
4493 long bytes_needed;
4494 tree t;
4495
4496 if (output_bytecode)
4497 {
4498 bc_check_for_full_enumeration_handling (type);
4499 return;
4500 }
4501
4502 if (! warn_switch)
4503 return;
4504
4505 size = all_cases_count (type, &sparseness);
4506 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4507
4508 if (size > 0 && size < 600000
4509 /* We deliberately use malloc here - not xmalloc. */
4510 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4511 {
4512 long i;
4513 tree v = TYPE_VALUES (type);
4514 bzero (cases_seen, bytes_needed);
4515
4516 /* The time complexity of this code is normally O(N), where
4517 N is the number of members in the enumerated type.
4518 However, if TYPE is an ENUMERAL_TYPE whose values do not
4519 increase monotonically, quadratic time may be needed. */
4520
4521 mark_seen_cases (type, cases_seen, size, sparseness);
4522
4523 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4524 {
4525 if (BITARRAY_TEST(cases_seen, i) == 0)
4526 warning ("enumeration value `%s' not handled in switch",
4527 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4528 }
4529
4530 free (cases_seen);
4531 }
4532
4533 /* Now we go the other way around; we warn if there are case
4534 expressions that don't correspond to enumerators. This can
4535 occur since C and C++ don't enforce type-checking of
4536 assignments to enumeration variables. */
4537
4538 if (warn_switch)
4539 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4540 {
4541 for (chain = TYPE_VALUES (type);
4542 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4543 chain = TREE_CHAIN (chain))
4544 ;
4545
4546 if (!chain)
4547 {
4548 if (TYPE_NAME (type) == 0)
4549 warning ("case value `%d' not in enumerated type",
4550 TREE_INT_CST_LOW (n->low));
4551 else
4552 warning ("case value `%d' not in enumerated type `%s'",
4553 TREE_INT_CST_LOW (n->low),
4554 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4555 == IDENTIFIER_NODE)
4556 ? TYPE_NAME (type)
4557 : DECL_NAME (TYPE_NAME (type))));
4558 }
4559 if (!tree_int_cst_equal (n->low, n->high))
4560 {
4561 for (chain = TYPE_VALUES (type);
4562 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4563 chain = TREE_CHAIN (chain))
4564 ;
4565
4566 if (!chain)
4567 {
4568 if (TYPE_NAME (type) == 0)
4569 warning ("case value `%d' not in enumerated type",
4570 TREE_INT_CST_LOW (n->high));
4571 else
4572 warning ("case value `%d' not in enumerated type `%s'",
4573 TREE_INT_CST_LOW (n->high),
4574 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4575 == IDENTIFIER_NODE)
4576 ? TYPE_NAME (type)
4577 : DECL_NAME (TYPE_NAME (type))));
4578 }
4579 }
4580 }
4581
4582 #if 0
4583 /* ??? This optimization is disabled because it causes valid programs to
4584 fail. ANSI C does not guarantee that an expression with enum type
4585 will have a value that is the same as one of the enumeration literals. */
4586
4587 /* If all values were found as case labels, make one of them the default
4588 label. Thus, this switch will never fall through. We arbitrarily pick
4589 the last one to make the default since this is likely the most
4590 efficient choice. */
4591
4592 if (all_values)
4593 {
4594 for (l = &case_stack->data.case_stmt.case_list;
4595 (*l)->right != 0;
4596 l = &(*l)->right)
4597 ;
4598
4599 case_stack->data.case_stmt.default_label = (*l)->code_label;
4600 *l = 0;
4601 }
4602 #endif /* 0 */
4603 }
4604
4605
4606 /* Check that all enumeration literals are covered by the case
4607 expressions of a switch. Also warn if there are any cases
4608 that are not elements of the enumerated type. */
4609
4610 static void
4611 bc_check_for_full_enumeration_handling (type)
4612 tree type;
4613 {
4614 struct nesting *thiscase = case_stack;
4615 struct case_node *c;
4616 tree e;
4617
4618 /* Check for enums not handled. */
4619 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4620 {
4621 for (c = thiscase->data.case_stmt.case_list->left;
4622 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4623 c = c->left)
4624 ;
4625 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4626 warning ("enumerated value `%s' not handled in switch",
4627 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4628 }
4629
4630 /* Check for cases not in the enumeration. */
4631 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4632 {
4633 for (e = TYPE_VALUES (type);
4634 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4635 e = TREE_CHAIN (e))
4636 ;
4637 if (! e)
4638 warning ("case value `%d' not in enumerated type `%s'",
4639 TREE_INT_CST_LOW (c->low),
4640 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4641 ? TYPE_NAME (type)
4642 : DECL_NAME (TYPE_NAME (type))));
4643 }
4644 }
4645 \f
4646 /* Terminate a case (Pascal) or switch (C) statement
4647 in which ORIG_INDEX is the expression to be tested.
4648 Generate the code to test it and jump to the right place. */
4649
4650 void
4651 expand_end_case (orig_index)
4652 tree orig_index;
4653 {
4654 tree minval, maxval, range, orig_minval;
4655 rtx default_label = 0;
4656 register struct case_node *n;
4657 int count;
4658 rtx index;
4659 rtx table_label;
4660 int ncases;
4661 rtx *labelvec;
4662 register int i;
4663 rtx before_case;
4664 register struct nesting *thiscase = case_stack;
4665 tree index_expr, index_type;
4666 int unsignedp;
4667
4668 if (output_bytecode)
4669 {
4670 bc_expand_end_case (orig_index);
4671 return;
4672 }
4673
4674 table_label = gen_label_rtx ();
4675 index_expr = thiscase->data.case_stmt.index_expr;
4676 index_type = TREE_TYPE (index_expr);
4677 unsignedp = TREE_UNSIGNED (index_type);
4678
4679 do_pending_stack_adjust ();
4680
4681 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4682 if (index_type != error_mark_node)
4683 {
4684 /* If switch expression was an enumerated type, check that all
4685 enumeration literals are covered by the cases.
4686 No sense trying this if there's a default case, however. */
4687
4688 if (!thiscase->data.case_stmt.default_label
4689 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4690 && TREE_CODE (index_expr) != INTEGER_CST)
4691 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4692
4693 /* If this is the first label, warn if any insns have been emitted. */
4694 if (thiscase->data.case_stmt.seenlabel == 0)
4695 {
4696 rtx insn;
4697 for (insn = get_last_insn ();
4698 insn != case_stack->data.case_stmt.start;
4699 insn = PREV_INSN (insn))
4700 if (GET_CODE (insn) != NOTE
4701 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4702 {
4703 warning ("unreachable code at beginning of %s",
4704 case_stack->data.case_stmt.printname);
4705 break;
4706 }
4707 }
4708
4709 /* If we don't have a default-label, create one here,
4710 after the body of the switch. */
4711 if (thiscase->data.case_stmt.default_label == 0)
4712 {
4713 thiscase->data.case_stmt.default_label
4714 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4715 expand_label (thiscase->data.case_stmt.default_label);
4716 }
4717 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4718
4719 before_case = get_last_insn ();
4720
4721 /* Simplify the case-list before we count it. */
4722 group_case_nodes (thiscase->data.case_stmt.case_list);
4723
4724 /* Get upper and lower bounds of case values.
4725 Also convert all the case values to the index expr's data type. */
4726
4727 count = 0;
4728 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4729 {
4730 /* Check low and high label values are integers. */
4731 if (TREE_CODE (n->low) != INTEGER_CST)
4732 abort ();
4733 if (TREE_CODE (n->high) != INTEGER_CST)
4734 abort ();
4735
4736 n->low = convert (index_type, n->low);
4737 n->high = convert (index_type, n->high);
4738
4739 /* Count the elements and track the largest and smallest
4740 of them (treating them as signed even if they are not). */
4741 if (count++ == 0)
4742 {
4743 minval = n->low;
4744 maxval = n->high;
4745 }
4746 else
4747 {
4748 if (INT_CST_LT (n->low, minval))
4749 minval = n->low;
4750 if (INT_CST_LT (maxval, n->high))
4751 maxval = n->high;
4752 }
4753 /* A range counts double, since it requires two compares. */
4754 if (! tree_int_cst_equal (n->low, n->high))
4755 count++;
4756 }
4757
4758 orig_minval = minval;
4759
4760 /* Compute span of values. */
4761 if (count != 0)
4762 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4763
4764 if (count == 0)
4765 {
4766 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4767 emit_queue ();
4768 emit_jump (default_label);
4769 }
4770
4771 /* If range of values is much bigger than number of values,
4772 make a sequence of conditional branches instead of a dispatch.
4773 If the switch-index is a constant, do it this way
4774 because we can optimize it. */
4775
4776 #ifndef CASE_VALUES_THRESHOLD
4777 #ifdef HAVE_casesi
4778 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4779 #else
4780 /* If machine does not have a case insn that compares the
4781 bounds, this means extra overhead for dispatch tables
4782 which raises the threshold for using them. */
4783 #define CASE_VALUES_THRESHOLD 5
4784 #endif /* HAVE_casesi */
4785 #endif /* CASE_VALUES_THRESHOLD */
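/* A worked instance of the heuristic described above, with
   illustrative numbers: when HAVE_casesi holds, the threshold is 4,
   so a switch with count = 3 labels spanning range = 1000 uses the
   branch sequence (both because 3 < 4 and because 1000 > 10 * 3),
   whereas 100 labels spanning a range of 256 would get a dispatch
   table. */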
4786
4787 else if (TREE_INT_CST_HIGH (range) != 0
4788 || count < CASE_VALUES_THRESHOLD
4789 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4790 > 10 * count)
4791 || TREE_CODE (index_expr) == INTEGER_CST
4792 /* These will reduce to a constant. */
4793 || (TREE_CODE (index_expr) == CALL_EXPR
4794 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4795 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4796 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4797 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4798 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4799 {
4800 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4801
4802 /* If the index is a short or char for which we do not have
4803 an insn to handle comparisons directly, convert it to
4804 a full integer now, rather than letting each comparison
4805 generate the conversion. */
4806
4807 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4808 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4809 == CODE_FOR_nothing))
4810 {
4811 enum machine_mode wider_mode;
4812 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4813 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4814 if (cmp_optab->handlers[(int) wider_mode].insn_code
4815 != CODE_FOR_nothing)
4816 {
4817 index = convert_to_mode (wider_mode, index, unsignedp);
4818 break;
4819 }
4820 }
4821
4822 emit_queue ();
4823 do_pending_stack_adjust ();
4824
4825 index = protect_from_queue (index, 0);
4826 if (GET_CODE (index) == MEM)
4827 index = copy_to_reg (index);
4828 if (GET_CODE (index) == CONST_INT
4829 || TREE_CODE (index_expr) == INTEGER_CST)
4830 {
4831 /* Make a tree node with the proper constant value
4832 if we don't already have one. */
4833 if (TREE_CODE (index_expr) != INTEGER_CST)
4834 {
4835 index_expr
4836 = build_int_2 (INTVAL (index),
4837 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4838 index_expr = convert (index_type, index_expr);
4839 }
4840
4841 /* For constant index expressions we need only
4842 issue an unconditional branch to the appropriate
4843 target code. The job of removing any unreachable
4844 code is left to the optimization phase if the
4845 "-O" option is specified. */
4846 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4847 if (! tree_int_cst_lt (index_expr, n->low)
4848 && ! tree_int_cst_lt (n->high, index_expr))
4849 break;
4850
4851 if (n)
4852 emit_jump (label_rtx (n->code_label));
4853 else
4854 emit_jump (default_label);
4855 }
4856 else
4857 {
4858 /* If the index expression is not constant we generate
4859 a binary decision tree to select the appropriate
4860 target code. This is done as follows:
4861
4862 The list of cases is rearranged into a binary tree,
4863 nearly optimal assuming equal probability for each case.
4864
4865 The tree is transformed into RTL, eliminating
4866 redundant test conditions at the same time.
4867
4868 If program flow could reach the end of the
4869 decision tree, an unconditional jump to the
4870 default code is emitted. */
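/* For instance (illustrative): case values {1, 3, 7, 9} become a
   small balanced tree in which the index is compared against a
   middle value first and then against values in the low or high
   half, so a dispatch costs about log2 N comparisons rather than
   N. */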
4871
4872 use_cost_table
4873 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4874 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4875 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4876 NULL_PTR);
4877 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4878 default_label, index_type);
4879 emit_jump_if_reachable (default_label);
4880 }
4881 }
4882 else
4883 {
4884 int win = 0;
4885 #ifdef HAVE_casesi
4886 if (HAVE_casesi)
4887 {
4888 enum machine_mode index_mode = SImode;
4889 int index_bits = GET_MODE_BITSIZE (index_mode);
4890 rtx op1, op2;
4891 enum machine_mode op_mode;
4892
4893 /* Convert the index to SImode. */
4894 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4895 > GET_MODE_BITSIZE (index_mode))
4896 {
4897 enum machine_mode omode = TYPE_MODE (index_type);
4898 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4899
4900 /* We must handle the endpoints in the original mode. */
4901 index_expr = build (MINUS_EXPR, index_type,
4902 index_expr, minval);
4903 minval = integer_zero_node;
4904 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4905 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4906 emit_jump_insn (gen_bltu (default_label));
4907 /* Now we can safely truncate. */
4908 index = convert_to_mode (index_mode, index, 0);
4909 }
4910 else
4911 {
4912 if (TYPE_MODE (index_type) != index_mode)
4913 {
4914 index_expr = convert (type_for_size (index_bits, 0),
4915 index_expr);
4916 index_type = TREE_TYPE (index_expr);
4917 }
4918
4919 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4920 }
4921 emit_queue ();
4922 index = protect_from_queue (index, 0);
4923 do_pending_stack_adjust ();
4924
4925 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4926 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4927 (index, op_mode))
4928 index = copy_to_mode_reg (op_mode, index);
4929
4930 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4931
4932 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4933 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4934 (op1, op_mode))
4935 op1 = copy_to_mode_reg (op_mode, op1);
4936
4937 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4938
4939 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4940 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4941 (op2, op_mode))
4942 op2 = copy_to_mode_reg (op_mode, op2);
4943
4944 emit_jump_insn (gen_casesi (index, op1, op2,
4945 table_label, default_label));
4946 win = 1;
4947 }
4948 #endif
4949 #ifdef HAVE_tablejump
4950 if (! win && HAVE_tablejump)
4951 {
4952 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4953 fold (build (MINUS_EXPR, index_type,
4954 index_expr, minval)));
4955 index_type = TREE_TYPE (index_expr);
4956 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4957 emit_queue ();
4958 index = protect_from_queue (index, 0);
4959 do_pending_stack_adjust ();
4960
4961 do_tablejump (index, TYPE_MODE (index_type),
4962 expand_expr (range, NULL_RTX, VOIDmode, 0),
4963 table_label, default_label);
4964 win = 1;
4965 }
4966 #endif
4967 if (! win)
4968 abort ();
4969
4970 /* Get table of labels to jump to, in order of case index. */
4971
4972 ncases = TREE_INT_CST_LOW (range) + 1;
4973 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4974 bzero ((char *) labelvec, ncases * sizeof (rtx));
4975
4976 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4977 {
4978 register HOST_WIDE_INT i
4979 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4980
4981 while (1)
4982 {
4983 labelvec[i]
4984 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4985 if (i + TREE_INT_CST_LOW (orig_minval)
4986 == TREE_INT_CST_LOW (n->high))
4987 break;
4988 i++;
4989 }
4990 }
4991
4992 /* Fill in the gaps with the default. */
4993 for (i = 0; i < ncases; i++)
4994 if (labelvec[i] == 0)
4995 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4996
4997 /* Output the table */
4998 emit_label (table_label);
4999
5000 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5001 were an expression, instead of an #ifdef/#ifndef. */
5002 if (
5003 #ifdef CASE_VECTOR_PC_RELATIVE
5004 1 ||
5005 #endif
5006 flag_pic)
5007 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5008 gen_rtx (LABEL_REF, Pmode, table_label),
5009 gen_rtvec_v (ncases, labelvec)));
5010 else
5011 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5012 gen_rtvec_v (ncases, labelvec)));
5013
5014 /* If the case insn drops through the table,
5015 after the table we must jump to the default-label.
5016 Otherwise record no drop-through after the table. */
5017 #ifdef CASE_DROPS_THROUGH
5018 emit_jump (default_label);
5019 #else
5020 emit_barrier ();
5021 #endif
5022 }
5023
5024 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5025 reorder_insns (before_case, get_last_insn (),
5026 thiscase->data.case_stmt.start);
5027 }
5028
5029 if (thiscase->exit_label)
5030 emit_label (thiscase->exit_label);
5031
5032 POPSTACK (case_stack);
5033
5034 free_temp_slots ();
5035 }
5036
5037
5038 /* Terminate a case statement. EXPR is the original index
5039 expression. */
5040
5041 static void
5042 bc_expand_end_case (expr)
5043 tree expr;
5044 {
5045 struct nesting *thiscase = case_stack;
5046 enum bytecode_opcode opcode;
5047 struct bc_label *jump_label;
5048 struct case_node *c;
5049
5050 bc_emit_bytecode (jump);
5051 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5052
5053 #ifdef DEBUG_PRINT_CODE
5054 fputc ('\n', stderr);
5055 #endif
5056
5057 /* Now that the size of the jump table is known, emit the actual
5058 indexed jump instruction. */
5059 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
5060
5061 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5062 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5063 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5064
5065 bc_emit_bytecode (opcode);
5066
5067 /* Now emit the case instruction's literal arguments, in order.
5068 In addition to the value on the stack, it uses:
5069 1. The address of the jump table.
5070 2. The size of the jump table.
5071 3. The default label. */
5072
5073 jump_label = bc_get_bytecode_label ();
5074 bc_emit_bytecode_labelref (jump_label);
5075 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5076 sizeof thiscase->data.case_stmt.num_ranges);
5077
5078 if (thiscase->data.case_stmt.default_label)
5079 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5080 else
5081 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5082
5083 /* Output the jump table. */
5084
5085 bc_align_bytecode (3 /* PTR_ALIGN */);
5086 bc_emit_bytecode_labeldef (jump_label);
5087
5088 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5089 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5090 {
5091 opcode = TREE_INT_CST_LOW (c->low);
5092 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5093
5094 opcode = TREE_INT_CST_LOW (c->high);
5095 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5096
5097 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5098 }
5099 else
5100 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5101 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5102 {
5103 bc_emit_bytecode_DI_const (c->low);
5104 bc_emit_bytecode_DI_const (c->high);
5105
5106 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5107 }
5108 else
5109 /* Bad mode */
5110 abort ();
5111
5112
5113 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5114
5115 /* Possibly issue enumeration warnings. */
5116
5117 if (!thiscase->data.case_stmt.default_label
5118 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5119 && TREE_CODE (expr) != INTEGER_CST
5120 && warn_switch)
5121 check_for_full_enumeration_handling (TREE_TYPE (expr));
5122
5123
5124 #ifdef DEBUG_PRINT_CODE
5125 fputc ('\n', stderr);
5126 #endif
5127
5128 POPSTACK (case_stack);
5129 }
5130
5131
5132 /* Return unique bytecode ID. */
5133
5134 int
5135 bc_new_uid ()
5136 {
5137 static int bc_uid = 0;
5138
5139 return (++bc_uid);
5140 }
5141
5142 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5143
5144 static void
5145 do_jump_if_equal (op1, op2, label, unsignedp)
5146 rtx op1, op2, label;
5147 int unsignedp;
5148 {
5149 if (GET_CODE (op1) == CONST_INT
5150 && GET_CODE (op2) == CONST_INT)
5151 {
5152 if (INTVAL (op1) == INTVAL (op2))
5153 emit_jump (label);
5154 }
5155 else
5156 {
5157 enum machine_mode mode = GET_MODE (op1);
5158 if (mode == VOIDmode)
5159 mode = GET_MODE (op2);
5160 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5161 emit_jump_insn (gen_beq (label));
5162 }
5163 }
5164 \f
5165 /* Not all case values are encountered equally. This function
5166 uses a heuristic to weight case labels, in cases where that
5167 looks like a reasonable thing to do.
5168
5169 Right now, all we try to guess is text, and we establish the
5170 following weights:
5171
5172 chars above space: 16
5173 digits: 16
5174 default: 12
5175 space, punct: 8
5176 tab: 4
5177 newline: 2
5178 other "\" chars: 1
5179 remaining chars: 0
5180
5181 If we find any cases in the switch that are not either -1 or in the range
5182 of valid ASCII characters, or are control characters other than those
5183 commonly used with "\", don't treat this switch as scanning text.
5184
5185 Return 1 if these nodes are suitable for cost estimation, otherwise
5186 return 0. */
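/* As an illustration: a switch whose labels are 'a', '0' and '\n' gets
weights 16, 16 and 2, so the splitting code in balance_case_nodes
favors placing the letter and digit cases nearer the root; a label
such as '\001' yields a negative table entry and we return 0,
disabling the heuristic for that switch. */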
5187
5188 static int
5189 estimate_case_costs (node)
5190 case_node_ptr node;
5191 {
5192 tree min_ascii = build_int_2 (-1, -1);
5193 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5194 case_node_ptr n;
5195 int i;
5196
5197 /* If we haven't already made the cost table, make it now. Note that the
5198 lower bound of the table is -1, not zero. */
5199
5200 if (cost_table == NULL)
5201 {
5202 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5203 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5204
5205 for (i = 0; i < 128; i++)
5206 {
5207 if (isalnum (i))
5208 cost_table[i] = 16;
5209 else if (ispunct (i))
5210 cost_table[i] = 8;
5211 else if (iscntrl (i))
5212 cost_table[i] = -1;
5213 }
5214
5215 cost_table[' '] = 8;
5216 cost_table['\t'] = 4;
5217 cost_table['\0'] = 4;
5218 cost_table['\n'] = 2;
5219 cost_table['\f'] = 1;
5220 cost_table['\v'] = 1;
5221 cost_table['\b'] = 1;
5222 }
5223
5224 /* See if all the case expressions look like text. It is text if the
5225 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5226 as signed arithmetic since we don't want to ever access cost_table with a
5227 value less than -1. Also check that none of the constants in a range
5228 are strange control characters. */
5229
5230 for (n = node; n; n = n->right)
5231 {
5232 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5233 return 0;
5234
5235 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5236 if (cost_table[i] < 0)
5237 return 0;
5238 }
5239
5240 /* All interesting values are within the range of interesting
5241 ASCII characters. */
5242 return 1;
5243 }
5244
5245 /* Scan an ordered list of case nodes
5246 combining those with consecutive values or ranges.
5247
5248 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5249
5250 static void
5251 group_case_nodes (head)
5252 case_node_ptr head;
5253 {
5254 case_node_ptr node = head;
5255
5256 while (node)
5257 {
5258 rtx lb = next_real_insn (label_rtx (node->code_label));
5259 case_node_ptr np = node;
5260
5261 /* Try to group the successors of NODE with NODE. */
5262 while (((np = np->right) != 0)
5263 /* Do they jump to the same place? */
5264 && next_real_insn (label_rtx (np->code_label)) == lb
5265 /* Are their ranges consecutive? */
5266 && tree_int_cst_equal (np->low,
5267 fold (build (PLUS_EXPR,
5268 TREE_TYPE (node->high),
5269 node->high,
5270 integer_one_node)))
5271 /* An overflow is not consecutive. */
5272 && tree_int_cst_lt (node->high,
5273 fold (build (PLUS_EXPR,
5274 TREE_TYPE (node->high),
5275 node->high,
5276 integer_one_node))))
5277 {
5278 node->high = np->high;
5279 }
5280 /* NP is the first node after NODE which can't be grouped with it.
5281 Delete the nodes in between, and move on to that node. */
5282 node->right = np;
5283 node = np;
5284 }
5285 }
5286
5287 /* Take an ordered list of case nodes
5288 and transform them into a near optimal binary tree,
5289 on the assumption that any target code selection value is as
5290 likely as any other.
5291
5292 The transformation is performed by splitting the ordered
5293 list into two equal sections plus a pivot. The parts are
5294 then attached to the pivot as left and right branches. Each
5295 branch is then transformed recursively. */
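/* For example (ignoring the cost table): the ordered list 1 2 3 4 5
splits at the pivot 3; nodes 1-2 become the left branch and 4-5 the
right. Branches of two or fewer nodes are not split further; they are
left as chains, with only their `parent' fields filled in. */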
5296
5297 static void
5298 balance_case_nodes (head, parent)
5299 case_node_ptr *head;
5300 case_node_ptr parent;
5301 {
5302 register case_node_ptr np;
5303
5304 np = *head;
5305 if (np)
5306 {
5307 int cost = 0;
5308 int i = 0;
5309 int ranges = 0;
5310 register case_node_ptr *npp;
5311 case_node_ptr left;
5312
5313 /* Count the number of entries on this branch. Also count the ranges. */
5314
5315 while (np)
5316 {
5317 if (!tree_int_cst_equal (np->low, np->high))
5318 {
5319 ranges++;
5320 if (use_cost_table)
5321 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5322 }
5323
5324 if (use_cost_table)
5325 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5326
5327 i++;
5328 np = np->right;
5329 }
5330
5331 if (i > 2)
5332 {
5333 /* Split this list if it is long enough for that to help. */
5334 npp = head;
5335 left = *npp;
5336 if (use_cost_table)
5337 {
5338 /* Find the place in the list that bisects the list's total cost.
5339 Here I gets half the total cost. */
5340 int n_moved = 0;
5341 i = (cost + 1) / 2;
5342 while (1)
5343 {
5344 /* Skip nodes while their cost does not reach that amount. */
5345 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5346 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5347 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5348 if (i <= 0)
5349 break;
5350 npp = &(*npp)->right;
5351 n_moved += 1;
5352 }
5353 if (n_moved == 0)
5354 {
5355 /* Leave this branch lopsided, but optimize left-hand
5356 side and fill in `parent' fields for right-hand side. */
5357 np = *head;
5358 np->parent = parent;
5359 balance_case_nodes (&np->left, np);
5360 for (; np->right; np = np->right)
5361 np->right->parent = np;
5362 return;
5363 }
5364 }
5365 /* If there are just three nodes, split at the middle one. */
5366 else if (i == 3)
5367 npp = &(*npp)->right;
5368 else
5369 {
5370 /* Find the place in the list that bisects the list's total cost,
5371 where ranges count as 2.
5372 Here I gets half the total cost. */
5373 i = (i + ranges + 1) / 2;
5374 while (1)
5375 {
5376 /* Skip nodes while their cost does not reach that amount. */
5377 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5378 i--;
5379 i--;
5380 if (i <= 0)
5381 break;
5382 npp = &(*npp)->right;
5383 }
5384 }
5385 *head = np = *npp;
5386 *npp = 0;
5387 np->parent = parent;
5388 np->left = left;
5389
5390 /* Optimize each of the two split parts. */
5391 balance_case_nodes (&np->left, np);
5392 balance_case_nodes (&np->right, np);
5393 }
5394 else
5395 {
5396 /* Else leave this branch as one level,
5397 but fill in `parent' fields. */
5398 np = *head;
5399 np->parent = parent;
5400 for (; np->right; np = np->right)
5401 np->right->parent = np;
5402 }
5403 }
5404 }
5405 \f
5406 /* Search the parent sections of the case node tree
5407 to see if a test for the lower bound of NODE would be redundant.
5408 INDEX_TYPE is the type of the index expression.
5409
5410 The instructions to generate the case decision tree are
5411 output in the same order as nodes are processed, so if a
5412 parent node has already tested against the current node's
5413 lower bound minus one, the current node is known to be
5414 bounded at its lower span and the test would be redundant. */
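/* For instance: if some parent node's range ends at 9, a node whose
low bound is 10 can only be reached with values already known to
exceed 9, so it needs no lower-bound check of its own. */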
5415
5416 static int
5417 node_has_low_bound (node, index_type)
5418 case_node_ptr node;
5419 tree index_type;
5420 {
5421 tree low_minus_one;
5422 case_node_ptr pnode;
5423
5424 /* If the lower bound of this node is the lowest value in the index type,
5425 we need not test it. */
5426
5427 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5428 return 1;
5429
5430 /* If this node has a left branch, the value at the left must be less
5431 than that at this node, so it cannot be bounded at the bottom and
5432 we need not bother testing any further. */
5433
5434 if (node->left)
5435 return 0;
5436
5437 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5438 node->low, integer_one_node));
5439
5440 /* If the subtraction above overflowed, we can't verify anything.
5441 Otherwise, look for a parent that tests our value - 1. */
5442
5443 if (! tree_int_cst_lt (low_minus_one, node->low))
5444 return 0;
5445
5446 for (pnode = node->parent; pnode; pnode = pnode->parent)
5447 if (tree_int_cst_equal (low_minus_one, pnode->high))
5448 return 1;
5449
5450 return 0;
5451 }
5452
5453 /* Search the parent sections of the case node tree
5454 to see if a test for the upper bound of NODE would be redundant.
5455 INDEX_TYPE is the type of the index expression.
5456
5457 The instructions to generate the case decision tree are
5458 output in the same order as nodes are processed, so if a
5459 parent node has already tested against the current node's
5460 upper bound plus one, the current node is known to be
5461 bounded at its upper span and the test would be redundant. */
5462
5463 static int
5464 node_has_high_bound (node, index_type)
5465 case_node_ptr node;
5466 tree index_type;
5467 {
5468 tree high_plus_one;
5469 case_node_ptr pnode;
5470
5471 /* If the upper bound of this node is the highest value in the type
5472 of the index expression, we need not test against it. */
5473
5474 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5475 return 1;
5476
5477 /* If this node has a right branch, the value at the right must be greater
5478 than that at this node, so it cannot be bounded at the top and
5479 we need not bother testing any further. */
5480
5481 if (node->right)
5482 return 0;
5483
5484 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5485 node->high, integer_one_node));
5486
5487 /* If the addition above overflowed, we can't verify anything.
5488 Otherwise, look for a parent that tests our value + 1. */
5489
5490 if (! tree_int_cst_lt (node->high, high_plus_one))
5491 return 0;
5492
5493 for (pnode = node->parent; pnode; pnode = pnode->parent)
5494 if (tree_int_cst_equal (high_plus_one, pnode->low))
5495 return 1;
5496
5497 return 0;
5498 }
5499
5500 /* Search the parent sections of the
5501 case node tree to see if both tests for the upper and lower
5502 bounds of NODE would be redundant. */
5503
5504 static int
5505 node_is_bounded (node, index_type)
5506 case_node_ptr node;
5507 tree index_type;
5508 {
5509 return (node_has_low_bound (node, index_type)
5510 && node_has_high_bound (node, index_type));
5511 }
5512
5513 /* Emit an unconditional jump to LABEL unless it would be dead code. */
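/* (The insn stream is dead at this point exactly when the last insn
is a BARRIER, which follows unconditional jumps and other insns that
never fall through; hence the test below.) */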
5514
5515 static void
5516 emit_jump_if_reachable (label)
5517 rtx label;
5518 {
5519 if (GET_CODE (get_last_insn ()) != BARRIER)
5520 emit_jump (label);
5521 }
5522 \f
5523 /* Emit step-by-step code to select a case for the value of INDEX.
5524 The thus generated decision tree follows the form of the
5525 case-node binary tree NODE, whose nodes represent test conditions.
5526 INDEX_TYPE is the type of the index of the switch.
5527
5528 Care is taken to prune redundant tests from the decision tree
5529 by detecting any boundary conditions already checked by
5530 emitted rtx. (See node_has_high_bound, node_has_low_bound
5531 and node_is_bounded, above.)
5532
5533 Where the test conditions can be shown to be redundant we emit
5534 an unconditional jump to the target code. As a further
5535 optimization, the subordinates of a tree node are examined to
5536 check for bounded nodes. In this case conditional and/or
5537 unconditional jumps as a result of the boundary check for the
5538 current node are arranged to target the subordinate's associated
5539 code for out-of-bound conditions on the current node.
5540
5541 We can assume that when control reaches the code generated here,
5542 the index value has already been compared with the parents
5543 of this node, and determined to be on the same side of each parent
5544 as this node is. Thus, if this node tests for the value 51,
5545 and a parent tested for 52, we don't need to consider
5546 the possibility of a value greater than 51. If another parent
5547 tests for the value 50, then this node need not test anything. */
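/* As an illustration (pseudo code, not literal RTL): for
case 1: case 5: case 9: with 5 as the root of the balanced tree,
this function together with its caller's trailing default jump
emits code corresponding roughly to

if (index == 5) goto L5;
if (index > 5) goto test_right;
if (index == 1) goto L1;
goto default;
test_right:
if (index == 9) goto L9;
goto default;  */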
5548
5549 static void
5550 emit_case_nodes (index, node, default_label, index_type)
5551 rtx index;
5552 case_node_ptr node;
5553 rtx default_label;
5554 tree index_type;
5555 {
5556 /* If INDEX has an unsigned type, we must make unsigned branches. */
5557 int unsignedp = TREE_UNSIGNED (index_type);
5558 typedef rtx rtx_function ();
5559 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5560 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5561 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5562 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5563 enum machine_mode mode = GET_MODE (index);
5564
5565 /* See if our parents have already tested everything for us.
5566 If they have, emit an unconditional jump for this node. */
5567 if (node_is_bounded (node, index_type))
5568 emit_jump (label_rtx (node->code_label));
5569
5570 else if (tree_int_cst_equal (node->low, node->high))
5571 {
5572 /* Node is single valued. First see if the index expression matches
5573 this node and then check our children, if any. */
5574
5575 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5576 label_rtx (node->code_label), unsignedp);
5577
5578 if (node->right != 0 && node->left != 0)
5579 {
5580 /* This node has children on both sides.
5581 Dispatch to one side or the other
5582 by comparing the index value with this node's value.
5583 If one subtree is bounded, check that one first,
5584 so we can avoid real branches in the tree. */
5585
5586 if (node_is_bounded (node->right, index_type))
5587 {
5588 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5589 VOIDmode, 0),
5590 GT, NULL_RTX, mode, unsignedp, 0);
5591
5592 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5593 emit_case_nodes (index, node->left, default_label, index_type);
5594 }
5595
5596 else if (node_is_bounded (node->left, index_type))
5597 {
5598 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5599 VOIDmode, 0),
5600 LT, NULL_RTX, mode, unsignedp, 0);
5601 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5602 emit_case_nodes (index, node->right, default_label, index_type);
5603 }
5604
5605 else
5606 {
5607 /* Neither node is bounded. First distinguish the two sides;
5608 then emit the code for one side at a time. */
5609
5610 tree test_label
5611 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5612
5613 /* See if the value is on the right. */
5614 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5615 VOIDmode, 0),
5616 GT, NULL_RTX, mode, unsignedp, 0);
5617 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5618
5619 /* Value must be on the left.
5620 Handle the left-hand subtree. */
5621 emit_case_nodes (index, node->left, default_label, index_type);
5622 /* If left-hand subtree does nothing,
5623 go to default. */
5624 emit_jump_if_reachable (default_label);
5625
5626 /* Code branches here for the right-hand subtree. */
5627 expand_label (test_label);
5628 emit_case_nodes (index, node->right, default_label, index_type);
5629 }
5630 }
5631
5632 else if (node->right != 0 && node->left == 0)
5633 {
5634 /* Here we have a right child but no left, so we issue a conditional
5635 branch to default and process the right child.
5636
5637 Omit the conditional branch to default if it would avoid only one
5638 right child; it costs too much space to save so little time. */
5639
5640 if (node->right->right || node->right->left
5641 || !tree_int_cst_equal (node->right->low, node->right->high))
5642 {
5643 if (!node_has_low_bound (node, index_type))
5644 {
5645 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5646 VOIDmode, 0),
5647 LT, NULL_RTX, mode, unsignedp, 0);
5648 emit_jump_insn ((*gen_blt_pat) (default_label));
5649 }
5650
5651 emit_case_nodes (index, node->right, default_label, index_type);
5652 }
5653 else
5654 /* We cannot process node->right normally
5655 since we haven't ruled out the numbers less than
5656 this node's value. So handle node->right explicitly. */
5657 do_jump_if_equal (index,
5658 expand_expr (node->right->low, NULL_RTX,
5659 VOIDmode, 0),
5660 label_rtx (node->right->code_label), unsignedp);
5661 }
5662
5663 else if (node->right == 0 && node->left != 0)
5664 {
5665 /* Just one subtree, on the left. */
5666
5667 #if 0 /* The following code and comment were formerly part
5668 of the condition here, but they didn't work
5669 and I don't understand what the idea was. -- rms. */
5670 /* If our "most probable entry" is less probable
5671 than the default label, emit a jump to
5672 the default label using condition codes
5673 already lying around. With no right branch,
5674 a branch-greater-than will get us to the default
5675 label correctly. */
5676 if (use_cost_table
5677 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5678 ;
5679 #endif /* 0 */
5680 if (node->left->left || node->left->right
5681 || !tree_int_cst_equal (node->left->low, node->left->high))
5682 {
5683 if (!node_has_high_bound (node, index_type))
5684 {
5685 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5686 VOIDmode, 0),
5687 GT, NULL_RTX, mode, unsignedp, 0);
5688 emit_jump_insn ((*gen_bgt_pat) (default_label));
5689 }
5690
5691 emit_case_nodes (index, node->left, default_label, index_type);
5692 }
5693 else
5694 /* We cannot process node->left normally
5695 since we haven't ruled out the numbers greater than
5696 this node's value. So handle node->left explicitly. */
5697 do_jump_if_equal (index,
5698 expand_expr (node->left->low, NULL_RTX,
5699 VOIDmode, 0),
5700 label_rtx (node->left->code_label), unsignedp);
5701 }
5702 }
5703 else
5704 {
5705 /* Node is a range. These cases are very similar to those for a single
5706 value, except that we do not start by testing whether this node
5707 is the one to branch to. */
5708
5709 if (node->right != 0 && node->left != 0)
5710 {
5711 /* Node has subtrees on both sides.
5712 If the right-hand subtree is bounded,
5713 test for it first, since we can go straight there.
5714 Otherwise, we need to make a branch in the control structure,
5715 then handle the two subtrees. */
5716 tree test_label = 0;
5717
5718 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5719 VOIDmode, 0),
5720 GT, NULL_RTX, mode, unsignedp, 0);
5721
5722 if (node_is_bounded (node->right, index_type))
5723 /* Right hand node is fully bounded so we can eliminate any
5724 testing and branch directly to the target code. */
5725 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5726 else
5727 {
5728 /* Right hand node requires testing.
5729 Branch to a label where we will handle it later. */
5730
5731 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5732 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5733 }
5734
5735 /* Value belongs to this node or to the left-hand subtree. */
5736
5737 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5738 GE, NULL_RTX, mode, unsignedp, 0);
5739 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5740
5741 /* Handle the left-hand subtree. */
5742 emit_case_nodes (index, node->left, default_label, index_type);
5743
5744 /* If right node had to be handled later, do that now. */
5745
5746 if (test_label)
5747 {
5748 /* If the left-hand subtree fell through,
5749 don't let it fall into the right-hand subtree. */
5750 emit_jump_if_reachable (default_label);
5751
5752 expand_label (test_label);
5753 emit_case_nodes (index, node->right, default_label, index_type);
5754 }
5755 }
5756
5757 else if (node->right != 0 && node->left == 0)
5758 {
5759 /* Deal with values to the left of this node,
5760 if they are possible. */
5761 if (!node_has_low_bound (node, index_type))
5762 {
5763 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5764 VOIDmode, 0),
5765 LT, NULL_RTX, mode, unsignedp, 0);
5766 emit_jump_insn ((*gen_blt_pat) (default_label));
5767 }
5768
5769 /* Value belongs to this node or to the right-hand subtree. */
5770
5771 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5772 VOIDmode, 0),
5773 LE, NULL_RTX, mode, unsignedp, 0);
5774 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5775
5776 emit_case_nodes (index, node->right, default_label, index_type);
5777 }
5778
5779 else if (node->right == 0 && node->left != 0)
5780 {
5781 /* Deal with values to the right of this node,
5782 if they are possible. */
5783 if (!node_has_high_bound (node, index_type))
5784 {
5785 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5786 VOIDmode, 0),
5787 GT, NULL_RTX, mode, unsignedp, 0);
5788 emit_jump_insn ((*gen_bgt_pat) (default_label));
5789 }
5790
5791 /* Value belongs to this node or to the left-hand subtree. */
5792
5793 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5794 GE, NULL_RTX, mode, unsignedp, 0);
5795 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5796
5797 emit_case_nodes (index, node->left, default_label, index_type);
5798 }
5799
5800 else
5801 {
5802 /* Node has no children, so we check the low and high bounds to remove
5803 redundant tests. At most one of the two bounds can be known,
5804 since otherwise this node would be fully bounded--a case tested already. */
5805
5806 if (!node_has_high_bound (node, index_type))
5807 {
5808 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5809 VOIDmode, 0),
5810 GT, NULL_RTX, mode, unsignedp, 0);
5811 emit_jump_insn ((*gen_bgt_pat) (default_label));
5812 }
5813
5814 if (!node_has_low_bound (node, index_type))
5815 {
5816 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5817 VOIDmode, 0),
5818 LT, NULL_RTX, mode, unsignedp, 0);
5819 emit_jump_insn ((*gen_blt_pat) (default_label));
5820 }
5821
5822 emit_jump (label_rtx (node->code_label));
5823 }
5824 }
5825 }
5826 \f
5827 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5828 so that the debugging info will be correct for the unrolled loop. */
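/* The expected calling sequence (an assumption based on how these
routines are written) is find_loop_tree_blocks before unrolling, so
that block_vector captures the original BLOCK nodes, and
unroll_block_trees afterwards to reorder the tree to match the
possibly-duplicated block notes in the insn stream. */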
5829
5830 /* Indexed by block number, contains a pointer to the N'th block node. */
5831
5832 static tree *block_vector;
5833
5834 void
5835 find_loop_tree_blocks ()
5836 {
5837 tree block = DECL_INITIAL (current_function_decl);
5838
5839 /* The first block is for the function body, and does not have
5840 corresponding block notes. Don't include it in the block vector. */
5841 block = BLOCK_SUBBLOCKS (block);
5842
5843 block_vector = identify_blocks (block, get_insns ());
5844 }
5845
5846 void
5847 unroll_block_trees ()
5848 {
5849 tree block = DECL_INITIAL (current_function_decl);
5850
5851 reorder_blocks (block_vector, block, get_insns ());
5852 }
5853