1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40 #include "config.h"
41
42 #include <stdio.h>
43
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57
58 /* Round a value down to the largest multiple of the required alignment
59 that does not exceed it. Avoid using division in case the value is
60 negative. Assume the alignment is a power of two. */
61 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
62
63 /* Similar, but round up to the smallest multiple of the alignment
64 that is not less than the value. */
65 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
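
/* For example, with an alignment of 8:
   FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16,
   while a value that is already a multiple of 8 is unchanged by both.  */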
66
67 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
68 during rtl generation. If they are different register numbers, this is
69 always true. It may also be true if
70 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
71 generation. See fix_lexical_addr for details. */
72
73 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
74 #define NEED_SEPARATE_AP
75 #endif
76
77 /* Number of bytes of args popped by function being compiled on its return.
78 Zero if no bytes are to be popped.
79 May affect compilation of return insn or of function epilogue. */
80
81 int current_function_pops_args;
82
83 /* Nonzero if function being compiled needs to be given an address
84 where the value should be stored. */
85
86 int current_function_returns_struct;
87
88 /* Nonzero if function being compiled needs to
89 return the address of where it has put a structure value. */
90
91 int current_function_returns_pcc_struct;
92
93 /* Nonzero if function being compiled needs to be passed a static chain. */
94
95 int current_function_needs_context;
96
97 /* Nonzero if function being compiled can call setjmp. */
98
99 int current_function_calls_setjmp;
100
101 /* Nonzero if function being compiled can call longjmp. */
102
103 int current_function_calls_longjmp;
104
105 /* Nonzero if function being compiled receives nonlocal gotos
106 from nested functions. */
107
108 int current_function_has_nonlocal_label;
109
110 /* Nonzero if function being compiled contains nested functions. */
111
112 int current_function_contains_functions;
113
114 /* Nonzero if function being compiled can call alloca,
115 either as a subroutine or builtin. */
116
117 int current_function_calls_alloca;
118
119 /* Nonzero if the current function returns a pointer type */
120
121 int current_function_returns_pointer;
122
123 /* If some insns can be deferred to the delay slots of the epilogue, the
124 delay list for them is recorded here. */
125
126 rtx current_function_epilogue_delay_list;
127
128 /* If function's args have a fixed size, this is that size, in bytes.
129 Otherwise, it is -1.
130 May affect compilation of return insn or of function epilogue. */
131
132 int current_function_args_size;
133
134 /* # bytes the prologue should push and pretend that the caller pushed them.
135 The prologue must do this, but only if parms can be passed in registers. */
136
137 int current_function_pretend_args_size;
138
139 /* # of bytes of outgoing arguments required to be pushed by the prologue.
140 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
141 and no stack adjusts will be done on function calls. */
142
143 int current_function_outgoing_args_size;
144
145 /* This is the offset from the arg pointer to the place where the first
146 anonymous arg can be found, if there is one. */
147
148 rtx current_function_arg_offset_rtx;
149
150 /* Nonzero if current function uses varargs.h or equivalent.
151 Zero for functions that use stdarg.h. */
152
153 int current_function_varargs;
154
155 /* Quantities of various kinds of registers
156 used for the current function's args. */
157
158 CUMULATIVE_ARGS current_function_args_info;
159
160 /* Name of function now being compiled. */
161
162 char *current_function_name;
163
164 /* If non-zero, an RTL expression for the location at which the current
165 function returns its result. Always equal to
166 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
167 independently of the tree structures. */
168
169 rtx current_function_return_rtx;
170
171 /* Nonzero if the current function uses the constant pool. */
172
173 int current_function_uses_const_pool;
174
175 /* Nonzero if the current function uses pic_offset_table_rtx. */
176 int current_function_uses_pic_offset_table;
177
178 /* The arg pointer hard register, or the pseudo into which it was copied. */
179 rtx current_function_internal_arg_pointer;
180
181 /* The FUNCTION_DECL for an inline function currently being expanded. */
182 tree inline_function_decl;
183
184 /* Number of function calls seen so far in current function. */
185
186 int function_call_count;
187
188 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
189 (labels to which there can be nonlocal gotos from nested functions)
190 in this function. */
191
192 tree nonlocal_labels;
193
194 /* RTX for stack slot that holds the current handler for nonlocal gotos.
195 Zero when function does not have nonlocal labels. */
196
197 rtx nonlocal_goto_handler_slot;
198
199 /* RTX for stack slot that holds the stack pointer value to restore
200 for a nonlocal goto.
201 Zero when function does not have nonlocal labels. */
202
203 rtx nonlocal_goto_stack_level;
204
205 /* Label that will go on parm cleanup code, if any.
206 Jumping to this label runs cleanup code for parameters, if
207 such code must be run. Following this code is the logical return label. */
208
209 rtx cleanup_label;
210
211 /* Label that will go on function epilogue.
212 Jumping to this label serves as a "return" instruction
213 on machines which require execution of the epilogue on all returns. */
214
215 rtx return_label;
216
217 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
218 So we can mark them all live at the end of the function, if nonopt. */
219 rtx save_expr_regs;
220
221 /* List (chain of EXPR_LISTs) of all stack slots in this function.
222 Made for the sake of unshare_all_rtl. */
223 rtx stack_slot_list;
224
225 /* Chain of all RTL_EXPRs that have insns in them. */
226 tree rtl_expr_chain;
227
228 /* Label to jump back to for tail recursion, or 0 if we have
229 not yet needed one for this function. */
230 rtx tail_recursion_label;
231
232 /* Place after which to insert the tail_recursion_label if we need one. */
233 rtx tail_recursion_reentry;
234
235 /* Location at which to save the argument pointer if it will need to be
236 referenced. There are two cases where this is done: if nonlocal gotos
237 exist, or if vars stored at an offset from the argument pointer will be
238 needed by inner routines. */
239
240 rtx arg_pointer_save_area;
241
242 /* Offset to end of allocated area of stack frame.
243 If stack grows down, this is the address of the last stack slot allocated.
244 If stack grows up, this is the address for the next slot. */
245 int frame_offset;
246
247 /* List (chain of TREE_LISTs) of static chains for containing functions.
248 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
249 in an RTL_EXPR in the TREE_VALUE. */
250 static tree context_display;
251
252 /* List (chain of TREE_LISTs) of trampolines for nested functions.
253 The trampoline sets up the static chain and jumps to the function.
254 We supply the trampoline's address when the function's address is requested.
255
256 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
257 in an RTL_EXPR in the TREE_VALUE. */
258 static tree trampoline_list;
259
260 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
261 static rtx parm_birth_insn;
262
263 #if 0
264 /* Nonzero if a stack slot has been generated whose address is not
265 actually valid. It means that the generated rtl must all be scanned
266 to detect and correct the invalid addresses where they occur. */
267 static int invalid_stack_slot;
268 #endif
269
270 /* Last insn of those whose job was to put parms into their nominal homes. */
271 static rtx last_parm_insn;
272
273 /* 1 + last pseudo register number used for loading a copy
274 of a parameter of this function. */
275 static int max_parm_reg;
276
277 /* Vector indexed by REGNO, containing location on stack in which
278 to put the parm which is nominally in pseudo register REGNO,
279 if we discover that that parm must go in the stack. */
280 static rtx *parm_reg_stack_loc;
281
282 #if 0 /* Turned off because 0 seems to work just as well. */
283 /* Cleanup lists are required for binding levels regardless of whether
284 that binding level has cleanups or not. This node serves as the
285 cleanup list whenever an empty list is required. */
286 static tree empty_cleanup_list;
287 #endif
288
289 /* Nonzero once virtual register instantiation has been done.
290 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
291 static int virtuals_instantiated;
292
293 /* Nonzero if we need to distinguish between the return value of this function
294 and the return value of a function called by this function. This helps
295 integrate.c */
296
297 extern int rtx_equal_function_value_matters;
298
299 void fixup_gotos ();
300
301 static tree round_down ();
302 static rtx round_trampoline_addr ();
303 static rtx fixup_stack_1 ();
304 static void fixup_var_refs ();
305 static void fixup_var_refs_insns ();
306 static void fixup_var_refs_1 ();
307 static void optimize_bit_field ();
308 static void instantiate_decls ();
309 static void instantiate_decls_1 ();
310 static void instantiate_decl ();
311 static int instantiate_virtual_regs_1 ();
312 static rtx fixup_memory_subreg ();
313 static rtx walk_fixup_memory_subreg ();
314 \f
315 /* In order to evaluate some expressions, such as function calls returning
316 structures in memory, we need to temporarily allocate stack locations.
317 We record each allocated temporary in the following structure.
318
319 Associated with each temporary slot is a nesting level. When we pop up
320 one level, all temporaries associated with the previous level are freed.
321 Normally, all temporaries are freed after the execution of the statement
322 in which they were created. However, if we are inside a ({...}) grouping,
323 the result may be in a temporary and hence must be preserved. If the
324 result could be in a temporary, we preserve it if we can determine which
325 one it is in. If we cannot determine which temporary may contain the
326 result, all temporaries are preserved. A temporary is preserved by
327 pretending it was allocated at the previous nesting level.
328
329 Automatic variables are also assigned temporary slots, at the nesting
330 level where they are defined. They are marked as "kept" so that
331 free_temp_slots will not free them. */
332
333 struct temp_slot
334 {
335 /* Points to next temporary slot. */
336 struct temp_slot *next;
337 /* The rtx used to reference the slot. */
338 rtx slot;
339 /* The size, in units, of the slot. */
340 int size;
341 /* Non-zero if this temporary is currently in use. */
342 char in_use;
343 /* Nesting level at which this slot is being used. */
344 int level;
345 /* Non-zero if this should survive a call to free_temp_slots. */
346 int keep;
347 };
348
349 /* List of all temporaries allocated, both available and in use. */
350
351 struct temp_slot *temp_slots;
352
353 /* Current nesting level for temporaries. */
354
355 int temp_slot_level;
356 \f
357 /* Pointer to chain of `struct function' for containing functions. */
358 struct function *outer_function_chain;
359
360 /* Given a function decl for a containing function,
361 return the `struct function' for it. */
362
363 struct function *
364 find_function_data (decl)
365 tree decl;
366 {
367 struct function *p;
368 for (p = outer_function_chain; p; p = p->next)
369 if (p->decl == decl)
370 return p;
371 abort ();
372 }
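
/* For illustration (hypothetical caller, not taken from this file):
   code that must operate on a variable belonging to an enclosing,
   already-suspended function could locate that function's saved state
   with something like

     struct function *f = find_function_data (decl_function_context (decl));

   find_function_data aborts if DECL is not on outer_function_chain.  */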
373
374 /* Save the current context for compilation of a nested function.
375 This is called from language-specific code.
376 The caller is responsible for saving any language-specific status,
377 since this function knows only about language-independent variables. */
378
379 void
380 push_function_context ()
381 {
382 struct function *p = (struct function *) xmalloc (sizeof (struct function));
383
384 p->next = outer_function_chain;
385 outer_function_chain = p;
386
387 p->name = current_function_name;
388 p->decl = current_function_decl;
389 p->pops_args = current_function_pops_args;
390 p->returns_struct = current_function_returns_struct;
391 p->returns_pcc_struct = current_function_returns_pcc_struct;
392 p->needs_context = current_function_needs_context;
393 p->calls_setjmp = current_function_calls_setjmp;
394 p->calls_longjmp = current_function_calls_longjmp;
395 p->calls_alloca = current_function_calls_alloca;
396 p->has_nonlocal_label = current_function_has_nonlocal_label;
397 p->args_size = current_function_args_size;
398 p->pretend_args_size = current_function_pretend_args_size;
399 p->arg_offset_rtx = current_function_arg_offset_rtx;
400 p->uses_const_pool = current_function_uses_const_pool;
401 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
402 p->internal_arg_pointer = current_function_internal_arg_pointer;
403 p->max_parm_reg = max_parm_reg;
404 p->parm_reg_stack_loc = parm_reg_stack_loc;
405 p->outgoing_args_size = current_function_outgoing_args_size;
406 p->return_rtx = current_function_return_rtx;
407 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
408 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
409 p->nonlocal_labels = nonlocal_labels;
410 p->cleanup_label = cleanup_label;
411 p->return_label = return_label;
412 p->save_expr_regs = save_expr_regs;
413 p->stack_slot_list = stack_slot_list;
414 p->parm_birth_insn = parm_birth_insn;
415 p->frame_offset = frame_offset;
416 p->tail_recursion_label = tail_recursion_label;
417 p->tail_recursion_reentry = tail_recursion_reentry;
418 p->arg_pointer_save_area = arg_pointer_save_area;
419 p->rtl_expr_chain = rtl_expr_chain;
420 p->last_parm_insn = last_parm_insn;
421 p->context_display = context_display;
422 p->trampoline_list = trampoline_list;
423 p->function_call_count = function_call_count;
424 p->temp_slots = temp_slots;
425 p->temp_slot_level = temp_slot_level;
426 p->fixup_var_refs_queue = 0;
427 p->epilogue_delay_list = current_function_epilogue_delay_list;
428
429 save_tree_status (p);
430 save_storage_status (p);
431 save_emit_status (p);
432 init_emit ();
433 save_expr_status (p);
434 save_stmt_status (p);
435 }
436
437 /* Restore the last saved context, at the end of a nested function.
438 This function is called from language-specific code. */
439
440 void
441 pop_function_context ()
442 {
443 struct function *p = outer_function_chain;
444
445 outer_function_chain = p->next;
446
447 current_function_name = p->name;
448 current_function_decl = p->decl;
449 current_function_pops_args = p->pops_args;
450 current_function_returns_struct = p->returns_struct;
451 current_function_returns_pcc_struct = p->returns_pcc_struct;
452 current_function_needs_context = p->needs_context;
453 current_function_calls_setjmp = p->calls_setjmp;
454 current_function_calls_longjmp = p->calls_longjmp;
455 current_function_calls_alloca = p->calls_alloca;
456 current_function_has_nonlocal_label = p->has_nonlocal_label;
457 current_function_contains_functions = 1;
458 current_function_args_size = p->args_size;
459 current_function_pretend_args_size = p->pretend_args_size;
460 current_function_arg_offset_rtx = p->arg_offset_rtx;
461 current_function_uses_const_pool = p->uses_const_pool;
462 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
463 current_function_internal_arg_pointer = p->internal_arg_pointer;
464 max_parm_reg = p->max_parm_reg;
465 parm_reg_stack_loc = p->parm_reg_stack_loc;
466 current_function_outgoing_args_size = p->outgoing_args_size;
467 current_function_return_rtx = p->return_rtx;
468 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
469 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
470 nonlocal_labels = p->nonlocal_labels;
471 cleanup_label = p->cleanup_label;
472 return_label = p->return_label;
473 save_expr_regs = p->save_expr_regs;
474 stack_slot_list = p->stack_slot_list;
475 parm_birth_insn = p->parm_birth_insn;
476 frame_offset = p->frame_offset;
477 tail_recursion_label = p->tail_recursion_label;
478 tail_recursion_reentry = p->tail_recursion_reentry;
479 arg_pointer_save_area = p->arg_pointer_save_area;
480 rtl_expr_chain = p->rtl_expr_chain;
481 last_parm_insn = p->last_parm_insn;
482 context_display = p->context_display;
483 trampoline_list = p->trampoline_list;
484 function_call_count = p->function_call_count;
485 temp_slots = p->temp_slots;
486 temp_slot_level = p->temp_slot_level;
487 current_function_epilogue_delay_list = p->epilogue_delay_list;
488
489 restore_tree_status (p);
490 restore_storage_status (p);
491 restore_expr_status (p);
492 restore_emit_status (p);
493 restore_stmt_status (p);
494
495 /* Finish doing put_var_into_stack for any of our variables
496 which became addressable during the nested function. */
497 {
498 struct var_refs_queue *queue = p->fixup_var_refs_queue;
499 for (; queue; queue = queue->next)
500 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
501 }
502
503 free (p);
504
505 /* Reset variables that have known state during rtx generation. */
506 rtx_equal_function_value_matters = 1;
507 virtuals_instantiated = 0;
508 }
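
/* For illustration (hypothetical sketch of the expected pairing):
   language-specific code that expands a nested function brackets the
   work roughly like

     push_function_context ();
     ... expand the nested FUNCTION_DECL ...
     pop_function_context ();

   so that the enclosing function's status is saved and then restored.  */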
509 \f
510 /* Allocate fixed slots in the stack frame of the current function. */
511
512 /* Return size needed for stack frame based on slots so far allocated.
513 This size counts from zero. It is not rounded to STACK_BOUNDARY;
514 the caller may have to do that. */
515
516 int
517 get_frame_size ()
518 {
519 #ifdef FRAME_GROWS_DOWNWARD
520 return -frame_offset;
521 #else
522 return frame_offset;
523 #endif
524 }
525
526 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
527 with machine mode MODE.
528
529 ALIGN controls the amount of alignment for the address of the slot:
530 0 means according to MODE,
531 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
532 positive specifies alignment boundary in bits.
533
534 We do not round to stack_boundary here. */
535
536 rtx
537 assign_stack_local (mode, size, align)
538 enum machine_mode mode;
539 int size;
540 int align;
541 {
542 register rtx x, addr;
543 int bigend_correction = 0;
544 int alignment;
545
546 if (align == 0)
547 {
548 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
549 if (mode == BLKmode)
550 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
551 }
552 else if (align == -1)
553 {
554 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
555 size = CEIL_ROUND (size, alignment);
556 }
557 else
558 alignment = align / BITS_PER_UNIT;
559
560 /* Round frame offset to that alignment.
561 We must be careful here, since FRAME_OFFSET might be negative and
562 division with a negative dividend isn't as well defined as we might
563 like. So we instead assume that ALIGNMENT is a power of two and
564 use logical operations which are unambiguous. */
565 #ifdef FRAME_GROWS_DOWNWARD
566 frame_offset = FLOOR_ROUND (frame_offset, alignment);
567 #else
568 frame_offset = CEIL_ROUND (frame_offset, alignment);
569 #endif
570
571 /* On a big-endian machine, if we are allocating more space than we will use,
572 use the least significant bytes of those that are allocated. */
573 #if BYTES_BIG_ENDIAN
574 if (mode != BLKmode)
575 bigend_correction = size - GET_MODE_SIZE (mode);
576 #endif
577
578 #ifdef FRAME_GROWS_DOWNWARD
579 frame_offset -= size;
580 #endif
581
582 /* If we have already instantiated virtual registers, return the actual
583 address relative to the frame pointer. */
584 if (virtuals_instantiated)
585 addr = plus_constant (frame_pointer_rtx,
586 (frame_offset + bigend_correction
587 + STARTING_FRAME_OFFSET));
588 else
589 addr = plus_constant (virtual_stack_vars_rtx,
590 frame_offset + bigend_correction);
591
592 #ifndef FRAME_GROWS_DOWNWARD
593 frame_offset += size;
594 #endif
595
596 x = gen_rtx (MEM, mode, addr);
597
598 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
599
600 return x;
601 }
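
/* For illustration (hypothetical call, not from this file): a caller
   needing a frame slot for one SImode value with its natural alignment
   might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Until virtual registers are instantiated, the returned MEM's address
   is relative to virtual_stack_vars_rtx rather than the frame pointer.  */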
602
603 /* Assign a stack slot in a containing function.
604 The first three arguments are the same as in the preceding function.
605 The last argument specifies the function to allocate in. */
606
607 rtx
608 assign_outer_stack_local (mode, size, align, function)
609 enum machine_mode mode;
610 int size;
611 int align;
612 struct function *function;
613 {
614 register rtx x, addr;
615 int bigend_correction = 0;
616 int alignment;
617
618 /* Allocate in the memory associated with the function in whose frame
619 we are assigning. */
620 push_obstacks (function->function_obstack,
621 function->function_maybepermanent_obstack);
622
623 if (align == 0)
624 {
625 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
626 if (mode == BLKmode)
627 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
628 }
629 else if (align == -1)
630 {
631 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
632 size = CEIL_ROUND (size, alignment);
633 }
634 else
635 alignment = align / BITS_PER_UNIT;
636
637 /* Round frame offset to that alignment. */
638 #ifdef FRAME_GROWS_DOWNWARD
639 frame_offset = FLOOR_ROUND (frame_offset, alignment);
640 #else
641 frame_offset = CEIL_ROUND (frame_offset, alignment);
642 #endif
643
644 /* On a big-endian machine, if we are allocating more space than we will use,
645 use the least significant bytes of those that are allocated. */
646 #if BYTES_BIG_ENDIAN
647 if (mode != BLKmode)
648 bigend_correction = size - GET_MODE_SIZE (mode);
649 #endif
650
651 #ifdef FRAME_GROWS_DOWNWARD
652 function->frame_offset -= size;
653 #endif
654 addr = plus_constant (virtual_stack_vars_rtx,
655 function->frame_offset + bigend_correction);
656 #ifndef FRAME_GROWS_DOWNWARD
657 function->frame_offset += size;
658 #endif
659
660 x = gen_rtx (MEM, mode, addr);
661
662 function->stack_slot_list
663 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
664
665 pop_obstacks ();
666
667 return x;
668 }
669 \f
670 /* Allocate a temporary stack slot and record it for possible later
671 reuse.
672
673 MODE is the machine mode to be given to the returned rtx.
674
675 SIZE is the size in units of the space required. We do no rounding here
676 since assign_stack_local will do any required rounding.
677
678 KEEP is non-zero if this slot is to be retained after a call to
679 free_temp_slots. Automatic variables for a block are allocated with this
680 flag. */
681
682 rtx
683 assign_stack_temp (mode, size, keep)
684 enum machine_mode mode;
685 int size;
686 int keep;
687 {
688 struct temp_slot *p, *best_p = 0;
689
690 /* First try to find an available, already-allocated temporary that is the
691 exact size we require. */
692 for (p = temp_slots; p; p = p->next)
693 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
694 break;
695
696 /* If we didn't find one, try one that is larger than what we want. We
697 find the smallest such. */
698 if (p == 0)
699 for (p = temp_slots; p; p = p->next)
700 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
701 && (best_p == 0 || best_p->size > p->size))
702 best_p = p;
703
704 /* Make our best, if any, the one to use. */
705 if (best_p)
706 p = best_p;
707
708 /* If we still didn't find one, make a new temporary. */
709 if (p == 0)
710 {
711 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
712 p->size = size;
713 /* If the temp slot mode doesn't indicate the alignment,
714 use the largest possible, so no one will be disappointed. */
715 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
716 p->next = temp_slots;
717 temp_slots = p;
718 }
719
720 p->in_use = 1;
721 p->level = temp_slot_level;
722 p->keep = keep;
723 return p->slot;
724 }
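
/* For illustration (hypothetical call): scratch memory good for the
   current statement only could be obtained with

     rtx temp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   With KEEP == 0 the slot is released by the next free_temp_slots and
   may be handed out again by a later assign_stack_temp.  */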
725 \f
726 /* If X could be a reference to a temporary slot, mark that slot as belonging
727 to the level one higher. If X matched one of our slots, just mark that
728 one. Otherwise, we can't easily predict which it is, so upgrade all of
729 them. Kept slots need not be touched.
730
731 This is called when an ({...}) construct occurs and a statement
732 returns a value in memory. */
733
734 void
735 preserve_temp_slots (x)
736 rtx x;
737 {
738 struct temp_slot *p;
739
740 /* If X is not in memory or is at a constant address, it cannot be in
741 a temporary slot. */
742 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
743 return;
744
745 /* First see if we can find a match. */
746 for (p = temp_slots; p; p = p->next)
747 if (p->in_use && x == p->slot)
748 {
749 p->level--;
750 return;
751 }
752
753 /* Otherwise, preserve all non-kept slots at this level. */
754 for (p = temp_slots; p; p = p->next)
755 if (p->in_use && p->level == temp_slot_level && ! p->keep)
756 p->level--;
757 }
758
759 /* Free all temporaries used so far. This is normally called at the end
760 of generating code for a statement. */
761
762 void
763 free_temp_slots ()
764 {
765 struct temp_slot *p;
766
767 for (p = temp_slots; p; p = p->next)
768 if (p->in_use && p->level == temp_slot_level && ! p->keep)
769 p->in_use = 0;
770 }
771
772 /* Push deeper into the nesting level for stack temporaries. */
773
774 void
775 push_temp_slots ()
776 {
777 /* For GNU C++, we must allow a sequence to be emitted anywhere in
778 the level where the sequence was started. By not changing levels
779 when the compiler is inside a sequence, the temporaries for the
780 sequence will not unwittingly conflict with the temporaries for
781 other sequences and/or code at that level. */
782 if (in_sequence_p ())
783 return;
784
785 temp_slot_level++;
786 }
787
788 /* Pop a temporary nesting level. All slots in use in the current level
789 are freed. */
790
791 void
792 pop_temp_slots ()
793 {
794 struct temp_slot *p;
795
796 /* See comment in push_temp_slots about why we don't change levels
797 in sequences. */
798 if (in_sequence_p ())
799 return;
800
801 for (p = temp_slots; p; p = p->next)
802 if (p->in_use && p->level == temp_slot_level)
803 p->in_use = 0;
804
805 temp_slot_level--;
806 }
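
/* For illustration (hypothetical sketch): code that evaluates an
   expression at a deeper temporary level, then keeps a result that may
   live in one of those temporaries, follows this pattern:

     push_temp_slots ();
     ... allocate temporaries with assign_stack_temp ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   preserve_temp_slots moves the relevant slots to the enclosing level,
   so they survive the pop; everything else at the current level is freed.  */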
807 \f
808 /* Retroactively move an auto variable from a register to a stack slot.
809 This is done when an address-reference to the variable is seen. */
810
811 void
812 put_var_into_stack (decl)
813 tree decl;
814 {
815 register rtx reg;
816 register rtx new = 0;
817 enum machine_mode promoted_mode, decl_mode;
818 struct function *function = 0;
819 tree context = decl_function_context (decl);
820
821 /* Get the current rtl used for this object and its original mode. */
822 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
823
824 /* No need to do anything if decl has no rtx yet
825 since in that case caller is setting TREE_ADDRESSABLE
826 and a stack slot will be assigned when the rtl is made. */
827 if (reg == 0)
828 return;
829
830 /* Get the declared mode for this object. */
831 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
832 : DECL_MODE (decl));
833 /* Get the mode it's actually stored in. */
834 promoted_mode = GET_MODE (reg);
835
836 /* If this variable comes from an outer function,
837 find that function's saved context. */
838 if (context != current_function_decl)
839 for (function = outer_function_chain; function; function = function->next)
840 if (function->decl == context)
841 break;
842
843 /* If this is a variable-size object with a pseudo to address it,
844 put that pseudo into the stack, if the var is nonlocal. */
845 if (DECL_NONLOCAL (decl)
846 && GET_CODE (reg) == MEM
847 && GET_CODE (XEXP (reg, 0)) == REG
848 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
849 reg = XEXP (reg, 0);
850 if (GET_CODE (reg) != REG)
851 return;
852
853 if (function)
854 {
855 if (REGNO (reg) < function->max_parm_reg)
856 new = function->parm_reg_stack_loc[REGNO (reg)];
857 if (new == 0)
858 new = assign_outer_stack_local (GET_MODE (reg),
859 GET_MODE_SIZE (decl_mode),
860 0, function);
861 }
862 else
863 {
864 if (REGNO (reg) < max_parm_reg)
865 new = parm_reg_stack_loc[REGNO (reg)];
866 if (new == 0)
867 new = assign_stack_local (GET_MODE (reg),
868 GET_MODE_SIZE (decl_mode), 0);
869 }
870
871 XEXP (reg, 0) = XEXP (new, 0);
872 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
873 REG_USERVAR_P (reg) = 0;
874 PUT_CODE (reg, MEM);
875 PUT_MODE (reg, decl_mode);
876
877 /* If this is a memory ref that contains aggregate components,
878 mark it as such for cse and loop optimize. */
879 MEM_IN_STRUCT_P (reg)
880 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
881 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
882 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
883
884 /* Now make sure that all refs to the variable, previously made
885 when it was a register, are fixed up to be valid again. */
886 if (function)
887 {
888 struct var_refs_queue *temp;
889
890 /* Variable is inherited; fix it up when we get back to its function. */
891 push_obstacks (function->function_obstack,
892 function->function_maybepermanent_obstack);
893 temp
894 = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
895 temp->modified = reg;
896 temp->promoted_mode = promoted_mode;
897 temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl));
898 temp->next = function->fixup_var_refs_queue;
899 function->fixup_var_refs_queue = temp;
900 pop_obstacks ();
901 }
902 else
903 /* Variable is local; fix it up now. */
904 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl)));
905 }
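
/* For illustration (hypothetical rtl): a variable whose DECL_RTL was
   (reg:SI 42) is rewritten in place, so after put_var_into_stack the
   very same rtx object reads roughly as
   (mem:SI (plus (reg virtual-stack-vars) (const_int N))),
   where N is whatever frame offset assign_stack_local picked.  Every
   insn that already referred to the old REG therefore sees the MEM,
   which is why fixup_var_refs must then revalidate those insns.  */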
906 \f
907 static void
908 fixup_var_refs (var, promoted_mode, unsignedp)
909 rtx var;
910 enum machine_mode promoted_mode;
911 int unsignedp;
912 {
913 tree pending;
914 rtx first_insn = get_insns ();
915 struct sequence_stack *stack = sequence_stack;
916 tree rtl_exps = rtl_expr_chain;
917
918 /* Must scan all insns in the function for references to VAR. */
919 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
920
921 /* Scan all pending sequences too. */
922 for (; stack; stack = stack->next)
923 {
924 push_to_sequence (stack->first);
925 fixup_var_refs_insns (var, promoted_mode, unsignedp,
926 stack->first, stack->next != 0);
927 /* Update remembered end of sequence
928 in case we added an insn at the end. */
929 stack->last = get_last_insn ();
930 end_sequence ();
931 }
932
933 /* Scan all waiting RTL_EXPRs too. */
934 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
935 {
936 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
937 if (seq != const0_rtx && seq != 0)
938 {
939 push_to_sequence (seq);
940 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
941 end_sequence ();
942 }
943 }
944 }
945 \f
946 /* This structure is used by the following two functions to record MEMs or
947 pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
948 VAR as an address. We need to maintain this list in case two operands of
949 an insn were required to match; in that case we must ensure we use the
950 same replacement. */
951
952 struct fixup_replacement
953 {
954 rtx old;
955 rtx new;
956 struct fixup_replacement *next;
957 };
958
959 /* REPLACEMENTS is a pointer to a list of the above structures and X is
960 some part of an insn. Return a struct fixup_replacement whose OLD
961 value is equal to X. Allocate a new structure if no such entry exists. */
962
963 static struct fixup_replacement *
964 find_replacement (replacements, x)
965 struct fixup_replacement **replacements;
966 rtx x;
967 {
968 struct fixup_replacement *p;
969
970 /* See if we have already replaced this. */
971 for (p = *replacements; p && p->old != x; p = p->next)
972 ;
973
974 if (p == 0)
975 {
976 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
977 p->old = x;
978 p->new = 0;
979 p->next = *replacements;
980 *replacements = p;
981 }
982
983 return p;
984 }
985
986 /* Scan the insn-chain starting with INSN for refs to VAR
987 and fix them up. TOPLEVEL is nonzero if this chain is the
988 main chain of insns for the current function. */
989
990 static void
991 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
992 rtx var;
993 enum machine_mode promoted_mode;
994 int unsignedp;
995 rtx insn;
996 int toplevel;
997 {
998 while (insn)
999 {
1000 rtx next = NEXT_INSN (insn);
1001 rtx note;
1002 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1003 || GET_CODE (insn) == JUMP_INSN)
1004 {
1005 /* The insn to load VAR from a home in the arglist
1006 is now a no-op. When we see it, just delete it. */
1007 if (toplevel
1008 && GET_CODE (PATTERN (insn)) == SET
1009 && SET_DEST (PATTERN (insn)) == var
1010 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1011 {
1012 next = delete_insn (insn);
1013 if (insn == last_parm_insn)
1014 last_parm_insn = PREV_INSN (next);
1015 }
1016 else
1017 {
1018 /* See if we have to do anything to INSN now that VAR is in
1019 memory. If it needs to be loaded into a pseudo, use a single
1020 pseudo for the entire insn in case there is a MATCH_DUP
1021 between two operands. We pass a pointer to the head of
1022 a list of struct fixup_replacements. If fixup_var_refs_1
1023 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1024 it will record them in this list.
1025
1026 If it allocated a pseudo for any replacement, we copy into
1027 it here. */
1028
1029 struct fixup_replacement *replacements = 0;
1030
1031 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1032 &replacements);
1033
1034 while (replacements)
1035 {
1036 if (GET_CODE (replacements->new) == REG)
1037 {
1038 rtx insert_before;
1039 rtx seq;
1040
1041 /* OLD might be a (subreg (mem)). */
1042 if (GET_CODE (replacements->old) == SUBREG)
1043 replacements->old
1044 = fixup_memory_subreg (replacements->old, insn, 0);
1045 else
1046 replacements->old
1047 = fixup_stack_1 (replacements->old, insn);
1048
1049 /* We can not separate USE insns from the CALL_INSN
1050 that they belong to. If this is a CALL_INSN, insert
1051 the move insn before the USE insns preceding it
1052 instead of immediately before the insn. */
1053 if (GET_CODE (insn) == CALL_INSN)
1054 {
1055 insert_before = insn;
1056 while (GET_CODE (PREV_INSN (insert_before)) == INSN
1057 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
1058 insert_before = PREV_INSN (insert_before);
1059 }
1060 else
1061 insert_before = insn;
1062
1063 /* If we are changing the mode, do a conversion.
1064 This might be wasteful, but combine.c will
1065 eliminate much of the waste. */
1066
1067 if (GET_MODE (replacements->new)
1068 != GET_MODE (replacements->old))
1069 {
1070 start_sequence ();
1071 convert_move (replacements->new,
1072 replacements->old, unsignedp);
1073 seq = gen_sequence ();
1074 end_sequence ();
1075 }
1076 else
1077 seq = gen_move_insn (replacements->new,
1078 replacements->old);
1079
1080 emit_insn_before (seq, insert_before);
1081 }
1082
1083 replacements = replacements->next;
1084 }
1085 }
1086
1087 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1088 But don't touch other insns referred to by reg-notes;
1089 we will get them elsewhere. */
1090 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1091 if (GET_CODE (note) != INSN_LIST)
1092 XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
1093 }
1094 insn = next;
1095 }
1096 }
1097 \f
1098 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1099 See if the rtx expression at *LOC in INSN needs to be changed.
1100
1101 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1102 contain a list of original rtx's and replacements. If we find that we need
1103 to modify this insn by replacing a memory reference with a pseudo or by
1104 making a new MEM to implement a SUBREG, we consult that list to see if
1105 we have already chosen a replacement. If none has already been allocated,
1106 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1107 or the SUBREG, as appropriate, to the pseudo. */
1108
1109 static void
1110 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1111 register rtx var;
1112 enum machine_mode promoted_mode;
1113 register rtx *loc;
1114 rtx insn;
1115 struct fixup_replacement **replacements;
1116 {
1117 register int i;
1118 register rtx x = *loc;
1119 RTX_CODE code = GET_CODE (x);
1120 register char *fmt;
1121 register rtx tem, tem1;
1122 struct fixup_replacement *replacement;
1123
1124 switch (code)
1125 {
1126 case MEM:
1127 if (var == x)
1128 {
1129 /* If we already have a replacement, use it. Otherwise,
1130 try to fix up this address in case it is invalid. */
1131
1132 replacement = find_replacement (replacements, var);
1133 if (replacement->new)
1134 {
1135 *loc = replacement->new;
1136 return;
1137 }
1138
1139 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1140
1141 /* Unless we are forcing memory to register or we changed the mode,
1142 we can leave things the way they are if the insn is valid. */
1143
1144 INSN_CODE (insn) = -1;
1145 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1146 && recog_memoized (insn) >= 0)
1147 return;
1148
1149 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1150 return;
1151 }
1152
1153 /* If X contains VAR, we need to unshare it here so that we update
1154 each occurrence separately. But all identical MEMs in one insn
1155 must be replaced with the same rtx because of the possibility of
1156 MATCH_DUPs. */
1157
1158 if (reg_mentioned_p (var, x))
1159 {
1160 replacement = find_replacement (replacements, x);
1161 if (replacement->new == 0)
1162 replacement->new = copy_most_rtx (x, var);
1163
1164 *loc = x = replacement->new;
1165 }
1166 break;
1167
1168 case REG:
1169 case CC0:
1170 case PC:
1171 case CONST_INT:
1172 case CONST:
1173 case SYMBOL_REF:
1174 case LABEL_REF:
1175 case CONST_DOUBLE:
1176 return;
1177
1178 case SIGN_EXTRACT:
1179 case ZERO_EXTRACT:
1180 /* Note that in some cases those types of expressions are altered
1181 by optimize_bit_field, and do not survive to get here. */
1182 if (XEXP (x, 0) == var
1183 || (GET_CODE (XEXP (x, 0)) == SUBREG
1184 && SUBREG_REG (XEXP (x, 0)) == var))
1185 {
1186 /* Get TEM as a valid MEM in the mode presently in the insn.
1187
1188 We don't worry about the possibility of MATCH_DUP here; it
1189 is highly unlikely and would be tricky to handle. */
1190
1191 tem = XEXP (x, 0);
1192 if (GET_CODE (tem) == SUBREG)
1193 tem = fixup_memory_subreg (tem, insn, 1);
1194 tem = fixup_stack_1 (tem, insn);
1195
1196 /* Unless we want to load from memory, get TEM into the proper mode
1197 for an extract from memory. This can only be done if the
1198 extract is at a constant position and length. */
1199
1200 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1201 && GET_CODE (XEXP (x, 2)) == CONST_INT
1202 && ! mode_dependent_address_p (XEXP (tem, 0))
1203 && ! MEM_VOLATILE_P (tem))
1204 {
1205 enum machine_mode wanted_mode = VOIDmode;
1206 enum machine_mode is_mode = GET_MODE (tem);
1207 int width = INTVAL (XEXP (x, 1));
1208 int pos = INTVAL (XEXP (x, 2));
1209
1210 #ifdef HAVE_extzv
1211 if (GET_CODE (x) == ZERO_EXTRACT)
1212 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1213 #endif
1214 #ifdef HAVE_extv
1215 if (GET_CODE (x) == SIGN_EXTRACT)
1216 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1217 #endif
1218 /* If we have a narrower mode, we can do something. */
1219 if (wanted_mode != VOIDmode
1220 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1221 {
1222 int offset = pos / BITS_PER_UNIT;
1223 rtx old_pos = XEXP (x, 2);
1224 rtx newmem;
1225
1226 /* If the bytes and bits are counted differently, we
1227 must adjust the offset. */
1228 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1229 offset = (GET_MODE_SIZE (is_mode)
1230 - GET_MODE_SIZE (wanted_mode) - offset);
1231 #endif
1232
1233 pos %= GET_MODE_BITSIZE (wanted_mode);
1234
1235 newmem = gen_rtx (MEM, wanted_mode,
1236 plus_constant (XEXP (tem, 0), offset));
1237 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1238 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1239 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1240
1241 /* Make the change and see if the insn remains valid. */
1242 INSN_CODE (insn) = -1;
1243 XEXP (x, 0) = newmem;
1244 XEXP (x, 2) = GEN_INT (pos);
1245
1246 if (recog_memoized (insn) >= 0)
1247 return;
1248
1249 /* Otherwise, restore old position. XEXP (x, 0) will be
1250 restored later. */
1251 XEXP (x, 2) = old_pos;
1252 }
1253 }
1254
1255 /* If we get here, the bitfield extract insn can't accept a memory
1256 reference. Copy the input into a register. */
1257
1258 tem1 = gen_reg_rtx (GET_MODE (tem));
1259 emit_insn_before (gen_move_insn (tem1, tem), insn);
1260 XEXP (x, 0) = tem1;
1261 return;
1262 }
1263 break;
1264
1265 case SUBREG:
1266 if (SUBREG_REG (x) == var)
1267 {
1268 /* If this is a special SUBREG made because VAR was promoted
1269 from a wider mode, replace it with VAR and call ourself
1270 recursively, this time saying that the object previously
1271 had its current mode (by virtue of the SUBREG). */
1272
1273 if (SUBREG_PROMOTED_VAR_P (x))
1274 {
1275 *loc = var;
1276 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1277 return;
1278 }
1279
1280 /* If this SUBREG makes VAR wider, it has become a paradoxical
1281 SUBREG with VAR in memory, but these aren't allowed at this
1282 stage of the compilation. So load VAR into a pseudo and take
1283 a SUBREG of that pseudo. */
1284 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1285 {
1286 replacement = find_replacement (replacements, var);
1287 if (replacement->new == 0)
1288 replacement->new = gen_reg_rtx (GET_MODE (var));
1289 SUBREG_REG (x) = replacement->new;
1290 return;
1291 }
1292
1293 /* See if we have already found a replacement for this SUBREG.
1294 If so, use it. Otherwise, make a MEM and see if the insn
1295 is recognized. If not, or if we should force MEM into a register,
1296 make a pseudo for this SUBREG. */
1297 replacement = find_replacement (replacements, x);
1298 if (replacement->new)
1299 {
1300 *loc = replacement->new;
1301 return;
1302 }
1303
1304 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1305
1306 if (! flag_force_mem && recog_memoized (insn) >= 0)
1307 return;
1308
1309 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1310 return;
1311 }
1312 break;
1313
1314 case SET:
1315 /* First do special simplification of bit-field references. */
1316 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1317 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1318 optimize_bit_field (x, insn, 0);
1319 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1320 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1321 optimize_bit_field (x, insn, NULL_PTR);
1322
1323 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1324 insn into a pseudo and store the low part of the pseudo into VAR. */
1325 if (GET_CODE (SET_DEST (x)) == SUBREG
1326 && SUBREG_REG (SET_DEST (x)) == var
1327 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1328 > GET_MODE_SIZE (GET_MODE (var))))
1329 {
1330 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1331 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1332 tem)),
1333 insn);
1334 break;
1335 }
1336
1337 {
1338 rtx dest = SET_DEST (x);
1339 rtx src = SET_SRC (x);
1340 rtx outerdest = dest;
1341
1342 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1343 || GET_CODE (dest) == SIGN_EXTRACT
1344 || GET_CODE (dest) == ZERO_EXTRACT)
1345 dest = XEXP (dest, 0);
1346
1347 if (GET_CODE (src) == SUBREG)
1348 src = XEXP (src, 0);
1349
1350 /* If VAR does not appear at the top level of the SET
1351 just scan the lower levels of the tree. */
1352
1353 if (src != var && dest != var)
1354 break;
1355
1356 /* We will need to rerecognize this insn. */
1357 INSN_CODE (insn) = -1;
1358
1359 #ifdef HAVE_insv
1360 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1361 {
1362 /* Since this case will return, ensure we fixup all the
1363 operands here. */
1364 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1365 insn, replacements);
1366 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1367 insn, replacements);
1368 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1369 insn, replacements);
1370
1371 tem = XEXP (outerdest, 0);
1372
1373 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1374 that may appear inside a ZERO_EXTRACT.
1375 This was legitimate when the MEM was a REG. */
1376 if (GET_CODE (tem) == SUBREG
1377 && SUBREG_REG (tem) == var)
1378 tem = fixup_memory_subreg (tem, insn, 1);
1379 else
1380 tem = fixup_stack_1 (tem, insn);
1381
1382 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1383 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1384 && ! mode_dependent_address_p (XEXP (tem, 0))
1385 && ! MEM_VOLATILE_P (tem))
1386 {
1387 enum machine_mode wanted_mode
1388 = insn_operand_mode[(int) CODE_FOR_insv][0];
1389 enum machine_mode is_mode = GET_MODE (tem);
1390 int width = INTVAL (XEXP (outerdest, 1));
1391 int pos = INTVAL (XEXP (outerdest, 2));
1392
1393 /* If we have a narrower mode, we can do something. */
1394 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1395 {
1396 int offset = pos / BITS_PER_UNIT;
1397 rtx old_pos = XEXP (outerdest, 2);
1398 rtx newmem;
1399
1400 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1401 offset = (GET_MODE_SIZE (is_mode)
1402 - GET_MODE_SIZE (wanted_mode) - offset);
1403 #endif
1404
1405 pos %= GET_MODE_BITSIZE (wanted_mode);
1406
1407 newmem = gen_rtx (MEM, wanted_mode,
1408 plus_constant (XEXP (tem, 0), offset));
1409 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1410 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1411 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1412
1413 /* Make the change and see if the insn remains valid. */
1414 INSN_CODE (insn) = -1;
1415 XEXP (outerdest, 0) = newmem;
1416 XEXP (outerdest, 2) = GEN_INT (pos);
1417
1418 if (recog_memoized (insn) >= 0)
1419 return;
1420
1421 /* Otherwise, restore old position. XEXP (x, 0) will be
1422 restored later. */
1423 XEXP (outerdest, 2) = old_pos;
1424 }
1425 }
1426
1427 /* If we get here, the bit-field store doesn't allow memory
1428 or isn't located at a constant position. Load the value into
1429 a register, do the store, and put it back into memory. */
1430
1431 tem1 = gen_reg_rtx (GET_MODE (tem));
1432 emit_insn_before (gen_move_insn (tem1, tem), insn);
1433 emit_insn_after (gen_move_insn (tem, tem1), insn);
1434 XEXP (outerdest, 0) = tem1;
1435 return;
1436 }
1437 #endif
1438
1439 /* STRICT_LOW_PART is a no-op on memory references
1440 and it can cause combinations to be unrecognizable,
1441 so eliminate it. */
1442
1443 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
1444 SET_DEST (x) = XEXP (SET_DEST (x), 0);
1445
1446 /* A valid insn to copy VAR into or out of a register
1447 must be left alone, to avoid an infinite loop here.
1448 If the reference to VAR is by a subreg, fix that up,
1449 since SUBREG is not valid for a memref.
1450 Also fix up the address of the stack slot. */
1451
1452 if ((SET_SRC (x) == var
1453 || (GET_CODE (SET_SRC (x)) == SUBREG
1454 && SUBREG_REG (SET_SRC (x)) == var))
1455 && (GET_CODE (SET_DEST (x)) == REG
1456 || (GET_CODE (SET_DEST (x)) == SUBREG
1457 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1458 && recog_memoized (insn) >= 0)
1459 {
1460 replacement = find_replacement (replacements, SET_SRC (x));
1461 if (replacement->new)
1462 {
1463 SET_SRC (x) = replacement->new;
1464 return;
1465 }
1466 else if (GET_CODE (SET_SRC (x)) == SUBREG)
1467 SET_SRC (x) = replacement->new
1468 = fixup_memory_subreg (SET_SRC (x), insn, 0);
1469 else
1470 SET_SRC (x) = replacement->new
1471 = fixup_stack_1 (SET_SRC (x), insn);
1472 return;
1473 }
1474
1475 if ((SET_DEST (x) == var
1476 || (GET_CODE (SET_DEST (x)) == SUBREG
1477 && SUBREG_REG (SET_DEST (x)) == var))
1478 && (GET_CODE (SET_SRC (x)) == REG
1479 || (GET_CODE (SET_SRC (x)) == SUBREG
1480 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1481 && recog_memoized (insn) >= 0)
1482 {
1483 if (GET_CODE (SET_DEST (x)) == SUBREG)
1484 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
1485 else
1486 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
1487 return;
1488 }
1489
1490 /* Otherwise, storing into VAR must be handled specially
1491 by storing into a temporary and copying that into VAR
1492 with a new insn after this one. Note that this case
1493 will be used when storing into a promoted scalar since
1494 the insn will now have different modes on the input
1495 and output and hence will be invalid (except for the case
1496 of setting it to a constant, which does not need any
1497 change if it is valid). We generate extra code in that case,
1498 but combine.c will eliminate it. */
1499
1500 if (dest == var)
1501 {
1502 rtx temp;
1503 rtx fixeddest = SET_DEST (x);
1504
1505 /* STRICT_LOW_PART can be discarded, around a MEM. */
1506 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
1507 fixeddest = XEXP (fixeddest, 0);
1508 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
1509 if (GET_CODE (fixeddest) == SUBREG)
1510 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
1511 else
1512 fixeddest = fixup_stack_1 (fixeddest, insn);
1513
1514 temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
1515 ? GET_MODE (fixeddest)
1516 : GET_MODE (SET_SRC (x)));
1517
1518 emit_insn_after (gen_move_insn (fixeddest,
1519 gen_lowpart (GET_MODE (fixeddest),
1520 temp)),
1521 insn);
1522
1523 SET_DEST (x) = temp;
1524 }
1525 }
1526 }
1527
1528 /* Nothing special about this RTX; fix its operands. */
1529
1530 fmt = GET_RTX_FORMAT (code);
1531 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1532 {
1533 if (fmt[i] == 'e')
1534 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
1535 if (fmt[i] == 'E')
1536 {
1537 register int j;
1538 for (j = 0; j < XVECLEN (x, i); j++)
1539 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
1540 insn, replacements);
1541 }
1542 }
1543 }
1544 \f
1545 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1546 return an rtx (MEM:m1 newaddr) which is equivalent.
1547 If any insns must be emitted to compute NEWADDR, put them before INSN.
1548
1549 UNCRITICAL nonzero means accept paradoxical subregs.
1550 This is used for subregs found inside of ZERO_EXTRACTs. */
1551
1552 static rtx
1553 fixup_memory_subreg (x, insn, uncritical)
1554 rtx x;
1555 rtx insn;
1556 int uncritical;
1557 {
1558 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1559 rtx addr = XEXP (SUBREG_REG (x), 0);
1560 enum machine_mode mode = GET_MODE (x);
1561 rtx saved, result;
1562
1563 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1564 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1565 && ! uncritical)
1566 abort ();
1567
1568 #if BYTES_BIG_ENDIAN
1569 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1570 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1571 #endif
1572 addr = plus_constant (addr, offset);
1573 if (!flag_force_addr && memory_address_p (mode, addr))
1574 /* Shortcut if no insns need be emitted. */
1575 return change_address (SUBREG_REG (x), mode, addr);
1576 start_sequence ();
1577 result = change_address (SUBREG_REG (x), mode, addr);
1578 emit_insn_before (gen_sequence (), insn);
1579 end_sequence ();
1580 return result;
1581 }
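
/* For illustration (hypothetical rtl, little-endian target assumed):
   (subreg:QI (mem:SI (plus:SI (reg:SI fp) (const_int -4))) 0)
   becomes
   (mem:QI (plus:SI (reg:SI fp) (const_int -4)))
   with no adjustment, since SUBREG_WORD is 0 and no big-endian
   correction applies; on a big-endian target the address would instead
   be offset to pick out the least significant bytes of the word.  */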
1582
1583 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1584 Replace subexpressions of X in place.
1585 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1586 Otherwise return X, with its contents possibly altered.
1587
1588 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1589
1590 static rtx
1591 walk_fixup_memory_subreg (x, insn)
1592 register rtx x;
1593 rtx insn;
1594 {
1595 register enum rtx_code code;
1596 register char *fmt;
1597 register int i;
1598
1599 if (x == 0)
1600 return 0;
1601
1602 code = GET_CODE (x);
1603
1604 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1605 return fixup_memory_subreg (x, insn, 0);
1606
1607 /* Nothing special about this RTX; fix its operands. */
1608
1609 fmt = GET_RTX_FORMAT (code);
1610 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1611 {
1612 if (fmt[i] == 'e')
1613 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1614 if (fmt[i] == 'E')
1615 {
1616 register int j;
1617 for (j = 0; j < XVECLEN (x, i); j++)
1618 XVECEXP (x, i, j)
1619 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1620 }
1621 }
1622 return x;
1623 }
1624 \f
1625 #if 0
1626 /* Fix up any references to stack slots that are invalid memory addresses
1627 because they exceed the maximum range of a displacement. */
1628
1629 void
1630 fixup_stack_slots ()
1631 {
1632 register rtx insn;
1633
1634 /* Did we generate a stack slot that is out of range
1635 or otherwise has an invalid address? */
1636 if (invalid_stack_slot)
1637 {
1638 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
1639 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1640 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1641 || GET_CODE (insn) == JUMP_INSN)
1642 fixup_stack_1 (PATTERN (insn), insn);
1643 }
1644 }
1645 #endif
1646
1647 /* For each memory ref within X, if it refers to a stack slot
1648 with an out of range displacement, put the address in a temp register
1649 (emitting new insns before INSN to load these registers)
1650 and alter the memory ref to use that register.
1651 Replace each such MEM rtx with a copy, to avoid clobberage. */
1652
1653 static rtx
1654 fixup_stack_1 (x, insn)
1655 rtx x;
1656 rtx insn;
1657 {
1658 register int i;
1659 register RTX_CODE code = GET_CODE (x);
1660 register char *fmt;
1661
1662 if (code == MEM)
1663 {
1664 register rtx ad = XEXP (x, 0);
1665 /* If we have address of a stack slot but it's not valid
1666 (displacement is too large), compute the sum in a register. */
1667 if (GET_CODE (ad) == PLUS
1668 && GET_CODE (XEXP (ad, 0)) == REG
1669 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1670 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1671 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1672 {
1673 rtx temp, seq;
1674 if (memory_address_p (GET_MODE (x), ad))
1675 return x;
1676
1677 start_sequence ();
1678 temp = copy_to_reg (ad);
1679 seq = gen_sequence ();
1680 end_sequence ();
1681 emit_insn_before (seq, insn);
1682 return change_address (x, VOIDmode, temp);
1683 }
1684 return x;
1685 }
1686
1687 fmt = GET_RTX_FORMAT (code);
1688 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1689 {
1690 if (fmt[i] == 'e')
1691 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1692 if (fmt[i] == 'E')
1693 {
1694 register int j;
1695 for (j = 0; j < XVECLEN (x, i); j++)
1696 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1697 }
1698 }
1699 return x;
1700 }
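/* Illustrative sketch (hypothetical register numbers and constant): on a
   machine whose addressing modes allow only small displacements,
   fixup_stack_1 would take a reference such as

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 40000)))

   emit, before INSN,

       (set (reg:SI tmp) (plus:SI (reg:SI virtual-stack-vars) (const_int 40000)))

   and replace the memory reference with (mem:SI (reg:SI tmp)), keeping the
   original mode via change_address.  */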
1701 \f
1702 /* Optimization: a bit-field instruction whose field
1703 happens to be a byte or halfword in memory
1704 can be changed to a move instruction.
1705
1706 We call here when INSN is an insn to examine or store into a bit-field.
1707 BODY is the SET-rtx to be altered.
1708
1709 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1710 (Currently this is called only from function.c, and EQUIV_MEM
1711 is always 0.) */
1712
1713 static void
1714 optimize_bit_field (body, insn, equiv_mem)
1715 rtx body;
1716 rtx insn;
1717 rtx *equiv_mem;
1718 {
1719 register rtx bitfield;
1720 int destflag;
1721 rtx seq = 0;
1722 enum machine_mode mode;
1723
1724 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1725 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1726 bitfield = SET_DEST (body), destflag = 1;
1727 else
1728 bitfield = SET_SRC (body), destflag = 0;
1729
1730 /* First check that the field being stored has constant size and position
1731 and is in fact a byte or halfword suitably aligned. */
1732
1733 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1734 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1735 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1736 != BLKmode)
1737 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1738 {
1739 register rtx memref = 0;
1740
1741 /* Now check that the containing word is memory, not a register,
1742 and that it is safe to change the machine mode. */
1743
1744 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1745 memref = XEXP (bitfield, 0);
1746 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1747 && equiv_mem != 0)
1748 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1749 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1750 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1751 memref = SUBREG_REG (XEXP (bitfield, 0));
1752 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1753 && equiv_mem != 0
1754 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1755 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1756
1757 if (memref
1758 && ! mode_dependent_address_p (XEXP (memref, 0))
1759 && ! MEM_VOLATILE_P (memref))
1760 {
1761 /* Now adjust the address, first for any subreg'ing
1762 that we are now getting rid of,
1763 and then for which byte of the word is wanted. */
1764
1765 register int offset = INTVAL (XEXP (bitfield, 2));
1766 /* Adjust OFFSET to count bits from low-address byte. */
1767 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1768 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1769 - offset - INTVAL (XEXP (bitfield, 1)));
1770 #endif
1771 /* Adjust OFFSET to count bytes from low-address byte. */
1772 offset /= BITS_PER_UNIT;
1773 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1774 {
1775 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1776 #if BYTES_BIG_ENDIAN
1777 offset -= (MIN (UNITS_PER_WORD,
1778 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1779 - MIN (UNITS_PER_WORD,
1780 GET_MODE_SIZE (GET_MODE (memref))));
1781 #endif
1782 }
1783
1784 memref = change_address (memref, mode,
1785 plus_constant (XEXP (memref, 0), offset));
1786
1787 /* Store this memory reference where
1788 we found the bit field reference. */
1789
1790 if (destflag)
1791 {
1792 validate_change (insn, &SET_DEST (body), memref, 1);
1793 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1794 {
1795 rtx src = SET_SRC (body);
1796 while (GET_CODE (src) == SUBREG
1797 && SUBREG_WORD (src) == 0)
1798 src = SUBREG_REG (src);
1799 if (GET_MODE (src) != GET_MODE (memref))
1800 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1801 validate_change (insn, &SET_SRC (body), src, 1);
1802 }
1803 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1804 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1805 /* This shouldn't happen because anything that didn't have
1806 one of these modes should have got converted explicitly
1807 and then referenced through a subreg.
1808 This is so because the original bit-field was
1809 handled by agg_mode and so its tree structure had
1810 the same mode that memref now has. */
1811 abort ();
1812 }
1813 else
1814 {
1815 rtx dest = SET_DEST (body);
1816
1817 while (GET_CODE (dest) == SUBREG
1818 && SUBREG_WORD (dest) == 0)
1819 dest = SUBREG_REG (dest);
1820
1821 validate_change (insn, &SET_DEST (body), dest, 1);
1822
1823 if (GET_MODE (dest) == GET_MODE (memref))
1824 validate_change (insn, &SET_SRC (body), memref, 1);
1825 else
1826 {
1827 /* Convert the mem ref to the destination mode. */
1828 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1829
1830 start_sequence ();
1831 convert_move (newreg, memref,
1832 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1833 seq = get_insns ();
1834 end_sequence ();
1835
1836 validate_change (insn, &SET_SRC (body), newreg, 1);
1837 }
1838 }
1839
1840 /* See if we can convert this extraction or insertion into
1841 a simple move insn. We might not be able to do so if this
1842 was, for example, part of a PARALLEL.
1843
1844 If we succeed, write out any needed conversions. If we fail,
1845 it is hard to guess why we failed, so don't do anything
1846 special; just let the optimization be suppressed. */
1847
1848 if (apply_change_group () && seq)
1849 emit_insns_before (seq, insn);
1850 }
1851 }
1852 }
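/* Illustrative sketch (hypothetical operands, little-endian target where
   BITS_BIG_ENDIAN == BYTES_BIG_ENDIAN): given a store such as

       (set (zero_extract:SI (mem:SI (reg X)) (const_int 8) (const_int 8))
            (reg:SI Y))

   the field is a whole, aligned byte, so the code above rewrites the
   destination as a QImode memory reference one byte past the word's
   low-address byte and narrows the source with gen_lowpart, turning the
   bit-field insertion into a plain byte move.  */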
1853 \f
1854 /* These routines are responsible for converting virtual register references
1855 to the actual hard register references once RTL generation is complete.
1856
1857 The following four variables are used for communication between the
1858 routines. They contain the offsets of the virtual registers from their
1859 respective hard registers. */
1860
1861 static int in_arg_offset;
1862 static int var_offset;
1863 static int dynamic_offset;
1864 static int out_arg_offset;
1865
1866 /* In most machines, the stack pointer register is equivalent to the bottom
1867 of the stack. */
1868
1869 #ifndef STACK_POINTER_OFFSET
1870 #define STACK_POINTER_OFFSET 0
1871 #endif
1872
1873 /* If not defined, pick an appropriate default for the offset of dynamically
1874 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1875 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1876
1877 #ifndef STACK_DYNAMIC_OFFSET
1878
1879 #ifdef ACCUMULATE_OUTGOING_ARGS
1880 /* The bottom of the stack points to the actual arguments. If
1881 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1882 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1883 stack space for register parameters is not pushed by the caller, but
1884 rather part of the fixed stack areas and hence not included in
1885 `current_function_outgoing_args_size'. Nevertheless, we must allow
1886 for it when allocating stack dynamic objects. */
1887
1888 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1889 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1890 (current_function_outgoing_args_size \
1891 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1892
1893 #else
1894 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1895 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1896 #endif
1897
1898 #else
1899 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1900 #endif
1901 #endif
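/* For example (a hedged reading of the defaults above, not target code): on
   a machine that defines ACCUMULATE_OUTGOING_ARGS but not
   REG_PARM_STACK_SPACE, and that leaves STACK_POINTER_OFFSET at its default
   of 0, dynamically allocated memory starts
   current_function_outgoing_args_size bytes above the stack pointer, just
   past the outgoing-argument block; without ACCUMULATE_OUTGOING_ARGS it is
   simply STACK_POINTER_OFFSET.  */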
1902
1903 /* Pass through the INSNS of function FNDECL and convert virtual register
1904 references to hard register references. */
1905
1906 void
1907 instantiate_virtual_regs (fndecl, insns)
1908 tree fndecl;
1909 rtx insns;
1910 {
1911 rtx insn;
1912
1913 /* Compute the offsets to use for this function. */
1914 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1915 var_offset = STARTING_FRAME_OFFSET;
1916 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1917 out_arg_offset = STACK_POINTER_OFFSET;
1918
1919 /* Scan all variables and parameters of this function. For each that is
1920 in memory, instantiate all virtual registers if the result is a valid
1921 address. If not, we do it later. That will handle most uses of virtual
1922 regs on many machines. */
1923 instantiate_decls (fndecl, 1);
1924
1925 /* Initialize recognition, indicating that volatile is OK. */
1926 init_recog ();
1927
1928 /* Scan through all the insns, instantiating every virtual register still
1929 present. */
1930 for (insn = insns; insn; insn = NEXT_INSN (insn))
1931 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1932 || GET_CODE (insn) == CALL_INSN)
1933 {
1934 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1935 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1936 }
1937
1938 /* Now instantiate the remaining register equivalences for debugging info.
1939 These will not be valid addresses. */
1940 instantiate_decls (fndecl, 0);
1941
1942 /* Indicate that, from now on, assign_stack_local should use
1943 frame_pointer_rtx. */
1944 virtuals_instantiated = 1;
1945 }
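/* A summary of the substitution performed here and in
   instantiate_virtual_regs_1 below (register names are symbolic):

       virtual_incoming_args_rtx  ->  arg_pointer_rtx   + in_arg_offset
       virtual_stack_vars_rtx     ->  frame_pointer_rtx + var_offset
       virtual_stack_dynamic_rtx  ->  stack_pointer_rtx + dynamic_offset
       virtual_outgoing_args_rtx  ->  stack_pointer_rtx + out_arg_offset

   where the offsets are the values computed at the top of this function.  */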
1946
1947 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1948 all virtual registers in their DECL_RTL's.
1949
1950 If VALID_ONLY, do this only if the resulting address is still valid.
1951 Otherwise, always do it. */
1952
1953 static void
1954 instantiate_decls (fndecl, valid_only)
1955 tree fndecl;
1956 int valid_only;
1957 {
1958 tree decl;
1959
1960 if (DECL_INLINE (fndecl))
1961 /* When compiling an inline function, the obstack used for
1962 rtl allocation is the maybepermanent_obstack. Calling
1963 `resume_temporary_allocation' switches us back to that
1964 obstack while we process this function's parameters. */
1965 resume_temporary_allocation ();
1966
1967 /* Process all parameters of the function. */
1968 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1969 {
1970 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
1971 valid_only);
1972 instantiate_decl (DECL_INCOMING_RTL (decl),
1973 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
1974 }
1975
1976 /* Now process all variables defined in the function or its subblocks. */
1977 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1978
1979 if (DECL_INLINE (fndecl))
1980 {
1981 /* Save all rtl allocated for this function by raising the
1982 high-water mark on the maybepermanent_obstack. */
1983 preserve_data ();
1984 /* All further rtl allocation is now done in the current_obstack. */
1985 rtl_in_current_obstack ();
1986 }
1987 }
1988
1989 /* Subroutine of instantiate_decls: Process all decls in the given
1990 BLOCK node and all its subblocks. */
1991
1992 static void
1993 instantiate_decls_1 (let, valid_only)
1994 tree let;
1995 int valid_only;
1996 {
1997 tree t;
1998
1999 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2000 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2001 valid_only);
2002
2003 /* Process all subblocks. */
2004 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2005 instantiate_decls_1 (t, valid_only);
2006 }
2007
2008 /* Subroutine of the preceding procedures: Given RTL representing a
2009 decl and the size of the object, do any instantiation required.
2010
2011 If VALID_ONLY is non-zero, it means that the RTL should only be
2012 changed if the new address is valid. */
2013
2014 static void
2015 instantiate_decl (x, size, valid_only)
2016 rtx x;
2017 int size;
2018 int valid_only;
2019 {
2020 enum machine_mode mode;
2021 rtx addr;
2022
2023 /* If this is not a MEM, no need to do anything. Similarly if the
2024 address is a constant or a register that is not a virtual register. */
2025
2026 if (x == 0 || GET_CODE (x) != MEM)
2027 return;
2028
2029 addr = XEXP (x, 0);
2030 if (CONSTANT_P (addr)
2031 || (GET_CODE (addr) == REG
2032 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2033 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2034 return;
2035
2036 /* If we should only do this if the address is valid, copy the address.
2037 We need to do this so we can undo any changes that might make the
2038 address invalid. This copy is unfortunate, but probably can't be
2039 avoided. */
2040
2041 if (valid_only)
2042 addr = copy_rtx (addr);
2043
2044 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2045
2046 if (! valid_only)
2047 return;
2048
2049 /* Now verify that the resulting address is valid for every integer or
2050 floating-point mode up to and including SIZE bytes long. We do this
2051 since the object might be accessed in any mode and frame addresses
2052 are shared. */
2053
2054 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2055 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2056 mode = GET_MODE_WIDER_MODE (mode))
2057 if (! memory_address_p (mode, addr))
2058 return;
2059
2060 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2061 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2062 mode = GET_MODE_WIDER_MODE (mode))
2063 if (! memory_address_p (mode, addr))
2064 return;
2065
2066 /* Otherwise, put back the address, now that we have updated it and we
2067 know it is valid. */
2068
2069 XEXP (x, 0) = addr;
2070 }
2071 \f
2072 /* Given a pointer to a piece of rtx and an optional pointer to the
2073 containing object, instantiate any virtual registers present in it.
2074
2075 If EXTRA_INSNS, we always do the replacement and generate
2076 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2077 is not valid.
2078
2079 Return 1 if we either had nothing to do or if we were able to do the
2080 needed replacement. Return 0 otherwise; we only return zero if
2081 EXTRA_INSNS is zero.
2082
2083 We first try some simple transformations to avoid the creation of extra
2084 pseudos. */
2085
2086 static int
2087 instantiate_virtual_regs_1 (loc, object, extra_insns)
2088 rtx *loc;
2089 rtx object;
2090 int extra_insns;
2091 {
2092 rtx x;
2093 RTX_CODE code;
2094 rtx new = 0;
2095 int offset;
2096 rtx temp;
2097 rtx seq;
2098 int i, j;
2099 char *fmt;
2100
2101 /* Re-start here to avoid recursion in common cases. */
2102 restart:
2103
2104 x = *loc;
2105 if (x == 0)
2106 return 1;
2107
2108 code = GET_CODE (x);
2109
2110 /* Check for some special cases. */
2111 switch (code)
2112 {
2113 case CONST_INT:
2114 case CONST_DOUBLE:
2115 case CONST:
2116 case SYMBOL_REF:
2117 case CODE_LABEL:
2118 case PC:
2119 case CC0:
2120 case ASM_INPUT:
2121 case ADDR_VEC:
2122 case ADDR_DIFF_VEC:
2123 case RETURN:
2124 return 1;
2125
2126 case SET:
2127 /* We are allowed to set the virtual registers. This means that
2128 the actual register should receive the source minus the
2129 appropriate offset. This is used, for example, in the handling
2130 of non-local gotos. */
2131 if (SET_DEST (x) == virtual_incoming_args_rtx)
2132 new = arg_pointer_rtx, offset = - in_arg_offset;
2133 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2134 new = frame_pointer_rtx, offset = - var_offset;
2135 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2136 new = stack_pointer_rtx, offset = - dynamic_offset;
2137 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2138 new = stack_pointer_rtx, offset = - out_arg_offset;
2139
2140 if (new)
2141 {
2142 /* The only valid sources here are PLUS or REG. Just do
2143 the simplest possible thing to handle them. */
2144 if (GET_CODE (SET_SRC (x)) != REG
2145 && GET_CODE (SET_SRC (x)) != PLUS)
2146 abort ();
2147
2148 start_sequence ();
2149 if (GET_CODE (SET_SRC (x)) != REG)
2150 temp = force_operand (SET_SRC (x), NULL_RTX);
2151 else
2152 temp = SET_SRC (x);
2153 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2154 seq = get_insns ();
2155 end_sequence ();
2156
2157 emit_insns_before (seq, object);
2158 SET_DEST (x) = new;
2159
2160 if (!validate_change (object, &SET_SRC (x), temp, 0)
2161 || ! extra_insns)
2162 abort ();
2163
2164 return 1;
2165 }
2166
2167 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2168 loc = &SET_SRC (x);
2169 goto restart;
2170
2171 case PLUS:
2172 /* Handle special case of virtual register plus constant. */
2173 if (CONSTANT_P (XEXP (x, 1)))
2174 {
2175 rtx old;
2176
2177 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2178 if (GET_CODE (XEXP (x, 0)) == PLUS)
2179 {
2180 rtx inner = XEXP (XEXP (x, 0), 0);
2181
2182 if (inner == virtual_incoming_args_rtx)
2183 new = arg_pointer_rtx, offset = in_arg_offset;
2184 else if (inner == virtual_stack_vars_rtx)
2185 new = frame_pointer_rtx, offset = var_offset;
2186 else if (inner == virtual_stack_dynamic_rtx)
2187 new = stack_pointer_rtx, offset = dynamic_offset;
2188 else if (inner == virtual_outgoing_args_rtx)
2189 new = stack_pointer_rtx, offset = out_arg_offset;
2190 else
2191 {
2192 loc = &XEXP (x, 0);
2193 goto restart;
2194 }
2195
2196 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2197 extra_insns);
2198 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2199 }
2200
2201 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2202 new = arg_pointer_rtx, offset = in_arg_offset;
2203 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2204 new = frame_pointer_rtx, offset = var_offset;
2205 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2206 new = stack_pointer_rtx, offset = dynamic_offset;
2207 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2208 new = stack_pointer_rtx, offset = out_arg_offset;
2209 else
2210 {
2211 /* We know the second operand is a constant. Unless the
2212 first operand is a REG (which has already been checked),
2213 it needs to be checked. */
2214 if (GET_CODE (XEXP (x, 0)) != REG)
2215 {
2216 loc = &XEXP (x, 0);
2217 goto restart;
2218 }
2219 return 1;
2220 }
2221
2222 old = XEXP (x, 0);
2223 XEXP (x, 0) = new;
2224 new = plus_constant (XEXP (x, 1), offset);
2225
2226 /* If the new constant is zero, try to replace the sum with its
2227 first operand. */
2228 if (new == const0_rtx
2229 && validate_change (object, loc, XEXP (x, 0), 0))
2230 return 1;
2231
2232 /* Next try to replace constant with new one. */
2233 if (!validate_change (object, &XEXP (x, 1), new, 0))
2234 {
2235 if (! extra_insns)
2236 {
2237 XEXP (x, 0) = old;
2238 return 0;
2239 }
2240
2241 /* Otherwise copy the new constant into a register and replace
2242 constant with that register. */
2243 temp = gen_reg_rtx (Pmode);
2244 if (validate_change (object, &XEXP (x, 1), temp, 0))
2245 emit_insn_before (gen_move_insn (temp, new), object);
2246 else
2247 {
2248 /* If that didn't work, replace this expression with a
2249 register containing the sum. */
2250
2251 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2252 XEXP (x, 0) = old;
2253
2254 start_sequence ();
2255 temp = force_operand (new, NULL_RTX);
2256 seq = get_insns ();
2257 end_sequence ();
2258
2259 emit_insns_before (seq, object);
2260 if (! validate_change (object, loc, temp, 0)
2261 && ! validate_replace_rtx (x, temp, object))
2262 abort ();
2263 }
2264 }
2265
2266 return 1;
2267 }
2268
2269 /* Fall through to generic two-operand expression case. */
2270 case EXPR_LIST:
2271 case CALL:
2272 case COMPARE:
2273 case MINUS:
2274 case MULT:
2275 case DIV: case UDIV:
2276 case MOD: case UMOD:
2277 case AND: case IOR: case XOR:
2278 case LSHIFT: case ASHIFT: case ROTATE:
2279 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2280 case NE: case EQ:
2281 case GE: case GT: case GEU: case GTU:
2282 case LE: case LT: case LEU: case LTU:
2283 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2284 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2285 loc = &XEXP (x, 0);
2286 goto restart;
2287
2288 case MEM:
2289 /* Most cases of MEM that convert to valid addresses have already been
2290 handled by our scan of regno_reg_rtx. The only special handling we
2291 need here is to make a copy of the rtx to ensure it isn't being
2292 shared if we have to change it to a pseudo.
2293
2294 If the rtx is a simple reference to an address via a virtual register,
2295 it can potentially be shared. In such cases, first try to make it
2296 a valid address, which can also be shared. Otherwise, copy it and
2297 proceed normally.
2298
2299 First check for common cases that need no processing. These are
2300 usually due to instantiation already being done on a previous instance
2301 of a shared rtx. */
2302
2303 temp = XEXP (x, 0);
2304 if (CONSTANT_ADDRESS_P (temp)
2305 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2306 || temp == arg_pointer_rtx
2307 #endif
2308 || temp == frame_pointer_rtx)
2309 return 1;
2310
2311 if (GET_CODE (temp) == PLUS
2312 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2313 && (XEXP (temp, 0) == frame_pointer_rtx
2314 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2315 || XEXP (temp, 0) == arg_pointer_rtx
2316 #endif
2317 ))
2318 return 1;
2319
2320 if (temp == virtual_stack_vars_rtx
2321 || temp == virtual_incoming_args_rtx
2322 || (GET_CODE (temp) == PLUS
2323 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2324 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2325 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2326 {
2327 /* This MEM may be shared. If the substitution can be done without
2328 the need to generate new pseudos, we want to do it in place
2329 so all copies of the shared rtx benefit. The call below will
2330 only make substitutions if the resulting address is still
2331 valid.
2332
2333 Note that we cannot pass X as the object in the recursive call
2334 since the insn being processed may not allow all valid
2335 addresses. However, if we were not passed an object, we can
2336 only modify X without copying it if X will have a valid
2337 address.
2338
2339 ??? Also note that this can still lose if OBJECT is an insn that
2340 has fewer restrictions on an address than some other insn.
2341 In that case, we will modify the shared address. This case
2342 doesn't seem very likely, though. */
2343
2344 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2345 object ? object : x, 0))
2346 return 1;
2347
2348 /* Otherwise make a copy and process that copy. We copy the entire
2349 RTL expression since it might be a PLUS which could also be
2350 shared. */
2351 *loc = x = copy_rtx (x);
2352 }
2353
2354 /* Fall through to generic unary operation case. */
2355 case USE:
2356 case CLOBBER:
2357 case SUBREG:
2358 case STRICT_LOW_PART:
2359 case NEG: case NOT:
2360 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2361 case SIGN_EXTEND: case ZERO_EXTEND:
2362 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2363 case FLOAT: case FIX:
2364 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2365 case ABS:
2366 case SQRT:
2367 case FFS:
2368 /* These cases either have just one operand or we know that we need not
2369 check the rest of the operands. */
2370 loc = &XEXP (x, 0);
2371 goto restart;
2372
2373 case REG:
2374 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2375 in front of this insn and substitute the temporary. */
2376 if (x == virtual_incoming_args_rtx)
2377 new = arg_pointer_rtx, offset = in_arg_offset;
2378 else if (x == virtual_stack_vars_rtx)
2379 new = frame_pointer_rtx, offset = var_offset;
2380 else if (x == virtual_stack_dynamic_rtx)
2381 new = stack_pointer_rtx, offset = dynamic_offset;
2382 else if (x == virtual_outgoing_args_rtx)
2383 new = stack_pointer_rtx, offset = out_arg_offset;
2384
2385 if (new)
2386 {
2387 temp = plus_constant (new, offset);
2388 if (!validate_change (object, loc, temp, 0))
2389 {
2390 if (! extra_insns)
2391 return 0;
2392
2393 start_sequence ();
2394 temp = force_operand (temp, NULL_RTX);
2395 seq = get_insns ();
2396 end_sequence ();
2397
2398 emit_insns_before (seq, object);
2399 if (! validate_change (object, loc, temp, 0)
2400 && ! validate_replace_rtx (x, temp, object))
2401 abort ();
2402 }
2403 }
2404
2405 return 1;
2406 }
2407
2408 /* Scan all subexpressions. */
2409 fmt = GET_RTX_FORMAT (code);
2410 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2411 if (*fmt == 'e')
2412 {
2413 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2414 return 0;
2415 }
2416 else if (*fmt == 'E')
2417 for (j = 0; j < XVECLEN (x, i); j++)
2418 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2419 extra_insns))
2420 return 0;
2421
2422 return 1;
2423 }
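/* Worked example (the values are hypothetical): if STARTING_FRAME_OFFSET
   makes var_offset 16, the PLUS case above rewrites

       (plus:SI (reg:SI virtual-stack-vars) (const_int 12))

   in place as

       (plus:SI (reg:SI frame-pointer) (const_int 28))

   provided the containing insn still recognizes the result; otherwise the
   constant, or the whole sum, is first computed into a new pseudo before
   the insn.  */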
2424 \f
2425 /* Optimization: assuming this function does not receive nonlocal gotos,
2426 delete the handlers for such, as well as the insns to establish
2427 and disestablish them. */
2428
2429 static void
2430 delete_handlers ()
2431 {
2432 rtx insn;
2433 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2434 {
2435 /* Delete the handler by turning off the flag that would
2436 prevent jump_optimize from deleting it.
2437 Also permit deletion of the nonlocal labels themselves
2438 if nothing local refers to them. */
2439 if (GET_CODE (insn) == CODE_LABEL)
2440 LABEL_PRESERVE_P (insn) = 0;
2441 if (GET_CODE (insn) == INSN
2442 && ((nonlocal_goto_handler_slot != 0
2443 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2444 || (nonlocal_goto_stack_level != 0
2445 && reg_mentioned_p (nonlocal_goto_stack_level,
2446 PATTERN (insn)))))
2447 delete_insn (insn);
2448 }
2449 }
2450
2451 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2452 of the current function. */
2453
2454 rtx
2455 nonlocal_label_rtx_list ()
2456 {
2457 tree t;
2458 rtx x = 0;
2459
2460 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2461 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2462
2463 return x;
2464 }
2465 \f
2466 /* Output a USE for any register use in RTL.
2467 This is used with -noreg to mark the extent of lifespan
2468 of any registers used in a user-visible variable's DECL_RTL. */
2469
2470 void
2471 use_variable (rtl)
2472 rtx rtl;
2473 {
2474 if (GET_CODE (rtl) == REG)
2475 /* This is a register variable. */
2476 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2477 else if (GET_CODE (rtl) == MEM
2478 && GET_CODE (XEXP (rtl, 0)) == REG
2479 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2480 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2481 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2482 /* This is a variable-sized structure. */
2483 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2484 }
2485
2486 /* Like use_variable except that it outputs the USEs after INSN
2487 instead of at the end of the insn-chain. */
2488
2489 void
2490 use_variable_after (rtl, insn)
2491 rtx rtl, insn;
2492 {
2493 if (GET_CODE (rtl) == REG)
2494 /* This is a register variable. */
2495 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2496 else if (GET_CODE (rtl) == MEM
2497 && GET_CODE (XEXP (rtl, 0)) == REG
2498 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2499 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2500 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2501 /* This is a variable-sized structure. */
2502 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2503 }
2504 \f
2505 int
2506 max_parm_reg_num ()
2507 {
2508 return max_parm_reg;
2509 }
2510
2511 /* Return the first insn following those generated by `assign_parms'. */
2512
2513 rtx
2514 get_first_nonparm_insn ()
2515 {
2516 if (last_parm_insn)
2517 return NEXT_INSN (last_parm_insn);
2518 return get_insns ();
2519 }
2520
2521 /* Return 1 if EXP returns an aggregate value, for which an address
2522 must be passed to the function or returned by the function. */
2523
2524 int
2525 aggregate_value_p (exp)
2526 tree exp;
2527 {
2528 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2529 return 1;
2530 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2531 return 1;
2532 if (flag_pcc_struct_return
2533 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2534 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2535 return 1;
2536 return 0;
2537 }
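/* For instance (a hypothetical declaration, not part of this file):

       struct pair { int a, b; };
       struct pair make_pair (int a, int b);

   make_pair's result counts as an aggregate whenever its type has BLKmode,
   whenever RETURN_IN_MEMORY says so for the target, or under
   -fpcc-struct-return; in those cases the caller must pass an address in
   which the result is to be stored.  */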
2538 \f
2539 /* Assign RTL expressions to the function's parameters.
2540 This may involve copying them into registers and using
2541 those registers as the RTL for them.
2542
2543 If SECOND_TIME is non-zero it means that this function is being
2544 called a second time. This is done by integrate.c when a function's
2545 compilation is deferred. We need to come back here in case the
2546 FUNCTION_ARG macro computes items needed for the rest of the compilation
2547 (such as changing which registers are fixed or caller-saved). But suppress
2548 writing any insns or setting DECL_RTL of anything in this case. */
2549
2550 void
2551 assign_parms (fndecl, second_time)
2552 tree fndecl;
2553 int second_time;
2554 {
2555 register tree parm;
2556 register rtx entry_parm = 0;
2557 register rtx stack_parm = 0;
2558 CUMULATIVE_ARGS args_so_far;
2559 enum machine_mode passed_mode, nominal_mode;
2560 int unsignedp;
2561 /* Total space needed so far for args on the stack,
2562 given as a constant and a tree-expression. */
2563 struct args_size stack_args_size;
2564 tree fntype = TREE_TYPE (fndecl);
2565 tree fnargs = DECL_ARGUMENTS (fndecl);
2566 /* This is used for the arg pointer when referring to stack args. */
2567 rtx internal_arg_pointer;
2568 /* This is a dummy PARM_DECL that we used for the function result if
2569 the function returns a structure. */
2570 tree function_result_decl = 0;
2571 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2572 int varargs_setup = 0;
2573
2574 /* Nonzero if the last arg is named `__builtin_va_alist',
2575 which is used on some machines for old-fashioned non-ANSI varargs.h;
2576 this should be stuck onto the stack as if it had arrived there. */
2577 int vararg
2578 = (fnargs
2579 && (parm = tree_last (fnargs)) != 0
2580 && DECL_NAME (parm)
2581 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2582 "__builtin_va_alist")));
2583
2584 /* Nonzero if function takes extra anonymous args.
2585 This means the last named arg must be on the stack
2586 right before the anonymous ones. */
2587 int stdarg
2588 = (TYPE_ARG_TYPES (fntype) != 0
2589 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2590 != void_type_node));
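/* Hypothetical illustrations of the two tests above (not declarations from
   this file):

       int f (int n);       TYPE_ARG_TYPES ends in void_type_node,
                            so STDARG stays 0;
       int g (int n, ...);  the list does not end in void_type_node,
                            so STDARG is 1;
       an old-style varargs.h function ends with a parm that expands to
       __builtin_va_alist, which is what VARARG looks for.  */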
2591
2592 /* If the reg that the virtual arg pointer will be translated into is
2593 not a fixed reg or is the stack pointer, make a copy of the virtual
2594 arg pointer, and address parms via the copy. The frame pointer is
2595 considered fixed even though it is not marked as such.
2596
2597 The second time through, simply use ap to avoid generating rtx. */
2598
2599 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2600 || ! (fixed_regs[ARG_POINTER_REGNUM]
2601 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2602 && ! second_time)
2603 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2604 else
2605 internal_arg_pointer = virtual_incoming_args_rtx;
2606 current_function_internal_arg_pointer = internal_arg_pointer;
2607
2608 stack_args_size.constant = 0;
2609 stack_args_size.var = 0;
2610
2611 /* If struct value address is treated as the first argument, make it so. */
2612 if (aggregate_value_p (DECL_RESULT (fndecl))
2613 && ! current_function_returns_pcc_struct
2614 && struct_value_incoming_rtx == 0)
2615 {
2616 tree type = build_pointer_type (fntype);
2617
2618 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2619
2620 DECL_ARG_TYPE (function_result_decl) = type;
2621 TREE_CHAIN (function_result_decl) = fnargs;
2622 fnargs = function_result_decl;
2623 }
2624
2625 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2626 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2627
2628 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2629 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
2630 #else
2631 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2632 #endif
2633
2634 /* We haven't yet found an argument that we must push and pretend the
2635 caller did. */
2636 current_function_pretend_args_size = 0;
2637
2638 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2639 {
2640 int aggregate
2641 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2642 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2643 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2644 struct args_size stack_offset;
2645 struct args_size arg_size;
2646 int passed_pointer = 0;
2647 tree passed_type = DECL_ARG_TYPE (parm);
2648
2649 /* Set LAST_NAMED if this is the last named arg before some
2650 anonymous args. We treat it as if it were anonymous too. */
2651 int last_named = ((TREE_CHAIN (parm) == 0
2652 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2653 && (vararg || stdarg));
2654
2655 if (TREE_TYPE (parm) == error_mark_node
2656 /* This can happen after weird syntax errors
2657 or if an enum type is defined among the parms. */
2658 || TREE_CODE (parm) != PARM_DECL
2659 || passed_type == NULL)
2660 {
2661 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2662 TREE_USED (parm) = 1;
2663 continue;
2664 }
2665
2666 /* For varargs.h function, save info about regs and stack space
2667 used by the individual args, not including the va_alist arg. */
2668 if (vararg && last_named)
2669 current_function_args_info = args_so_far;
2670
2671 /* Find mode of arg as it is passed, and mode of arg
2672 as it should be during execution of this function. */
2673 passed_mode = TYPE_MODE (passed_type);
2674 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2675
2676 /* If the parm's mode is VOID, its value doesn't matter,
2677 so avoid the usual things like emit_move_insn that could crash. */
2678 if (nominal_mode == VOIDmode)
2679 {
2680 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2681 continue;
2682 }
2683
2684 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2685 /* See if this arg was passed by invisible reference. */
2686 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2687 passed_type, ! last_named))
2688 {
2689 passed_type = build_pointer_type (passed_type);
2690 passed_pointer = 1;
2691 passed_mode = nominal_mode = Pmode;
2692 }
2693 #endif
2694
2695 /* Let machine desc say which reg (if any) the parm arrives in.
2696 0 means it arrives on the stack. */
2697 #ifdef FUNCTION_INCOMING_ARG
2698 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2699 passed_type, ! last_named);
2700 #else
2701 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2702 passed_type, ! last_named);
2703 #endif
2704
2705 #ifdef SETUP_INCOMING_VARARGS
2706 /* If this is the last named parameter, do any required setup for
2707 varargs or stdargs. We need to know about the case of this being an
2708 addressable type, in which case we skip the registers it
2709 would have arrived in.
2710
2711 For stdargs, LAST_NAMED will be set for two parameters, the one that
2712 is actually the last named, and the dummy parameter. We only
2713 want to do this action once.
2714
2715 Also, indicate when RTL generation is to be suppressed. */
2716 if (last_named && !varargs_setup)
2717 {
2718 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2719 current_function_pretend_args_size,
2720 second_time);
2721 varargs_setup = 1;
2722 }
2723 #endif
2724
2725 /* Determine parm's home in the stack,
2726 in case it arrives in the stack or we should pretend it did.
2727
2728 Compute the stack position and rtx where the argument arrives
2729 and its size.
2730
2731 There is one complexity here: If this was a parameter that would
2732 have been passed in registers, but wasn't only because it is
2733 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2734 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2735 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2736 0 as it was the previous time. */
2737
2738 locate_and_pad_parm (passed_mode, passed_type,
2739 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2740 1,
2741 #else
2742 #ifdef FUNCTION_INCOMING_ARG
2743 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2744 passed_type,
2745 (! last_named
2746 || varargs_setup)) != 0,
2747 #else
2748 FUNCTION_ARG (args_so_far, passed_mode,
2749 passed_type,
2750 ! last_named || varargs_setup) != 0,
2751 #endif
2752 #endif
2753 fndecl, &stack_args_size, &stack_offset, &arg_size);
2754
2755 if (! second_time)
2756 {
2757 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2758
2759 if (offset_rtx == const0_rtx)
2760 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2761 else
2762 stack_parm = gen_rtx (MEM, passed_mode,
2763 gen_rtx (PLUS, Pmode,
2764 internal_arg_pointer, offset_rtx));
2765
2766 /* If this is a memory ref that contains aggregate components,
2767 mark it as such for cse and loop optimize. */
2768 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2769 }
2770
2771 /* If this parameter was passed both in registers and in the stack,
2772 use the copy on the stack. */
2773 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2774 entry_parm = 0;
2775
2776 /* If this parm was passed part in regs and part in memory,
2777 pretend it arrived entirely in memory
2778 by pushing the register-part onto the stack.
2779
2780 In the special case of a DImode or DFmode that is split,
2781 we could put it together in a pseudoreg directly,
2782 but for now that's not worth bothering with. */
2783
2784 if (entry_parm)
2785 {
2786 int nregs = 0;
2787 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2788 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2789 passed_type, ! last_named);
2790 #endif
2791
2792 if (nregs > 0)
2793 {
2794 current_function_pretend_args_size
2795 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2796 / (PARM_BOUNDARY / BITS_PER_UNIT)
2797 * (PARM_BOUNDARY / BITS_PER_UNIT));
2798
2799 if (! second_time)
2800 move_block_from_reg (REGNO (entry_parm),
2801 validize_mem (stack_parm), nregs);
2802 entry_parm = stack_parm;
2803 }
2804 }
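/* A worked example of the rounding just above (all numbers hypothetical):
   with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32, a parm with
   nregs == 3 words still in registers yields

       current_function_pretend_args_size
         == ((3*4 + 4 - 1) / 4) * 4 == 12

   bytes that we pretend the caller pushed; the register part is copied
   into the parm's stack slot so the parm then looks wholly in memory.  */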
2805
2806 /* If we didn't decide this parm came in a register,
2807 by default it came on the stack. */
2808 if (entry_parm == 0)
2809 entry_parm = stack_parm;
2810
2811 /* Record permanently how this parm was passed. */
2812 if (! second_time)
2813 DECL_INCOMING_RTL (parm) = entry_parm;
2814
2815 /* If there is actually space on the stack for this parm,
2816 count it in stack_args_size; otherwise set stack_parm to 0
2817 to indicate there is no preallocated stack slot for the parm. */
2818
2819 if (entry_parm == stack_parm
2820 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
2821 /* On some machines, even if a parm value arrives in a register
2822 there is still an (uninitialized) stack slot allocated for it.
2823
2824 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2825 whether this parameter already has a stack slot allocated,
2826 because an arg block exists only if current_function_args_size
2827 is larger than some threshold, and we haven't calculated that
2828 yet. So, for now, we just assume that stack slots never exist
2829 in this case. */
2830 || REG_PARM_STACK_SPACE (fndecl) > 0
2831 #endif
2832 )
2833 {
2834 stack_args_size.constant += arg_size.constant;
2835 if (arg_size.var)
2836 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2837 }
2838 else
2839 /* No stack slot was pushed for this parm. */
2840 stack_parm = 0;
2841
2842 /* Update info on where next arg arrives in registers. */
2843
2844 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2845 passed_type, ! last_named);
2846
2847 /* If this is our second time through, we are done with this parm. */
2848 if (second_time)
2849 continue;
2850
2851 /* If we can't trust the parm stack slot to be aligned enough
2852 for its ultimate type, don't use that slot after entry.
2853 We'll make another stack slot, if we need one. */
2854 {
2855 #ifdef FUNCTION_ARG_BOUNDARY
2856 int thisparm_boundary
2857 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2858 #else
2859 int thisparm_boundary = PARM_BOUNDARY;
2860 #endif
2861
2862 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2863 stack_parm = 0;
2864 }
2865
2866 /* Now adjust STACK_PARM to the mode and precise location
2867 where this parameter should live during execution,
2868 if we discover that it must live in the stack during execution.
2869 To make debuggers happier on big-endian machines, we store
2870 the value in the last bytes of the space available. */
2871
2872 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2873 && stack_parm != 0)
2874 {
2875 rtx offset_rtx;
2876
2877 #if BYTES_BIG_ENDIAN
2878 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2879 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2880 - GET_MODE_SIZE (nominal_mode));
2881 #endif
2882
2883 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2884 if (offset_rtx == const0_rtx)
2885 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2886 else
2887 stack_parm = gen_rtx (MEM, nominal_mode,
2888 gen_rtx (PLUS, Pmode,
2889 internal_arg_pointer, offset_rtx));
2890
2891 /* If this is a memory ref that contains aggregate components,
2892 mark it as such for cse and loop optimize. */
2893 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2894 }
2895
2896 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2897 in the mode in which it arrives.
2898 STACK_PARM is an RTX for a stack slot where the parameter can live
2899 during the function (in case we want to put it there).
2900 STACK_PARM is 0 if no stack slot was pushed for it.
2901
2902 Now output code if necessary to convert ENTRY_PARM to
2903 the type in which this function declares it,
2904 and store that result in an appropriate place,
2905 which may be a pseudo reg, may be STACK_PARM,
2906 or may be a local stack slot if STACK_PARM is 0.
2907
2908 Set DECL_RTL to that place. */
2909
2910 if (nominal_mode == BLKmode)
2911 {
2912 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2913 if (GET_CODE (entry_parm) == REG)
2914 {
2915 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2916 UNITS_PER_WORD);
2917
2918 /* Note that we will be storing an integral number of words.
2919 So we have to be careful to ensure that we allocate an
2920 integral number of words. We do this below in the
2921 assign_stack_local if space was not allocated in the argument
2922 list. If it was, this will not work if PARM_BOUNDARY is not
2923 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2924 if it becomes a problem. */
2925
2926 if (stack_parm == 0)
2927 {
2928 stack_parm
2929 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2930 /* If this is a memory ref that contains aggregate components,
2931 mark it as such for cse and loop optimize. */
2932 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2933 }
2934
2935 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2936 abort ();
2937
2938 move_block_from_reg (REGNO (entry_parm),
2939 validize_mem (stack_parm),
2940 size_stored / UNITS_PER_WORD);
2941 }
2942 DECL_RTL (parm) = stack_parm;
2943 }
2944 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
2945 && ! DECL_INLINE (fndecl))
2946 /* layout_decl may set this. */
2947 || TREE_ADDRESSABLE (parm)
2948 || TREE_SIDE_EFFECTS (parm)
2949 /* If -ffloat-store specified, don't put explicit
2950 float variables into registers. */
2951 || (flag_float_store
2952 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2953 /* Always assign pseudo to structure return or item passed
2954 by invisible reference. */
2955 || passed_pointer || parm == function_result_decl)
2956 {
2957 /* Store the parm in a pseudoregister during the function, but we
2958 may need to do it in a wider mode. */
2959
2960 register rtx parmreg;
2961
2962 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
2963 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
2964 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
2965 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
2966 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
2967 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
2968 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
2969 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
2970 {
2971 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
2972 }
2973
2974 parmreg = gen_reg_rtx (nominal_mode);
2975 REG_USERVAR_P (parmreg) = 1;
2976
2977 /* If this was an item that we received a pointer to, set DECL_RTL
2978 appropriately. */
2979 if (passed_pointer)
2980 {
2981 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2982 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2983 }
2984 else
2985 DECL_RTL (parm) = parmreg;
2986
2987 /* Copy the value into the register. */
2988 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
2989 {
2990 /* If ENTRY_PARM is a hard register, it might be in a register
2991 not valid for operating in its mode (e.g., an odd-numbered
2992 register for a DFmode). In that case, moves are the only
2993 thing valid, so we can't do a convert from there. This
2994 occurs when the calling sequence allows such misaligned
2995 usages. */
2996 if (GET_CODE (entry_parm) == REG
2997 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2998 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2999 GET_MODE (entry_parm)))
3000 convert_move (parmreg, copy_to_reg (entry_parm), unsignedp);
3001 else
3002 convert_move (parmreg, validize_mem (entry_parm), unsignedp);
3003 }
3004 else
3005 emit_move_insn (parmreg, validize_mem (entry_parm));
3006
3007 /* If we were passed a pointer but the actual value
3008 can safely live in a register, put it in one. */
3009 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3010 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3011 && ! DECL_INLINE (fndecl))
3012 /* layout_decl may set this. */
3013 || TREE_ADDRESSABLE (parm)
3014 || TREE_SIDE_EFFECTS (parm)
3015 /* If -ffloat-store specified, don't put explicit
3016 float variables into registers. */
3017 || (flag_float_store
3018 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3019 {
3020 /* We can't use nominal_mode, because it will have been set to
3021 Pmode above. We must use the actual mode of the parm. */
3022 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3023 emit_move_insn (parmreg, DECL_RTL (parm));
3024 DECL_RTL (parm) = parmreg;
3025 }
3026
3027 /* In any case, record the parm's desired stack location
3028 in case we later discover it must live in the stack. */
3029 if (REGNO (parmreg) >= nparmregs)
3030 {
3031 rtx *new;
3032 nparmregs = REGNO (parmreg) + 5;
3033 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3034 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
3035 parm_reg_stack_loc = new;
3036 }
3037 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3038
3039 /* Mark the register as eliminable if we did no conversion
3040 and it was copied from memory at a fixed offset,
3041 and the arg pointer was not copied to a pseudo-reg.
3042 If the arg pointer is a pseudo reg or the offset formed
3043 an invalid address, such memory-equivalences
3044 as we make here would screw up life analysis for it. */
3045 if (nominal_mode == passed_mode
3046 && GET_CODE (entry_parm) == MEM
3047 && entry_parm == stack_parm
3048 && stack_offset.var == 0
3049 && reg_mentioned_p (virtual_incoming_args_rtx,
3050 XEXP (entry_parm, 0)))
3051 REG_NOTES (get_last_insn ())
3052 = gen_rtx (EXPR_LIST, REG_EQUIV,
3053 entry_parm, REG_NOTES (get_last_insn ()));
3054
3055 /* For pointer data type, suggest pointer register. */
3056 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3057 mark_reg_pointer (parmreg);
3058 }
3059 else
3060 {
3061 /* Value must be stored in the stack slot STACK_PARM
3062 during function execution. */
3063
3064 if (passed_mode != nominal_mode)
3065 {
3066 /* Conversion is required. */
3067 if (GET_CODE (entry_parm) == REG
3068 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
3069 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
3070 entry_parm = copy_to_reg (entry_parm);
3071
3072 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
3073 }
3074
3075 if (entry_parm != stack_parm)
3076 {
3077 if (stack_parm == 0)
3078 {
3079 stack_parm
3080 = assign_stack_local (GET_MODE (entry_parm),
3081 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3082 /* If this is a memory ref that contains aggregate components,
3083 mark it as such for cse and loop optimize. */
3084 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3085 }
3086
3087 emit_move_insn (validize_mem (stack_parm),
3088 validize_mem (entry_parm));
3089 }
3090
3091 DECL_RTL (parm) = stack_parm;
3092 }
3093
3094 /* If this "parameter" was the place where we are receiving the
3095 function's incoming structure pointer, set up the result. */
3096 if (parm == function_result_decl)
3097 DECL_RTL (DECL_RESULT (fndecl))
3098 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3099
3100 if (TREE_THIS_VOLATILE (parm))
3101 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3102 if (TREE_READONLY (parm))
3103 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3104 }
3105
3106 max_parm_reg = max_reg_num ();
3107 last_parm_insn = get_last_insn ();
3108
3109 current_function_args_size = stack_args_size.constant;
3110
3111 /* Adjust function incoming argument size for alignment and
3112 minimum length. */
3113
3114 #ifdef REG_PARM_STACK_SPACE
3115 #ifndef MAYBE_REG_PARM_STACK_SPACE
3116 current_function_args_size = MAX (current_function_args_size,
3117 REG_PARM_STACK_SPACE (fndecl));
3118 #endif
3119 #endif
3120
3121 #ifdef STACK_BOUNDARY
3122 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3123
3124 current_function_args_size
3125 = ((current_function_args_size + STACK_BYTES - 1)
3126 / STACK_BYTES) * STACK_BYTES;
3127 #endif
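/* For instance (hypothetical target values): with STACK_BOUNDARY == 64,
   STACK_BYTES is 8, so an args size of 20 bytes rounds up to
   ((20 + 8 - 1) / 8) * 8 == 24.  */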
3128
3129 #ifdef ARGS_GROW_DOWNWARD
3130 current_function_arg_offset_rtx
3131 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3132 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3133 size_int (-stack_args_size.constant)),
3134 NULL_RTX, VOIDmode, 0));
3135 #else
3136 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3137 #endif
3138
3139 /* See how many bytes, if any, of its args a function should try to pop
3140 on return. */
3141
3142 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3143 current_function_args_size);
3144
3145 /* For stdarg.h function, save info about regs and stack space
3146 used by the named args. */
3147
3148 if (stdarg)
3149 current_function_args_info = args_so_far;
3150
3151 /* Set the rtx used for the function return value. Put this in its
3152 own variable so any optimizers that need this information don't have
3153 to include tree.h. Do this here so it gets done when an inlined
3154 function gets output. */
3155
3156 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3157 }
3158 \f
3159 /* Compute the size and offset from the start of the stacked arguments for a
3160 parm passed in mode PASSED_MODE and with type TYPE.
3161
3162 INITIAL_OFFSET_PTR points to the current offset into the stacked
3163 arguments.
3164
3165 The starting offset and size for this parm are returned in *OFFSET_PTR
3166 and *ARG_SIZE_PTR, respectively.
3167
3168 IN_REGS is non-zero if the argument will be passed in registers. It will
3169 never be set if REG_PARM_STACK_SPACE is not defined.
3170
3171 FNDECL is the function in which the argument was defined.
3172
3173 There are two types of rounding that are done. The first, controlled by
3174 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3175 list to be aligned to the specific boundary (in bits). This rounding
3176 affects the initial and starting offsets, but not the argument size.
3177
3178 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3179 optionally rounds the size of the parm to PARM_BOUNDARY. The
3180 initial offset is not affected by this rounding, while the size always
3181 is and the starting offset may be. */
3182
3183 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
3184 initial_offset_ptr is positive because locate_and_pad_parm's
3185 callers pass in the total size of args so far as
3186 initial_offset_ptr. arg_size_ptr is always positive. */
3187
3188 static void pad_to_arg_alignment (), pad_below ();
3189
3190 void
3191 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3192 initial_offset_ptr, offset_ptr, arg_size_ptr)
3193 enum machine_mode passed_mode;
3194 tree type;
3195 int in_regs;
3196 tree fndecl;
3197 struct args_size *initial_offset_ptr;
3198 struct args_size *offset_ptr;
3199 struct args_size *arg_size_ptr;
3200 {
3201 tree sizetree
3202 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3203 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3204 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3205 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3206 int reg_parm_stack_space = 0;
3207
3208 #ifdef REG_PARM_STACK_SPACE
3209 /* If we have found a stack parm before we reach the end of the
3210 area reserved for registers, skip that area. */
3211 if (! in_regs)
3212 {
3213 #ifdef MAYBE_REG_PARM_STACK_SPACE
3214 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3215 #else
3216 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3217 #endif
3218 if (reg_parm_stack_space > 0)
3219 {
3220 if (initial_offset_ptr->var)
3221 {
3222 initial_offset_ptr->var
3223 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3224 size_int (reg_parm_stack_space));
3225 initial_offset_ptr->constant = 0;
3226 }
3227 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3228 initial_offset_ptr->constant = reg_parm_stack_space;
3229 }
3230 }
3231 #endif /* REG_PARM_STACK_SPACE */
3232
3233 arg_size_ptr->var = 0;
3234 arg_size_ptr->constant = 0;
3235
3236 #ifdef ARGS_GROW_DOWNWARD
3237 if (initial_offset_ptr->var)
3238 {
3239 offset_ptr->constant = 0;
3240 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3241 initial_offset_ptr->var);
3242 }
3243 else
3244 {
3245 offset_ptr->constant = - initial_offset_ptr->constant;
3246 offset_ptr->var = 0;
3247 }
3248 if (where_pad == upward
3249 && (TREE_CODE (sizetree) != INTEGER_CST
3250 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3251 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3252 SUB_PARM_SIZE (*offset_ptr, sizetree);
3253 pad_to_arg_alignment (offset_ptr, boundary);
3254 if (initial_offset_ptr->var)
3255 {
3256 arg_size_ptr->var = size_binop (MINUS_EXPR,
3257 size_binop (MINUS_EXPR,
3258 integer_zero_node,
3259 initial_offset_ptr->var),
3260 offset_ptr->var);
3261 }
3262 else
3263 {
3264 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3265 offset_ptr->constant);
3266 }
3267 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3268 if (where_pad == downward)
3269 pad_below (arg_size_ptr, passed_mode, sizetree);
3270 #else /* !ARGS_GROW_DOWNWARD */
3271 pad_to_arg_alignment (initial_offset_ptr, boundary);
3272 *offset_ptr = *initial_offset_ptr;
3273 if (where_pad == downward)
3274 pad_below (offset_ptr, passed_mode, sizetree);
3275
3276 #ifdef PUSH_ROUNDING
3277 if (passed_mode != BLKmode)
3278 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3279 #endif
3280
3281 if (where_pad != none
3282 && (TREE_CODE (sizetree) != INTEGER_CST
3283 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3284 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3285
3286 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3287 #endif /* ARGS_GROW_DOWNWARD */
3288 }
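/* A hedged worked example for the upward-growing case (hypothetical target
   values; FUNCTION_ARG_PADDING assumed upward, PUSH_ROUNDING not defined):
   with FUNCTION_ARG_BOUNDARY and PARM_BOUNDARY both 32 bits, a QImode parm
   arriving when *INITIAL_OFFSET_PTR is 6 ends up with

       *OFFSET_PTR     padded to 8 by pad_to_arg_alignment
       *ARG_SIZE_PTR   1 byte, rounded up to 4

   so the caller's running args-size total advances from 8 to 12.  */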
3289
3290 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3291 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3292
3293 static void
3294 pad_to_arg_alignment (offset_ptr, boundary)
3295 struct args_size *offset_ptr;
3296 int boundary;
3297 {
3298 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3299
3300 if (boundary > BITS_PER_UNIT)
3301 {
3302 if (offset_ptr->var)
3303 {
3304 offset_ptr->var =
3305 #ifdef ARGS_GROW_DOWNWARD
3306 round_down
3307 #else
3308 round_up
3309 #endif
3310 (ARGS_SIZE_TREE (*offset_ptr),
3311 boundary / BITS_PER_UNIT);
3312 offset_ptr->constant = 0; /*?*/
3313 }
3314 else
3315 offset_ptr->constant =
3316 #ifdef ARGS_GROW_DOWNWARD
3317 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3318 #else
3319 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3320 #endif
3321 }
3322 }
3323
3324 static void
3325 pad_below (offset_ptr, passed_mode, sizetree)
3326 struct args_size *offset_ptr;
3327 enum machine_mode passed_mode;
3328 tree sizetree;
3329 {
3330 if (passed_mode != BLKmode)
3331 {
3332 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3333 offset_ptr->constant
3334 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3335 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3336 - GET_MODE_SIZE (passed_mode));
3337 }
3338 else
3339 {
3340 if (TREE_CODE (sizetree) != INTEGER_CST
3341 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3342 {
3343 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3344 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3345 /* Add it in. */
3346 ADD_PARM_SIZE (*offset_ptr, s2);
3347 SUB_PARM_SIZE (*offset_ptr, sizetree);
3348 }
3349 }
3350 }
3351
3352 static tree
3353 round_down (value, divisor)
3354 tree value;
3355 int divisor;
3356 {
3357 return size_binop (MULT_EXPR,
3358 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3359 size_int (divisor));
3360 }
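/* Illustrative use: round_down (size_int (10), 4) builds the tree for
   (10 / 4) * 4 == 8; it is the ARGS_GROW_DOWNWARD counterpart of the
   round_up calls above.  */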
3361 \f
3362 /* Walk the tree of blocks describing the binding levels within a function
3363 and warn about uninitialized variables.
3364 This is done after calling flow_analysis and before global_alloc
3365 clobbers the pseudo-regs to hard regs. */
3366
3367 void
3368 uninitialized_vars_warning (block)
3369 tree block;
3370 {
3371 register tree decl, sub;
3372 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3373 {
3374 if (TREE_CODE (decl) == VAR_DECL
3375 /* These warnings are unreliable for aggregates
3376 because assigning the fields one by one can fail to convince
3377 flow.c that the entire aggregate was initialized.
3378 Unions are troublesome because members may be shorter. */
3379 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3380 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3381 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3382 && DECL_RTL (decl) != 0
3383 && GET_CODE (DECL_RTL (decl)) == REG
3384 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3385 warning_with_decl (decl,
3386 "`%s' may be used uninitialized in this function");
3387 if (TREE_CODE (decl) == VAR_DECL
3388 && DECL_RTL (decl) != 0
3389 && GET_CODE (DECL_RTL (decl)) == REG
3390 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3391 warning_with_decl (decl,
3392 "variable `%s' may be clobbered by `longjmp'");
3393 }
3394 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3395 uninitialized_vars_warning (sub);
3396 }
3397
3398 /* Do the appropriate part of uninitialized_vars_warning
3399 but for arguments instead of local variables. */
3400
3401 void
3402 setjmp_args_warning (block)
3403 tree block;
3404 {
3405 register tree decl;
3406 for (decl = DECL_ARGUMENTS (current_function_decl);
3407 decl; decl = TREE_CHAIN (decl))
3408 if (DECL_RTL (decl) != 0
3409 && GET_CODE (DECL_RTL (decl)) == REG
3410 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3411 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3412 }
3413
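/* Editor's note: an illustrative user-level fragment (not part of this file)
   of what the longjmp warnings above are about: `v' is modified between the
   setjmp and a possible longjmp, so if it lives in a register its value
   after the longjmp is unreliable.  */
#if 0
#include <setjmp.h>
extern void g ();
static jmp_buf env;

int
f ()
{
  int v = 0;                    /* may be clobbered by longjmp */
  if (setjmp (env) == 0)
    {
      v = 1;
      g ();                     /* g may call longjmp (env, 1) */
    }
  return v;
}
#endif
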
3414 /* If this function calls setjmp, put all vars into the stack
3415 unless they were declared `register'. */
3416
3417 void
3418 setjmp_protect (block)
3419 tree block;
3420 {
3421 register tree decl, sub;
3422 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3423 if ((TREE_CODE (decl) == VAR_DECL
3424 || TREE_CODE (decl) == PARM_DECL)
3425 && DECL_RTL (decl) != 0
3426 && GET_CODE (DECL_RTL (decl)) == REG
3427 /* If this variable came from an inline function, it must be
3428 that its life doesn't overlap the setjmp. If there was a
3429 setjmp in the function, it would already be in memory. We
3430 must exclude such variables because their DECL_RTL might be
3431 set to strange things such as virtual_stack_vars_rtx. */
3432 && ! DECL_FROM_INLINE (decl)
3433 && (
3434 #ifdef NON_SAVING_SETJMP
3435 /* If longjmp doesn't restore the registers,
3436 don't put anything in them. */
3437 NON_SAVING_SETJMP
3438 ||
3439 #endif
3440 ! DECL_REGISTER (decl)))
3441 put_var_into_stack (decl);
3442 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3443 setjmp_protect (sub);
3444 }
3445 \f
3446 /* Like the previous function, but for args instead of local variables. */
3447
3448 void
3449 setjmp_protect_args ()
3450 {
3451 register tree decl, sub;
3452 for (decl = DECL_ARGUMENTS (current_function_decl);
3453 decl; decl = TREE_CHAIN (decl))
3454 if ((TREE_CODE (decl) == VAR_DECL
3455 || TREE_CODE (decl) == PARM_DECL)
3456 && DECL_RTL (decl) != 0
3457 && GET_CODE (DECL_RTL (decl)) == REG
3458 && (
3459 /* If longjmp doesn't restore the registers,
3460 don't put anything in them. */
3461 #ifdef NON_SAVING_SETJMP
3462 NON_SAVING_SETJMP
3463 ||
3464 #endif
3465 ! DECL_REGISTER (decl)))
3466 put_var_into_stack (decl);
3467 }
3468 \f
3469 /* Return the context-pointer register corresponding to DECL,
3470 or 0 if it does not need one. */
3471
3472 rtx
3473 lookup_static_chain (decl)
3474 tree decl;
3475 {
3476 tree context = decl_function_context (decl);
3477 tree link;
3478
3479 if (context == 0)
3480 return 0;
3481
3482 /* We treat inline_function_decl as an alias for the current function
3483 because that is the inline function whose vars, types, etc.
3484 are being merged into the current function.
3485 See expand_inline_function. */
3486 if (context == current_function_decl || context == inline_function_decl)
3487 return virtual_stack_vars_rtx;
3488
3489 for (link = context_display; link; link = TREE_CHAIN (link))
3490 if (TREE_PURPOSE (link) == context)
3491 return RTL_EXPR_RTL (TREE_VALUE (link));
3492
3493 abort ();
3494 }
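
/* Editor's note: an illustrative GNU C fragment (not part of this file)
   showing when a static chain is needed: the nested function `g' refers to
   `x' in its containing function, so expanding `g' must reach f's frame
   through the pointer that lookup_static_chain returns.  */
#if 0
int
f (x)
     int x;
{
  int g () { return x + 1; }    /* g needs f's static chain */
  return g ();
}
#endif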
3495 \f
3496 /* Convert a stack slot address ADDR for variable VAR
3497 (from a containing function)
3498 into an address valid in this function (using a static chain). */
3499
3500 rtx
3501 fix_lexical_addr (addr, var)
3502 rtx addr;
3503 tree var;
3504 {
3505 rtx basereg;
3506 int displacement;
3507 tree context = decl_function_context (var);
3508 struct function *fp;
3509 rtx base = 0;
3510
3511 /* If this is the present function, we need not do anything. */
3512 if (context == current_function_decl || context == inline_function_decl)
3513 return addr;
3514
3515 for (fp = outer_function_chain; fp; fp = fp->next)
3516 if (fp->decl == context)
3517 break;
3518
3519 if (fp == 0)
3520 abort ();
3521
3522 /* Decode given address as base reg plus displacement. */
3523 if (GET_CODE (addr) == REG)
3524 basereg = addr, displacement = 0;
3525 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3526 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3527 else
3528 abort ();
3529
3530 /* We accept vars reached via the containing function's
3531 incoming arg pointer and via its stack variables pointer. */
3532 if (basereg == fp->internal_arg_pointer)
3533 {
3534 /* If reached via arg pointer, get the arg pointer value
3535 out of that function's stack frame.
3536
3537 There are two cases: If a separate ap is needed, allocate a
3538 slot in the outer function for it and dereference it that way.
3539 This is correct even if the real ap is actually a pseudo.
3540 Otherwise, just adjust the offset from the frame pointer to
3541 compensate. */
3542
3543 #ifdef NEED_SEPARATE_AP
3544 rtx addr;
3545
3546 if (fp->arg_pointer_save_area == 0)
3547 fp->arg_pointer_save_area
3548 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3549
3550 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3551 addr = memory_address (Pmode, addr);
3552
3553 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3554 #else
3555 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3556 base = lookup_static_chain (var);
3557 #endif
3558 }
3559
3560 else if (basereg == virtual_stack_vars_rtx)
3561 {
3562 /* This is the same code as lookup_static_chain, duplicated here to
3563 avoid an extra call to decl_function_context. */
3564 tree link;
3565
3566 for (link = context_display; link; link = TREE_CHAIN (link))
3567 if (TREE_PURPOSE (link) == context)
3568 {
3569 base = RTL_EXPR_RTL (TREE_VALUE (link));
3570 break;
3571 }
3572 }
3573
3574 if (base == 0)
3575 abort ();
3576
3577 /* Use same offset, relative to appropriate static chain or argument
3578 pointer. */
3579 return plus_constant (base, displacement);
3580 }
3581 \f
3582 /* Return the address of the trampoline for entering nested fn FUNCTION.
3583 If necessary, allocate a trampoline (in the stack frame)
3584 and emit rtl to initialize its contents (at entry to this function). */
3585
3586 rtx
3587 trampoline_address (function)
3588 tree function;
3589 {
3590 tree link;
3591 tree rtlexp;
3592 rtx tramp;
3593 struct function *fp;
3594 tree fn_context;
3595
3596 /* Find an existing trampoline and return it. */
3597 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3598 if (TREE_PURPOSE (link) == function)
3599 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3600 for (fp = outer_function_chain; fp; fp = fp->next)
3601 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3602 if (TREE_PURPOSE (link) == function)
3603 {
3604 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3605 function);
3606 return round_trampoline_addr (tramp);
3607 }
3608
3609 /* None exists; we must make one. */
3610
3611 /* Find the `struct function' for the function containing FUNCTION. */
3612 fp = 0;
3613 fn_context = decl_function_context (function);
3614 if (fn_context != current_function_decl)
3615 for (fp = outer_function_chain; fp; fp = fp->next)
3616 if (fp->decl == fn_context)
3617 break;
3618
3619 /* Allocate run-time space for this trampoline
3620 (usually in the defining function's stack frame). */
3621 #ifdef ALLOCATE_TRAMPOLINE
3622 tramp = ALLOCATE_TRAMPOLINE (fp);
3623 #else
3624 /* If rounding needed, allocate extra space
3625 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3626 #ifdef TRAMPOLINE_ALIGNMENT
3627 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3628 #else
3629 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3630 #endif
3631 if (fp != 0)
3632 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3633 else
3634 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3635 #endif
3636
3637 /* Record the trampoline for reuse and note it for later initialization
3638 by expand_function_end. */
3639 if (fp != 0)
3640 {
3641 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3642 rtlexp = make_node (RTL_EXPR);
3643 RTL_EXPR_RTL (rtlexp) = tramp;
3644 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3645 pop_obstacks ();
3646 }
3647 else
3648 {
3649 /* Make the RTL_EXPR node temporary, not momentary, so that the
3650 trampoline_list doesn't become garbage. */
3651 int momentary = suspend_momentary ();
3652 rtlexp = make_node (RTL_EXPR);
3653 resume_momentary (momentary);
3654
3655 RTL_EXPR_RTL (rtlexp) = tramp;
3656 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3657 }
3658
3659 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3660 return round_trampoline_addr (tramp);
3661 }
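
/* Editor's note: an illustrative GNU C fragment (not part of this file) of
   what forces a trampoline: taking the address of a nested function so it
   can be called through an ordinary function pointer.  */
#if 0
int
f (x)
     int x;
{
  int g () { return x + 1; }
  int (*fp) () = g;             /* the pointer must go through a trampoline */
  return (*fp) ();
}
#endif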
3662
3663 /* Given a trampoline address,
3664 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
3665
3666 static rtx
3667 round_trampoline_addr (tramp)
3668 rtx tramp;
3669 {
3670 #ifdef TRAMPOLINE_ALIGNMENT
3671 /* Round address up to desired boundary. */
3672 rtx temp = gen_reg_rtx (Pmode);
3673 temp = expand_binop (Pmode, add_optab, tramp,
3674 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3675 temp, 0, OPTAB_LIB_WIDEN);
3676 tramp = expand_binop (Pmode, and_optab, temp,
3677 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3678 temp, 0, OPTAB_LIB_WIDEN);
3679 #endif
3680 return tramp;
3681 }
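
/* Editor's note: the rounding done above, sketched on a plain integer
   address (hypothetical; the real code emits RTL through expand_binop).
   TRAMPOLINE_ALIGNMENT is assumed to be a power of two, so adding
   ALIGNMENT - 1 and then anding with -ALIGNMENT rounds up.  */
#if 0
static unsigned long
round_addr_up (addr, alignment)
     unsigned long addr, alignment;
{
  return (addr + alignment - 1) & -alignment;
}
#endif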
3682 \f
3683 /* The functions identify_blocks and reorder_blocks provide a way to
3684 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3685 duplicate portions of the RTL code. Call identify_blocks before
3686 changing the RTL, and call reorder_blocks after. */
3687
3688 static int all_blocks ();
3689 static tree blocks_nreverse ();
3690
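/* Editor's note: a hedged sketch (names and cleanup are illustrative) of the
   calling protocol described above for an optimizer that rearranges RTL.  */
#if 0
{
  tree *block_vector = identify_blocks (top_block, get_insns ());
  /* ... reorder or duplicate insns here ... */
  top_block = reorder_blocks (block_vector, top_block, get_insns ());
  free (block_vector);
}
#endif
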
3691 /* Put all this function's BLOCK nodes into a vector, and return it.
3692 Also store in each NOTE for the beginning or end of a block
3693 the index of that block in the vector.
3694 The arguments are TOP_BLOCK, the top-level block of the function,
3695 and INSNS, the insn chain of the function. */
3696
3697 tree *
3698 identify_blocks (top_block, insns)
3699 tree top_block;
3700 rtx insns;
3701 {
3702 int n_blocks;
3703 tree *block_vector;
3704 int *block_stack;
3705 int depth = 0;
3706 int next_block_number = 0;
3707 int current_block_number = 0;
3708 rtx insn;
3709
3710 if (top_block == 0)
3711 return 0;
3712
3713 n_blocks = all_blocks (top_block, 0);
3714 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3715 block_stack = (int *) alloca (n_blocks * sizeof (int));
3716
3717 all_blocks (top_block, block_vector);
3718
3719 for (insn = insns; insn; insn = NEXT_INSN (insn))
3720 if (GET_CODE (insn) == NOTE)
3721 {
3722 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3723 {
3724 block_stack[depth++] = current_block_number;
3725 current_block_number = next_block_number;
3726 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
3727 }
3728 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3729 {
3730 current_block_number = block_stack[--depth];
3731 NOTE_BLOCK_NUMBER (insn) = current_block_number;
3732 }
3733 }
3734
3735 return block_vector;
3736 }
3737
3738 /* Given BLOCK_VECTOR which was returned by identify_blocks,
3739 and a revised instruction chain, rebuild the tree structure
3740 of BLOCK nodes to correspond to the new order of RTL.
3741 The new block tree is inserted below TOP_BLOCK.
3742 Returns the current top-level block. */
3743
3744 tree
3745 reorder_blocks (block_vector, top_block, insns)
3746 tree *block_vector;
3747 tree top_block;
3748 rtx insns;
3749 {
3750 tree current_block = top_block;
3751 rtx insn;
3752
3753 if (block_vector == 0)
3754 return top_block;
3755
3756 /* Prune the old tree away, so that it doesn't get in the way. */
3757 BLOCK_SUBBLOCKS (current_block) = 0;
3758
3759 for (insn = insns; insn; insn = NEXT_INSN (insn))
3760 if (GET_CODE (insn) == NOTE)
3761 {
3762 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3763 {
3764 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
3765 /* If we have seen this block before, copy it. */
3766 if (TREE_ASM_WRITTEN (block))
3767 block = copy_node (block);
3768 BLOCK_SUBBLOCKS (block) = 0;
3769 TREE_ASM_WRITTEN (block) = 1;
3770 BLOCK_SUPERCONTEXT (block) = current_block;
3771 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3772 BLOCK_SUBBLOCKS (current_block) = block;
3773 current_block = block;
3774 NOTE_SOURCE_FILE (insn) = 0;
3775 }
3776 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3777 {
3778 BLOCK_SUBBLOCKS (current_block)
3779 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3780 current_block = BLOCK_SUPERCONTEXT (current_block);
3781 NOTE_SOURCE_FILE (insn) = 0;
3782 }
3783 }
3784
3785 return current_block;
3786 }
3787
3788 /* Reverse the order of elements in the chain T of blocks,
3789 and return the new head of the chain (old last element). */
3790
3791 static tree
3792 blocks_nreverse (t)
3793 tree t;
3794 {
3795 register tree prev = 0, decl, next;
3796 for (decl = t; decl; decl = next)
3797 {
3798 next = BLOCK_CHAIN (decl);
3799 BLOCK_CHAIN (decl) = prev;
3800 prev = decl;
3801 }
3802 return prev;
3803 }
3804
3805 /* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
3806 Also clear TREE_ASM_WRITTEN in all blocks. */
3807
3808 static int
3809 all_blocks (block, vector)
3810 tree block;
3811 tree *vector;
3812 {
3813 int n_blocks = 1;
3814 tree subblocks;
3815
3816 TREE_ASM_WRITTEN (block) = 0;
3817 /* Record this block. */
3818 if (vector)
3819 vector[0] = block;
3820
3821 /* Record the subblocks, and their subblocks. */
3822 for (subblocks = BLOCK_SUBBLOCKS (block);
3823 subblocks; subblocks = BLOCK_CHAIN (subblocks))
3824 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
3825
3826 return n_blocks;
3827 }
3828 \f
3829 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3830 and initialize static variables for generating RTL for the statements
3831 of the function. */
3832
3833 void
3834 init_function_start (subr, filename, line)
3835 tree subr;
3836 char *filename;
3837 int line;
3838 {
3839 char *junk;
3840
3841 init_stmt_for_function ();
3842
3843 cse_not_expected = ! optimize;
3844
3845 /* Caller save not needed yet. */
3846 caller_save_needed = 0;
3847
3848 /* No stack slots have been made yet. */
3849 stack_slot_list = 0;
3850
3851 /* There is no stack slot for handling nonlocal gotos. */
3852 nonlocal_goto_handler_slot = 0;
3853 nonlocal_goto_stack_level = 0;
3854
3855 /* No labels have been declared for nonlocal use. */
3856 nonlocal_labels = 0;
3857
3858 /* No function calls so far in this function. */
3859 function_call_count = 0;
3860
3861 /* No parm regs have been allocated.
3862 (This is important for output_inline_function.) */
3863 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3864
3865 /* Initialize the RTL mechanism. */
3866 init_emit ();
3867
3868 /* Initialize the queue of pending postincrements and postdecrements,
3869 and some other info in expr.c. */
3870 init_expr ();
3871
3872 /* We haven't done register allocation yet. */
3873 reg_renumber = 0;
3874
3875 init_const_rtx_hash_table ();
3876
3877 current_function_name = (*decl_printable_name) (subr, &junk);
3878
3879 /* Nonzero if this is a nested function that uses a static chain. */
3880
3881 current_function_needs_context
3882 = (decl_function_context (current_function_decl) != 0);
3883
3884 /* Set if a call to setjmp is seen. */
3885 current_function_calls_setjmp = 0;
3886
3887 /* Set if a call to longjmp is seen. */
3888 current_function_calls_longjmp = 0;
3889
3890 current_function_calls_alloca = 0;
3891 current_function_has_nonlocal_label = 0;
3892 current_function_contains_functions = 0;
3893
3894 current_function_returns_pcc_struct = 0;
3895 current_function_returns_struct = 0;
3896 current_function_epilogue_delay_list = 0;
3897 current_function_uses_const_pool = 0;
3898 current_function_uses_pic_offset_table = 0;
3899
3900 /* We have not yet needed to make a label to jump to for tail-recursion. */
3901 tail_recursion_label = 0;
3902
3903 /* We haven't had a need to make a save area for ap yet. */
3904
3905 arg_pointer_save_area = 0;
3906
3907 /* No stack slots allocated yet. */
3908 frame_offset = 0;
3909
3910 /* No SAVE_EXPRs in this function yet. */
3911 save_expr_regs = 0;
3912
3913 /* No RTL_EXPRs in this function yet. */
3914 rtl_expr_chain = 0;
3915
3916 /* We have not allocated any temporaries yet. */
3917 temp_slots = 0;
3918 temp_slot_level = 0;
3919
3920 /* Within function body, compute a type's size as soon as it is laid out. */
3921 immediate_size_expand++;
3922
3923 init_pending_stack_adjust ();
3924 inhibit_defer_pop = 0;
3925
3926 current_function_outgoing_args_size = 0;
3927
3928 /* Initialize the insn lengths. */
3929 init_insn_lengths ();
3930
3931 /* Prevent ever trying to delete the first instruction of a function.
3932 Also tell final how to output a linenum before the function prologue. */
3933 emit_line_note (filename, line);
3934
3935 /* Make sure first insn is a note even if we don't want linenums.
3936 This makes sure the first insn will never be deleted.
3937 Also, final expects a note to appear there. */
3938 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3939
3940 /* Set flags used by final.c. */
3941 if (aggregate_value_p (DECL_RESULT (subr)))
3942 {
3943 #ifdef PCC_STATIC_STRUCT_RETURN
3944 if (flag_pcc_struct_return)
3945 current_function_returns_pcc_struct = 1;
3946 else
3947 #endif
3948 current_function_returns_struct = 1;
3949 }
3950
3951 /* Warn if this value is an aggregate type,
3952 regardless of which calling convention we are using for it. */
3953 if (warn_aggregate_return
3954 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
3955 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
3956 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
3957 warning ("function returns an aggregate");
3958
3959 current_function_returns_pointer
3960 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
3961
3962 /* Indicate that we need to distinguish between the return value of the
3963 present function and the return value of a function being called. */
3964 rtx_equal_function_value_matters = 1;
3965
3966 /* Indicate that we have not instantiated virtual registers yet. */
3967 virtuals_instantiated = 0;
3968
3969 /* Indicate we have no need of a frame pointer yet. */
3970 frame_pointer_needed = 0;
3971
3972 /* By default assume not varargs. */
3973 current_function_varargs = 0;
3974 }
3975
3976 /* Indicate that the current function uses extra args
3977 not explicitly mentioned in the argument list in any fashion. */
3978
3979 void
3980 mark_varargs ()
3981 {
3982 current_function_varargs = 1;
3983 }
3984
3985 /* Expand a call to __main at the beginning of a possible main function. */
3986
3987 void
3988 expand_main_function ()
3989 {
3990 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
3991 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
3992 VOIDmode, 0);
3993 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
3994 }
3995 \f
3996 /* Start the RTL for a new function, and set variables used for
3997 emitting RTL.
3998 SUBR is the FUNCTION_DECL node.
3999 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4000 the function's parameters, which must be run at any return statement. */
4001
4002 void
4003 expand_function_start (subr, parms_have_cleanups)
4004 tree subr;
4005 int parms_have_cleanups;
4006 {
4007 register int i;
4008 tree tem;
4009 rtx last_ptr;
4010
4011 /* Make sure volatile mem refs aren't considered
4012 valid operands of arithmetic insns. */
4013 init_recog_no_volatile ();
4014
4015 /* If function gets a static chain arg, store it in the stack frame.
4016 Do this first, so it gets the first stack slot offset. */
4017 if (current_function_needs_context)
4018 {
4019 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4020 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4021 }
4022
4023 /* If the parameters of this function need cleaning up, get a label
4024 for the beginning of the code which executes those cleanups. This must
4025 be done before doing anything with return_label. */
4026 if (parms_have_cleanups)
4027 cleanup_label = gen_label_rtx ();
4028 else
4029 cleanup_label = 0;
4030
4031 /* Make the label for return statements to jump to, if this machine
4032 does not have a one-instruction return and uses an epilogue,
4033 or if it returns a structure, or if it has parm cleanups. */
4034 #ifdef HAVE_return
4035 if (cleanup_label == 0 && HAVE_return
4036 && ! current_function_returns_pcc_struct
4037 && ! (current_function_returns_struct && ! optimize))
4038 return_label = 0;
4039 else
4040 return_label = gen_label_rtx ();
4041 #else
4042 return_label = gen_label_rtx ();
4043 #endif
4044
4045 /* Initialize rtx used to return the value. */
4046 /* Do this before assign_parms so that we copy the struct value address
4047 before any library calls that assign parms might generate. */
4048
4049 /* Decide whether to return the value in memory or in a register. */
4050 if (aggregate_value_p (DECL_RESULT (subr)))
4051 {
4052 /* Returning something that won't go in a register. */
4053 register rtx value_address;
4054
4055 #ifdef PCC_STATIC_STRUCT_RETURN
4056 if (current_function_returns_pcc_struct)
4057 {
4058 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4059 value_address = assemble_static_space (size);
4060 }
4061 else
4062 #endif
4063 {
4064 /* Expect to be passed the address of a place to store the value.
4065 If it is passed as an argument, assign_parms will take care of
4066 it. */
4067 if (struct_value_incoming_rtx)
4068 {
4069 value_address = gen_reg_rtx (Pmode);
4070 emit_move_insn (value_address, struct_value_incoming_rtx);
4071 }
4072 }
4073 if (value_address)
4074 DECL_RTL (DECL_RESULT (subr))
4075 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4076 value_address);
4077 }
4078 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4079 /* If return mode is void, this decl rtl should not be used. */
4080 DECL_RTL (DECL_RESULT (subr)) = 0;
4081 else if (parms_have_cleanups)
4082 /* If function will end with cleanup code for parms,
4083 compute the return values into a pseudo reg,
4084 which we will copy into the true return register
4085 after the cleanups are done. */
4086 DECL_RTL (DECL_RESULT (subr))
4087 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
4088 else
4089 /* Scalar, returned in a register. */
4090 {
4091 #ifdef FUNCTION_OUTGOING_VALUE
4092 DECL_RTL (DECL_RESULT (subr))
4093 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4094 #else
4095 DECL_RTL (DECL_RESULT (subr))
4096 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4097 #endif
4098
4099 /* Mark this reg as the function's return value. */
4100 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4101 {
4102 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4103 /* Needed because we may need to move this to memory
4104 in case it's a named return value whose address is taken. */
4105 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4106 }
4107 }
4108
4109 /* Initialize rtx for parameters and local variables.
4110 In some cases this requires emitting insns. */
4111
4112 assign_parms (subr, 0);
4113
4114 /* The following was moved from init_function_start.
4115 The move is supposed to make sdb output more accurate. */
4116 /* Indicate the beginning of the function body,
4117 as opposed to parm setup. */
4118 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4119
4120 /* If doing stupid allocation, mark parms as born here. */
4121
4122 if (GET_CODE (get_last_insn ()) != NOTE)
4123 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4124 parm_birth_insn = get_last_insn ();
4125
4126 if (obey_regdecls)
4127 {
4128 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4129 use_variable (regno_reg_rtx[i]);
4130
4131 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4132 use_variable (current_function_internal_arg_pointer);
4133 }
4134
4135 /* Fetch static chain values for containing functions. */
4136 tem = decl_function_context (current_function_decl);
4137 /* If not doing stupid register allocation, then start off with the static
4138 chain pointer in a pseudo register. Otherwise, we use the stack
4139 address that was generated above. */
4140 if (tem && ! obey_regdecls)
4141 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4142 context_display = 0;
4143 while (tem)
4144 {
4145 tree rtlexp = make_node (RTL_EXPR);
4146
4147 RTL_EXPR_RTL (rtlexp) = last_ptr;
4148 context_display = tree_cons (tem, rtlexp, context_display);
4149 tem = decl_function_context (tem);
4150 if (tem == 0)
4151 break;
4152 /* Chain thru stack frames, assuming pointer to next lexical frame
4153 is found at the place we always store it. */
4154 #ifdef FRAME_GROWS_DOWNWARD
4155 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4156 #endif
4157 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4158 memory_address (Pmode, last_ptr)));
4159 }
4160
4161 /* After the display initializations is where the tail-recursion label
4162 should go, if we end up needing one. Ensure we have a NOTE here
4163 since some things (like trampolines) get placed before this. */
4164 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4165
4166 /* Evaluate now the sizes of any types declared among the arguments. */
4167 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4168 expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
4169
4170 /* Make sure there is a line number after the function entry setup code. */
4171 force_next_line_note ();
4172 }
4173 \f
4174 /* Generate RTL for the end of the current function.
4175 FILENAME and LINE are the current position in the source file. */
4176
4177 /* It is up to language-specific callers to do cleanups for parameters. */
4178
4179 void
4180 expand_function_end (filename, line)
4181 char *filename;
4182 int line;
4183 {
4184 register int i;
4185 tree link;
4186
4187 static rtx initial_trampoline;
4188
4189 #ifdef NON_SAVING_SETJMP
4190 /* Don't put any variables in registers if we call setjmp
4191 on a machine that fails to restore the registers. */
4192 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4193 {
4194 setjmp_protect (DECL_INITIAL (current_function_decl));
4195 setjmp_protect_args ();
4196 }
4197 #endif
4198
4199 /* Save the argument pointer if a save area was made for it. */
4200 if (arg_pointer_save_area)
4201 {
4202 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4203 emit_insn_before (x, tail_recursion_reentry);
4204 }
4205
4206 /* Initialize any trampolines required by this function. */
4207 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4208 {
4209 tree function = TREE_PURPOSE (link);
4210 rtx context = lookup_static_chain (function);
4211 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4212 rtx seq;
4213
4214 /* First make sure this compilation has a template for
4215 initializing trampolines. */
4216 if (initial_trampoline == 0)
4217 {
4218 end_temporary_allocation ();
4219 initial_trampoline
4220 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4221 resume_temporary_allocation ();
4222 }
4223
4224 /* Generate insns to initialize the trampoline. */
4225 start_sequence ();
4226 tramp = change_address (initial_trampoline, BLKmode,
4227 round_trampoline_addr (XEXP (tramp, 0)));
4228 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4229 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4230 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4231 XEXP (DECL_RTL (function), 0), context);
4232 seq = get_insns ();
4233 end_sequence ();
4234
4235 /* Put those insns at entry to the containing function (this one). */
4236 emit_insns_before (seq, tail_recursion_reentry);
4237 }
4238 /* Clear the trampoline_list for the next function. */
4239 trampoline_list = 0;
4240
4241 #if 0 /* I think unused parms are legitimate enough. */
4242 /* Warn about unused parms. */
4243 if (warn_unused)
4244 {
4245 rtx decl;
4246
4247 for (decl = DECL_ARGUMENTS (current_function_decl);
4248 decl; decl = TREE_CHAIN (decl))
4249 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4250 warning_with_decl (decl, "unused parameter `%s'");
4251 }
4252 #endif
4253
4254 /* Delete handlers for nonlocal gotos if nothing uses them. */
4255 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4256 delete_handlers ();
4257
4258 /* End any sequences that failed to be closed due to syntax errors. */
4259 while (in_sequence_p ())
4260 end_sequence ();
4261
4262 /* Outside function body, can't compute type's actual size
4263 until next function's body starts. */
4264 immediate_size_expand--;
4265
4266 /* If doing stupid register allocation,
4267 mark register parms as dying here. */
4268
4269 if (obey_regdecls)
4270 {
4271 rtx tem;
4272 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4273 use_variable (regno_reg_rtx[i]);
4274
4275 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4276
4277 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4278 {
4279 use_variable (XEXP (tem, 0));
4280 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4281 }
4282
4283 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4284 use_variable (current_function_internal_arg_pointer);
4285 }
4286
4287 clear_pending_stack_adjust ();
4288 do_pending_stack_adjust ();
4289
4290 /* Mark the end of the function body.
4291 If control reaches this insn, the function can drop through
4292 without returning a value. */
4293 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4294
4295 /* Output a linenumber for the end of the function.
4296 SDB depends on this. */
4297 emit_line_note_force (filename, line);
4298
4299 /* Output the label for the actual return from the function,
4300 if one is expected. This happens either because a function epilogue
4301 is used instead of a return instruction, or because a return was done
4302 with a goto in order to run local cleanups, or because of pcc-style
4303 structure returning. */
4304
4305 if (return_label)
4306 emit_label (return_label);
4307
4308 /* If we had calls to alloca, and this machine needs
4309 an accurate stack pointer to exit the function,
4310 insert some code to save and restore the stack pointer. */
4311 #ifdef EXIT_IGNORE_STACK
4312 if (! EXIT_IGNORE_STACK)
4313 #endif
4314 if (current_function_calls_alloca)
4315 {
4316 rtx tem = 0;
4317
4318 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4319 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4320 }
4321
4322 /* If scalar return value was computed in a pseudo-reg,
4323 copy that to the hard return register. */
4324 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4325 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4326 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4327 >= FIRST_PSEUDO_REGISTER))
4328 {
4329 rtx real_decl_result;
4330
4331 #ifdef FUNCTION_OUTGOING_VALUE
4332 real_decl_result
4333 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4334 current_function_decl);
4335 #else
4336 real_decl_result
4337 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4338 current_function_decl);
4339 #endif
4340 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4341 emit_move_insn (real_decl_result,
4342 DECL_RTL (DECL_RESULT (current_function_decl)));
4343 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4344 }
4345
4346 /* If returning a structure, arrange to return the address of the value
4347 in a place where debuggers expect to find it.
4348
4349 If returning a structure PCC style,
4350 the caller also depends on this value.
4351 And current_function_returns_pcc_struct is not necessarily set. */
4352 if (current_function_returns_struct
4353 || current_function_returns_pcc_struct)
4354 {
4355 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4356 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4357 #ifdef FUNCTION_OUTGOING_VALUE
4358 rtx outgoing
4359 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4360 current_function_decl);
4361 #else
4362 rtx outgoing
4363 = FUNCTION_VALUE (build_pointer_type (type),
4364 current_function_decl);
4365 #endif
4366
4367 /* Mark this as a function return value so integrate will delete the
4368 assignment and USE below when inlining this function. */
4369 REG_FUNCTION_VALUE_P (outgoing) = 1;
4370
4371 emit_move_insn (outgoing, value_address);
4372 use_variable (outgoing);
4373 }
4374
4375 /* Output a return insn if we are using one.
4376 Otherwise, let the rtl chain end here, to drop through
4377 into the epilogue. */
4378
4379 #ifdef HAVE_return
4380 if (HAVE_return)
4381 {
4382 emit_jump_insn (gen_return ());
4383 emit_barrier ();
4384 }
4385 #endif
4386
4387 /* Fix up any gotos that jumped out to the outermost
4388 binding level of the function.
4389 Must follow emitting RETURN_LABEL. */
4390
4391 /* If you have any cleanups to do at this point,
4392 and they need to create temporary variables,
4393 then you will lose. */
4394 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4395 }
4396 \f
4397 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4398
4399 static int *prologue;
4400 static int *epilogue;
4401
4402 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
4403 or a single insn). */
4404
4405 static int *
4406 record_insns (insns)
4407 rtx insns;
4408 {
4409 int *vec;
4410
4411 if (GET_CODE (insns) == SEQUENCE)
4412 {
4413 int len = XVECLEN (insns, 0);
4414 vec = (int *) oballoc ((len + 1) * sizeof (int));
4415 vec[len] = 0;
4416 while (--len >= 0)
4417 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4418 }
4419 else
4420 {
4421 vec = (int *) oballoc (2 * sizeof (int));
4422 vec[0] = INSN_UID (insns);
4423 vec[1] = 0;
4424 }
4425 return vec;
4426 }
4427
4428 /* Determine how many INSN_UIDs in VEC are part of INSN. */
4429
4430 static int
4431 contains (insn, vec)
4432 rtx insn;
4433 int *vec;
4434 {
4435 register int i, j;
4436
4437 if (GET_CODE (insn) == INSN
4438 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4439 {
4440 int count = 0;
4441 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4442 for (j = 0; vec[j]; j++)
4443 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4444 count++;
4445 return count;
4446 }
4447 else
4448 {
4449 for (j = 0; vec[j]; j++)
4450 if (INSN_UID (insn) == vec[j])
4451 return 1;
4452 }
4453 return 0;
4454 }
4455
4456 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4457 this into place with notes indicating where the prologue ends and where
4458 the epilogue begins. Update the basic block information when possible. */
4459
4460 void
4461 thread_prologue_and_epilogue_insns (f)
4462 rtx f;
4463 {
4464 #ifdef HAVE_prologue
4465 if (HAVE_prologue)
4466 {
4467 rtx head, seq, insn;
4468
4469 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4470 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4471 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4472 seq = gen_prologue ();
4473 head = emit_insn_after (seq, f);
4474
4475 /* Include the new prologue insns in the first block. Ignore them
4476 if they form a basic block unto themselves. */
4477 if (basic_block_head && n_basic_blocks
4478 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4479 basic_block_head[0] = NEXT_INSN (f);
4480
4481 /* Retain a map of the prologue insns. */
4482 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4483 }
4484 else
4485 #endif
4486 prologue = 0;
4487
4488 #ifdef HAVE_epilogue
4489 if (HAVE_epilogue)
4490 {
4491 rtx insn = get_last_insn ();
4492 rtx prev = prev_nonnote_insn (insn);
4493
4494 /* If we end with a BARRIER, we don't need an epilogue. */
4495 if (! (prev && GET_CODE (prev) == BARRIER))
4496 {
4497 rtx tail, seq;
4498
4499 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4500 the epilogue insns (this must include the jump insn that
4501 returns), USE insns at the end of a function, and a BARRIER. */
4502
4503 emit_barrier_after (insn);
4504
4505 /* Place the epilogue before the USE insns at the end of a
4506 function. */
4507 while (prev
4508 && GET_CODE (prev) == INSN
4509 && GET_CODE (PATTERN (prev)) == USE)
4510 {
4511 insn = PREV_INSN (prev);
4512 prev = prev_nonnote_insn (prev);
4513 }
4514
4515 seq = gen_epilogue ();
4516 tail = emit_jump_insn_after (seq, insn);
4517 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4518
4519 /* Include the new epilogue insns in the last block. Ignore
4520 them if they form a basic block unto themselves. */
4521 if (basic_block_end && n_basic_blocks
4522 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4523 basic_block_end[n_basic_blocks - 1] = tail;
4524
4525 /* Retain a map of the epilogue insns. */
4526 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4527 return;
4528 }
4529 }
4530 #endif
4531 epilogue = 0;
4532 }
4533
4534 /* Reposition the prologue-end and epilogue-begin notes after instruction
4535 scheduling and delayed branch scheduling. */
4536
4537 void
4538 reposition_prologue_and_epilogue_notes (f)
4539 rtx f;
4540 {
4541 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
4542 /* Reposition the prologue and epilogue notes. */
4543 if (n_basic_blocks)
4544 {
4545 rtx next, prev;
4546 int len;
4547
4548 if (prologue)
4549 {
4550 register rtx insn, note = 0;
4551
4552 /* Scan from the beginning until we reach the last prologue insn.
4553 We apparently can't depend on basic_block_{head,end} after
4554 reorg has run. */
4555 for (len = 0; prologue[len]; len++)
4556 ;
4557 for (insn = f; insn; insn = NEXT_INSN (insn))
4558 if (GET_CODE (insn) == NOTE)
4559 {
4560 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4561 note = insn;
4562 }
4563 else if ((len -= contains (insn, prologue)) == 0)
4564 {
4565 /* Find the prologue-end note if we haven't already, and
4566 move it to just after the last prologue insn. */
4567 if (note == 0)
4568 for (note = insn; note = NEXT_INSN (note);)
4569 if (GET_CODE (note) == NOTE
4570 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4571 break;
4572 next = NEXT_INSN (note);
4573 prev = PREV_INSN (note);
4574 if (prev)
4575 NEXT_INSN (prev) = next;
4576 if (next)
4577 PREV_INSN (next) = prev;
4578 add_insn_after (note, insn);
4579 break;
4580 }
4581 }
4582
4583 if (epilogue)
4584 {
4585 register rtx insn, note = 0;
4586
4587 /* Scan from the end until we reach the first epilogue insn.
4588 We apparently can't depend on basic_block_{head,end} after
4589 reorg has run. */
4590 for (len = 0; epilogue[len]; len++)
4591 ;
4592 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
4593 if (GET_CODE (insn) == NOTE)
4594 {
4595 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4596 note = insn;
4597 }
4598 else if ((len -= contains (insn, epilogue)) == 0)
4599 {
4600 /* Find the epilogue-begin note if we haven't already, and
4601 move it to just before the first epilogue insn. */
4602 if (note == 0)
4603 for (note = insn; note = PREV_INSN (note);)
4604 if (GET_CODE (note) == NOTE
4605 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4606 break;
4607 next = NEXT_INSN (note);
4608 prev = PREV_INSN (note);
4609 if (prev)
4610 NEXT_INSN (prev) = next;
4611 if (next)
4612 PREV_INSN (next) = prev;
4613 add_insn_after (note, PREV_INSN (insn));
4614 break;
4615 }
4616 }
4617 }
4618 #endif /* HAVE_prologue or HAVE_epilogue */
4619 }