/* gcc/function.c — recovered from a gcc.gnu.org git-blame view;
   blame-page header and line-number artifacts are scrape residue.  */
1/* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40#include "config.h"
41
42#include <stdio.h>
43
44#include "rtl.h"
45#include "tree.h"
46#include "flags.h"
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
56
57/* Round a value to the lowest integer less than it that is a multiple of
58 the required alignment. Avoid using division in case the value is
59 negative. Assume the alignment is a power of two. */
60#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
61
62/* Similar, but round to the next highest integer that meets the
63 alignment. */
64#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
65
66/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
67 during rtl generation. If they are different register numbers, this is
68 always true. It may also be true if
69 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
70 generation. See fix_lexical_addr for details. */
71
72#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
73#define NEED_SEPARATE_AP
74#endif
75
76/* Number of bytes of args popped by function being compiled on its return.
77 Zero if no bytes are to be popped.
78 May affect compilation of return insn or of function epilogue. */
79
80int current_function_pops_args;
81
82/* Nonzero if function being compiled needs to be given an address
83 where the value should be stored. */
84
85int current_function_returns_struct;
86
87/* Nonzero if function being compiled needs to
88 return the address of where it has put a structure value. */
89
90int current_function_returns_pcc_struct;
91
92/* Nonzero if function being compiled needs to be passed a static chain. */
93
94int current_function_needs_context;
95
96/* Nonzero if function being compiled can call setjmp. */
97
98int current_function_calls_setjmp;
99
100/* Nonzero if function being compiled can call longjmp. */
101
102int current_function_calls_longjmp;
103
104/* Nonzero if function being compiled receives nonlocal gotos
105 from nested functions. */
106
107int current_function_has_nonlocal_label;
108
109/* Nonzero if function being compiled contains nested functions. */
110
111int current_function_contains_functions;
112
113/* Nonzero if function being compiled can call alloca,
114 either as a subroutine or builtin. */
115
116int current_function_calls_alloca;
117
118/* Nonzero if the current function returns a pointer type */
119
120int current_function_returns_pointer;
121
122/* If some insns can be deferred to the delay slots of the epilogue, the
123 delay list for them is recorded here. */
124
125rtx current_function_epilogue_delay_list;
126
127/* If function's args have a fixed size, this is that size, in bytes.
128 Otherwise, it is -1.
129 May affect compilation of return insn or of function epilogue. */
130
131int current_function_args_size;
132
133/* # bytes the prologue should push and pretend that the caller pushed them.
134 The prologue must do this, but only if parms can be passed in registers. */
135
136int current_function_pretend_args_size;
137
138/* # of bytes of outgoing arguments required to be pushed by the prologue.
139 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
140 and no stack adjusts will be done on function calls. */
141
142int current_function_outgoing_args_size;
143
144/* This is the offset from the arg pointer to the place where the first
145 anonymous arg can be found, if there is one. */
146
147rtx current_function_arg_offset_rtx;
148
149/* Nonzero if current function uses varargs.h or equivalent.
150 Zero for functions that use stdarg.h. */
151
152int current_function_varargs;
153
154/* Quantities of various kinds of registers
155 used for the current function's args. */
156
157CUMULATIVE_ARGS current_function_args_info;
158
159/* Name of function now being compiled. */
160
161char *current_function_name;
162
163/* If non-zero, an RTL expression for that location at which the current
164 function returns its result. Always equal to
165 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
166 independently of the tree structures. */
167
168rtx current_function_return_rtx;
169
170/* Nonzero if the current function uses the constant pool. */
171
172int current_function_uses_const_pool;
173
174/* Nonzero if the current function uses pic_offset_table_rtx. */
175int current_function_uses_pic_offset_table;
176
177/* The arg pointer hard register, or the pseudo into which it was copied. */
178rtx current_function_internal_arg_pointer;
179
180/* The FUNCTION_DECL for an inline function currently being expanded. */
181tree inline_function_decl;
182
183/* Number of function calls seen so far in current function. */
184
185int function_call_count;
186
187/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
188 (labels to which there can be nonlocal gotos from nested functions)
189 in this function. */
190
191tree nonlocal_labels;
192
193/* RTX for stack slot that holds the current handler for nonlocal gotos.
194 Zero when function does not have nonlocal labels. */
195
196rtx nonlocal_goto_handler_slot;
197
198/* RTX for stack slot that holds the stack pointer value to restore
199 for a nonlocal goto.
200 Zero when function does not have nonlocal labels. */
201
202rtx nonlocal_goto_stack_level;
203
204/* Label that will go on parm cleanup code, if any.
205 Jumping to this label runs cleanup code for parameters, if
206 such code must be run. Following this code is the logical return label. */
207
208rtx cleanup_label;
209
210/* Label that will go on function epilogue.
211 Jumping to this label serves as a "return" instruction
212 on machines which require execution of the epilogue on all returns. */
213
214rtx return_label;
215
216/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
217 So we can mark them all live at the end of the function, if nonopt. */
218rtx save_expr_regs;
219
220/* List (chain of EXPR_LISTs) of all stack slots in this function.
221 Made for the sake of unshare_all_rtl. */
222rtx stack_slot_list;
223
224/* Chain of all RTL_EXPRs that have insns in them. */
225tree rtl_expr_chain;
226
227/* Label to jump back to for tail recursion, or 0 if we have
228 not yet needed one for this function. */
229rtx tail_recursion_label;
230
231/* Place after which to insert the tail_recursion_label if we need one. */
232rtx tail_recursion_reentry;
233
234/* Location at which to save the argument pointer if it will need to be
235 referenced. There are two cases where this is done: if nonlocal gotos
236 exist, or if vars stored at an offset from the argument pointer will be
237 needed by inner routines. */
238
239rtx arg_pointer_save_area;
240
241/* Offset to end of allocated area of stack frame.
242 If stack grows down, this is the address of the last stack slot allocated.
243 If stack grows up, this is the address for the next slot. */
244int frame_offset;
245
246/* List (chain of TREE_LISTs) of static chains for containing functions.
247 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
248 in an RTL_EXPR in the TREE_VALUE. */
249static tree context_display;
250
251/* List (chain of TREE_LISTs) of trampolines for nested functions.
252 The trampoline sets up the static chain and jumps to the function.
253 We supply the trampoline's address when the function's address is requested.
254
255 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
256 in an RTL_EXPR in the TREE_VALUE. */
257static tree trampoline_list;
258
259/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
260static rtx parm_birth_insn;
261
262#if 0
263/* Nonzero if a stack slot has been generated whose address is not
264 actually valid. It means that the generated rtl must all be scanned
265 to detect and correct the invalid addresses where they occur. */
266static int invalid_stack_slot;
267#endif
268
269/* Last insn of those whose job was to put parms into their nominal homes. */
270static rtx last_parm_insn;
271
272/* 1 + last pseudo register number used for loading a copy
273 of a parameter of this function. */
274static int max_parm_reg;
275
276/* Vector indexed by REGNO, containing location on stack in which
277 to put the parm which is nominally in pseudo register REGNO,
278 if we discover that that parm must go in the stack. */
279static rtx *parm_reg_stack_loc;
280
281#if 0 /* Turned off because 0 seems to work just as well. */
282/* Cleanup lists are required for binding levels regardless of whether
283 that binding level has cleanups or not. This node serves as the
284 cleanup list whenever an empty list is required. */
285static tree empty_cleanup_list;
286#endif
287
288/* Nonzero once virtual register instantiation has been done.
289 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
290static int virtuals_instantiated;
291
292/* Nonzero if we need to distinguish between the return value of this function
293 and the return value of a function called by this function. This helps
294 integrate.c */
295
296extern int rtx_equal_function_value_matters;
297
298void fixup_gotos ();
299
300static tree round_down ();
301static rtx round_trampoline_addr ();
302static rtx fixup_stack_1 ();
303static void fixup_var_refs ();
304static void fixup_var_refs_insns ();
305static void fixup_var_refs_1 ();
306static void optimize_bit_field ();
307static void instantiate_decls ();
308static void instantiate_decls_1 ();
309static int instantiate_virtual_regs_1 ();
310static rtx fixup_memory_subreg ();
311static rtx walk_fixup_memory_subreg ();
312\f
313/* In order to evaluate some expressions, such as function calls returning
314 structures in memory, we need to temporarily allocate stack locations.
315 We record each allocated temporary in the following structure.
316
317 Associated with each temporary slot is a nesting level. When we pop up
318 one level, all temporaries associated with the previous level are freed.
319 Normally, all temporaries are freed after the execution of the statement
320 in which they were created. However, if we are inside a ({...}) grouping,
321 the result may be in a temporary and hence must be preserved. If the
322 result could be in a temporary, we preserve it if we can determine which
323 one it is in. If we cannot determine which temporary may contain the
324 result, all temporaries are preserved. A temporary is preserved by
325 pretending it was allocated at the previous nesting level.
326
327 Automatic variables are also assigned temporary slots, at the nesting
328 level where they are defined. They are marked a "kept" so that
329 free_temp_slots will not free them. */
330
/* Record for one temporary stack slot, kept on the `temp_slots' chain
   so that freed slots of the right mode and size can be reused.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx (a MEM) used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};
346
347/* List of all temporaries allocated, both available and in use. */
348
349struct temp_slot *temp_slots;
350
351/* Current nesting level for temporaries. */
352
353int temp_slot_level;
354\f
355/* Pointer to chain of `struct function' for containing functions. */
356struct function *outer_function_chain;
357
358/* Given a function decl for a containing function,
359 return the `struct function' for it. */
360
361struct function *
362find_function_data (decl)
363 tree decl;
364{
365 struct function *p;
366 for (p = outer_function_chain; p; p = p->next)
367 if (p->decl == decl)
368 return p;
369 abort ();
370}
371
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  /* Record for the suspended (outer) function; released again by
     pop_function_context.  */
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Snapshot every piece of per-function global state so that it can be
     restored when compilation of the nested function is finished.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  /* The nested function has no pending addressable-variable fixups yet.  */
  p->fixup_var_refs_queue = 0;

  /* Save the state of the other compiler modules, then reinitialize the
     emit machinery for the nested function.  Note that save_emit_status
     must run before init_emit, which resets that state.  */
  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}
433
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.
   Mirrors push_function_context: each global is restored from the
   record P that was saved when the outer function was suspended.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  /* The function we just finished compiling was nested in this one.  */
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
505\f
506/* Allocate fixed slots in the stack frame of the current function. */
507
508/* Return size needed for stack frame based on slots so far allocated.
509 This size counts from zero. It is not rounded to STACK_BOUNDARY;
510 the caller may have to do that. */
511
512int
513get_frame_size ()
514{
515#ifdef FRAME_GROWS_DOWNWARD
516 return -frame_offset;
517#else
518 return frame_offset;
519#endif
520}
521
522/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
523 with machine mode MODE.
524
525 ALIGN controls the amount of alignment for the address of the slot:
526 0 means according to MODE,
527 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
528 positive specifies alignment boundary in bits.
529
530 We do not round to stack_boundary here. */
531
532rtx
533assign_stack_local (mode, size, align)
534 enum machine_mode mode;
535 int size;
536 int align;
537{
538 register rtx x, addr;
539 int bigend_correction = 0;
540 int alignment;
541
542 if (align == 0)
543 {
544 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
545 if (mode == BLKmode)
546 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
547 }
548 else if (align == -1)
549 {
550 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
551 size = CEIL_ROUND (size, alignment);
552 }
553 else
554 alignment = align / BITS_PER_UNIT;
555
6f086dfc
RS
556 /* Round frame offset to that alignment.
557 We must be careful here, since FRAME_OFFSET might be negative and
558 division with a negative dividend isn't as well defined as we might
559 like. So we instead assume that ALIGNMENT is a power of two and
560 use logical operations which are unambiguous. */
561#ifdef FRAME_GROWS_DOWNWARD
562 frame_offset = FLOOR_ROUND (frame_offset, alignment);
563#else
564 frame_offset = CEIL_ROUND (frame_offset, alignment);
565#endif
566
567 /* On a big-endian machine, if we are allocating more space than we will use,
568 use the least significant bytes of those that are allocated. */
569#if BYTES_BIG_ENDIAN
570 if (mode != BLKmode)
571 bigend_correction = size - GET_MODE_SIZE (mode);
572#endif
573
574#ifdef FRAME_GROWS_DOWNWARD
575 frame_offset -= size;
576#endif
577
578 /* If we have already instantiated virtual registers, return the actual
579 address relative to the frame pointer. */
580 if (virtuals_instantiated)
581 addr = plus_constant (frame_pointer_rtx,
582 (frame_offset + bigend_correction
583 + STARTING_FRAME_OFFSET));
584 else
585 addr = plus_constant (virtual_stack_vars_rtx,
586 frame_offset + bigend_correction);
587
588#ifndef FRAME_GROWS_DOWNWARD
589 frame_offset += size;
590#endif
591
592 x = gen_rtx (MEM, mode, addr);
593
594 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
595
596 return x;
597}
598
599/* Assign a stack slot in a containing function.
600 First three arguments are same as in preceding function.
601 The last argument specifies the function to allocate in. */
602
603rtx
604assign_outer_stack_local (mode, size, align, function)
605 enum machine_mode mode;
606 int size;
607 int align;
608 struct function *function;
609{
610 register rtx x, addr;
611 int bigend_correction = 0;
612 int alignment;
613
614 /* Allocate in the memory associated with the function in whose frame
615 we are assigning. */
616 push_obstacks (function->function_obstack,
617 function->function_maybepermanent_obstack);
618
619 if (align == 0)
620 {
621 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
622 if (mode == BLKmode)
623 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
624 }
625 else if (align == -1)
626 {
627 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
628 size = CEIL_ROUND (size, alignment);
629 }
630 else
631 alignment = align / BITS_PER_UNIT;
632
6f086dfc
RS
633 /* Round frame offset to that alignment. */
634#ifdef FRAME_GROWS_DOWNWARD
635 frame_offset = FLOOR_ROUND (frame_offset, alignment);
636#else
637 frame_offset = CEIL_ROUND (frame_offset, alignment);
638#endif
639
640 /* On a big-endian machine, if we are allocating more space than we will use,
641 use the least significant bytes of those that are allocated. */
642#if BYTES_BIG_ENDIAN
643 if (mode != BLKmode)
644 bigend_correction = size - GET_MODE_SIZE (mode);
645#endif
646
647#ifdef FRAME_GROWS_DOWNWARD
648 function->frame_offset -= size;
649#endif
650 addr = plus_constant (virtual_stack_vars_rtx,
651 function->frame_offset + bigend_correction);
652#ifndef FRAME_GROWS_DOWNWARD
653 function->frame_offset += size;
654#endif
655
656 x = gen_rtx (MEM, mode, addr);
657
658 function->stack_slot_list
659 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
660
661 pop_obstacks ();
662
663 return x;
664}
665\f
666/* Allocate a temporary stack slot and record it for possible later
667 reuse.
668
669 MODE is the machine mode to be given to the returned rtx.
670
671 SIZE is the size in units of the space required. We do no rounding here
672 since assign_stack_local will do any required rounding.
673
674 KEEP is non-zero if this slot is to be retained after a call to
675 free_temp_slots. Automatic variables for a block are allocated with this
676 flag. */
677
678rtx
679assign_stack_temp (mode, size, keep)
680 enum machine_mode mode;
681 int size;
682 int keep;
683{
684 struct temp_slot *p, *best_p = 0;
685
686 /* First try to find an available, already-allocated temporary that is the
687 exact size we require. */
688 for (p = temp_slots; p; p = p->next)
689 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
690 break;
691
692 /* If we didn't find, one, try one that is larger than what we want. We
693 find the smallest such. */
694 if (p == 0)
695 for (p = temp_slots; p; p = p->next)
696 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
697 && (best_p == 0 || best_p->size > p->size))
698 best_p = p;
699
700 /* Make our best, if any, the one to use. */
701 if (best_p)
702 p = best_p;
703
704 /* If we still didn't find one, make a new temporary. */
705 if (p == 0)
706 {
707 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
708 p->size = size;
709 /* If the temp slot mode doesn't indicate the alignment,
710 use the largest possible, so no one will be disappointed. */
711 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
712 p->next = temp_slots;
713 temp_slots = p;
714 }
715
716 p->in_use = 1;
717 p->level = temp_slot_level;
718 p->keep = keep;
719 return p->slot;
720}
721\f
722/* If X could be a reference to a temporary slot, mark that slot as belonging
723 to the to one level higher. If X matched one of our slots, just mark that
724 one. Otherwise, we can't easily predict which it is, so upgrade all of
725 them. Kept slots need not be touched.
726
727 This is called when an ({...}) construct occurs and a statement
728 returns a value in memory. */
729
730void
731preserve_temp_slots (x)
732 rtx x;
733{
734 struct temp_slot *p;
735
736 /* If X is not in memory or is at a constant address, it cannot be in
737 a temporary slot. */
738 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
739 return;
740
741 /* First see if we can find a match. */
742 for (p = temp_slots; p; p = p->next)
743 if (p->in_use && x == p->slot)
744 {
745 p->level--;
746 return;
747 }
748
749 /* Otherwise, preserve all non-kept slots at this level. */
750 for (p = temp_slots; p; p = p->next)
751 if (p->in_use && p->level == temp_slot_level && ! p->keep)
752 p->level--;
753}
754
755/* Free all temporaries used so far. This is normally called at the end
756 of generating code for a statement. */
757
758void
759free_temp_slots ()
760{
761 struct temp_slot *p;
762
763 for (p = temp_slots; p; p = p->next)
764 if (p->in_use && p->level == temp_slot_level && ! p->keep)
765 p->in_use = 0;
766}
767
768/* Push deeper into the nesting level for stack temporaries. */
769
770void
771push_temp_slots ()
772{
773 /* For GNU C++, we must allow a sequence to be emitted anywhere in
774 the level where the sequence was started. By not changing levels
775 when the compiler is inside a sequence, the temporaries for the
776 sequence and the temporaries will not unwittingly conflict with
777 the temporaries for other sequences and/or code at that level. */
778 if (in_sequence_p ())
779 return;
780
781 temp_slot_level++;
782}
783
784/* Pop a temporary nesting level. All slots in use in the current level
785 are freed. */
786
787void
788pop_temp_slots ()
789{
790 struct temp_slot *p;
791
792 /* See comment in push_temp_slots about why we don't change levels
793 in sequences. */
794 if (in_sequence_p ())
795 return;
796
797 for (p = temp_slots; p; p = p->next)
798 if (p->in_use && p->level == temp_slot_level)
799 p->in_use = 0;
800
801 temp_slot_level--;
802}
803\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.
   DECL may also be a SAVE_EXPR; we use whichever rtl it already has.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  /* Only pseudo-registers can be moved into the stack here.  */
  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      /* Allocate in the outer function's frame; reuse its recorded
	 parameter stack slot if REG was a parameter copy.  */
      if (REGNO (reg) < function->max_parm_reg)
	new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_outer_stack_local (GET_MODE (reg),
					GET_MODE_SIZE (GET_MODE (reg)),
					0, function);
    }
  else
    {
      /* Same, but in the current function's frame.  */
      if (REGNO (reg) < max_parm_reg)
	new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_stack_local (GET_MODE (reg),
				  GET_MODE_SIZE (GET_MODE (reg)),
				  0);
    }

  /* Mutate the REG rtx into a MEM in place, so every rtl reference that
     shares this rtx now refers to the stack slot's address.  */
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);
      temp
	= (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
893\f
/* Fix up all references to VAR, which has just been moved from a register
   to a memory location: scan the main insn chain, every pending sequence,
   and every saved RTL_EXPR sequence.  */

static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.
     This chain is "toplevel" only when no sequences are pending.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      /* const0_rtx marks an RTL_EXPR whose sequence was already emitted.  */
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, seq, 0);
	  end_sequence ();
	}
    }
}
929\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  /* The rtx being replaced.  */
  rtx old;
  /* What to replace it with, or 0 if not yet chosen.  */
  rtx new;
  /* Next entry in the list.  */
  struct fixup_replacement *next;
};
942
943/* REPLACEMENTS is a pointer to a list of the above structures and X is
944 some part of an insn. Return a struct fixup_replacement whose OLD
945 value is equal to X. Allocate a new structure if no such entry exists. */
946
947static struct fixup_replacement *
948find_replacement (replacements, x)
949 struct fixup_replacement **replacements;
950 rtx x;
951{
952 struct fixup_replacement *p;
953
954 /* See if we have already replaced this. */
955 for (p = *replacements; p && p->old != x; p = p->next)
956 ;
957
958 if (p == 0)
959 {
960 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
961 p->old = x;
962 p->new = 0;
963 p->next = *replacements;
964 *replacements = p;
965 }
966
967 return p;
968}
969
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      /* Remember the successor now; we may delete INSN below.  */
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	  || GET_CODE (insn) == JUMP_INSN)
	{
	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  if (toplevel
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == var
	      && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      next = delete_insn (insn);
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      struct fixup_replacement *replacements = 0;

	      fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      /* We can not separate USE insns from the CALL_INSN
			 that they belong to.  If this is a CALL_INSN, insert
			 the move insn before the USE insns preceding it
			 instead of immediately before the insn.  */
		      if (GET_CODE (insn) == CALL_INSN)
			{
			  insert_before = insn;
			  while (GET_CODE (PREV_INSN (insert_before)) == INSN
				 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
			    insert_before = PREV_INSN (insert_before);
			}
		      else
			insert_before = insn;

		      emit_insn_before (gen_move_insn (replacements->new,
						       replacements->old),
					insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
	}
      insn = next;
    }
}
1062\f
/* VAR is a MEM that used to be a pseudo register.  See if the rtx expression
   at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, loc, insn, replacements)
     register rtx var;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register, we can leave things
	     the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  /* Insn isn't valid with the MEM in place; use a pseudo instead.
	     fixup_var_refs_insns will emit the load from VAR into it.  */
	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* None of these can contain VAR; nothing to do.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    tem = fixup_memory_subreg (tem, insn, 1);
	  tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      /* NOTE(review): WIDTH is computed but not used below.  */
	      int width = INTVAL (XEXP (x, 1));
	      int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  int offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		  offset = (GET_MODE_SIZE (is_mode)
			    - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx (MEM, wanted_mode,
				    plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = gen_rtx (CONST_INT, VOIDmode, pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	/* Strip wrappers to find the rtx VAR might appear as.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
	    fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
	    fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 1);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode
		  = insn_operand_mode[(int) CODE_FOR_insv][0];
		enum machine_mode is_mode = GET_MODE (tem);
		/* NOTE(review): WIDTH is computed but not used below.  */
		int width = INTVAL (XEXP (outerdest, 1));
		int pos = INTVAL (XEXP (outerdest, 2));

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    int offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx (MEM, wanted_mode,
				      plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = gen_rtx (CONST_INT, VOIDmode, pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && recog_memoized (insn) >= 0)
	  {
	    replacement = find_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      {
		SET_SRC (x) = replacement->new;
		return;
	      }
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);
	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && recog_memoized (insn) >= 0)
	  {
	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest;
	    tem = SET_DEST (x);
	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (tem) == STRICT_LOW_PART)
	      tem = XEXP (tem, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (tem) == SUBREG)
	      fixeddest = fixup_memory_subreg (tem, insn, 0);
	    else
	      fixeddest = fixup_stack_1 (tem, insn);

	    temp = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_after (gen_move_insn (fixeddest, temp), insn);
	    SET_DEST (x) = temp;
	  }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
	}
    }
}
1478\f
1479/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1480 return an rtx (MEM:m1 newaddr) which is equivalent.
1481 If any insns must be emitted to compute NEWADDR, put them before INSN.
1482
1483 UNCRITICAL nonzero means accept paradoxical subregs.
1484 This is used for subregs found inside of ZERO_EXTRACTs. */
1485
1486static rtx
1487fixup_memory_subreg (x, insn, uncritical)
1488 rtx x;
1489 rtx insn;
1490 int uncritical;
1491{
1492 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1493 rtx addr = XEXP (SUBREG_REG (x), 0);
1494 enum machine_mode mode = GET_MODE (x);
1495 rtx saved, result;
1496
1497 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1498 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1499 && ! uncritical)
1500 abort ();
1501
1502#if BYTES_BIG_ENDIAN
1503 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1504 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1505#endif
1506 addr = plus_constant (addr, offset);
1507 if (!flag_force_addr && memory_address_p (mode, addr))
1508 /* Shortcut if no insns need be emitted. */
1509 return change_address (SUBREG_REG (x), mode, addr);
1510 start_sequence ();
1511 result = change_address (SUBREG_REG (x), mode, addr);
1512 emit_insn_before (gen_sequence (), insn);
1513 end_sequence ();
1514 return result;
1515}
1516
1517/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1518 Replace subexpressions of X in place.
1519 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1520 Otherwise return X, with its contents possibly altered.
1521
1522 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1523
1524static rtx
1525walk_fixup_memory_subreg (x, insn)
1526 register rtx x;
1527 rtx insn;
1528{
1529 register enum rtx_code code;
1530 register char *fmt;
1531 register int i;
1532
1533 if (x == 0)
1534 return 0;
1535
1536 code = GET_CODE (x);
1537
1538 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1539 return fixup_memory_subreg (x, insn, 0);
1540
1541 /* Nothing special about this RTX; fix its operands. */
1542
1543 fmt = GET_RTX_FORMAT (code);
1544 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1545 {
1546 if (fmt[i] == 'e')
1547 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1548 if (fmt[i] == 'E')
1549 {
1550 register int j;
1551 for (j = 0; j < XVECLEN (x, i); j++)
1552 XVECEXP (x, i, j)
1553 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1554 }
1555 }
1556 return x;
1557}
1558\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.
   (Disabled: fixup_stack_1 is invoked from fixup_var_refs_insns instead.)  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	    || GET_CODE (insn) == JUMP_INSN)
	  fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif
1580
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.
	 A stack-slot address is (plus (virtual-reg) (const_int)).  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  /* Not a MEM: recurse into the operands, rewriting in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
1635\f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;			/* Nonzero if the bit-field is the SET_DEST.  */
  rtx seq = 0;			/* Conversion insns to emit if we succeed.  */
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset = INTVAL (XEXP (bitfield, 2));
	  /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
	  offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
	      offset -= (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			 - MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (memref))));
#endif
	    }

	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0)
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
1787\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;	/* Set from FIRST_PARM_OFFSET (fndecl).  */
static int var_offset;		/* Set from STARTING_FRAME_OFFSET.  */
static int dynamic_offset;	/* Set from STACK_DYNAMIC_OFFSET (fndecl).  */
static int out_arg_offset;	/* Set from STACK_POINTER_OFFSET.  */

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
1836
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), 0, 0);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
1880
1881/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1882 all virtual registers in their DECL_RTL's.
1883
1884 If VALID_ONLY, do this only if the resulting address is still valid.
1885 Otherwise, always do it. */
1886
static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (TREE_INLINE (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  Passing the MEM itself as
     OBJECT makes instantiate_virtual_regs_1 substitute only when the
     resulting address is still valid; passing 0 forces the replacement
     (see the header comment on instantiate_virtual_regs_1).  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM)
	instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
				    valid_only ? DECL_RTL (decl) : 0, 0);
#if 0 /* This is probably correct, but it seems to require fixes
	 elsewhere in order to work.  Let's fix them in 2.1.  */
      if (DECL_INCOMING_RTL (decl)
	  && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM)
	instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
				    valid_only ? DECL_INCOMING_RTL (decl) : 0,
				    0);
#endif
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (TREE_INLINE (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
1929
1930/* Subroutine of instantiate_decls: Process all decls in the given
1931 BLOCK node and all its subblocks. */
1932
1933static void
1934instantiate_decls_1 (let, valid_only)
1935 tree let;
1936 int valid_only;
1937{
1938 tree t;
1939
1940 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1941 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
1942 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
1943 valid_only ? DECL_RTL (t) : 0, 0);
1944
1945 /* Process all subblocks. */
1946 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1947 instantiate_decls_1 (t, valid_only);
1948}
1949\f
1950/* Given a pointer to a piece of rtx and an optional pointer to the
1951 containing object, instantiate any virtual registers present in it.
1952
1953 If EXTRA_INSNS, we always do the replacement and generate
 1954 any extra insns before OBJECT. If it is zero, we do nothing if replacement
1955 is not valid.
1956
1957 Return 1 if we either had nothing to do or if we were able to do the
1958 needed replacement. Return 0 otherwise; we only return zero if
1959 EXTRA_INSNS is zero.
1960
1961 We first try some simple transformations to avoid the creation of extra
1962 pseudos. */
1963
static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;		/* Replacement (hard reg or PLUS) for a matched virtual reg.  */
  int offset;		/* Offset to add when NEW replaces a virtual reg.  */
  rtx temp;
  rtx seq;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Nothing in these can contain a virtual register.  */
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), 0);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), 0);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  /* A SET of a virtual register is only expected where EXTRA_INSNS
	     is nonzero and the adjusted source is acceptable to the insn;
	     anything else indicates a compiler bug.  */
	  if (!validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  /* Tentatively rewrite (plus VIRT C) as (plus NEW C'), keeping OLD
	     so we can undo the in-place change if validation fails.  */
	  old = XEXP (x, 0);
	  XEXP (x, 0) = new;
	  new = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with its
	     first operand.  */
	  if (new == const0_rtx
	      && validate_change (object, loc, XEXP (x, 0), 0))
	    return 1;

	  /* Next try to replace constant with new one.  */
	  if (!validate_change (object, &XEXP (x, 1), new, 0))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
		  XEXP (x, 0) = old;

		  start_sequence ();
		  temp = force_operand (new, 0);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case LSHIFT:   case ASHIFT:   case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ROTATERT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:      case GTU:
    case LE:       case LT:       case LEU:      case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of regno_reg_rtx.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case USE:
    case CLOBBER:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      start_sequence ();
	      temp = force_operand (temp, 0);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
2294\f
2295/* Optimization: assuming this function does not receive nonlocal gotos,
2296 delete the handlers for such, as well as the insns to establish
2297 and disestablish them. */
2298
2299static void
2300delete_handlers ()
2301{
2302 rtx insn;
2303 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2304 {
2305 /* Delete the handler by turning off the flag that would
2306 prevent jump_optimize from deleting it.
2307 Also permit deletion of the nonlocal labels themselves
2308 if nothing local refers to them. */
2309 if (GET_CODE (insn) == CODE_LABEL)
2310 LABEL_PRESERVE_P (insn) = 0;
2311 if (GET_CODE (insn) == INSN
2312 && GET_CODE (PATTERN (insn)) == SET
2313 && (SET_DEST (PATTERN (insn)) == nonlocal_goto_handler_slot
2314 || SET_SRC (PATTERN (insn)) == nonlocal_goto_handler_slot
2315 || SET_DEST (PATTERN (insn)) == nonlocal_goto_stack_level
2316 || SET_SRC (PATTERN (insn)) == nonlocal_goto_stack_level))
2317 delete_insn (insn);
2318 }
2319}
2320
2321/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2322 of the current function. */
2323
2324rtx
2325nonlocal_label_rtx_list ()
2326{
2327 tree t;
2328 rtx x = 0;
2329
2330 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2331 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2332
2333 return x;
2334}
2335\f
2336/* Output a USE for any register use in RTL.
2337 This is used with -noreg to mark the extent of lifespan
2338 of any registers used in a user-visible variable's DECL_RTL. */
2339
2340void
2341use_variable (rtl)
2342 rtx rtl;
2343{
2344 if (GET_CODE (rtl) == REG)
2345 /* This is a register variable. */
2346 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2347 else if (GET_CODE (rtl) == MEM
2348 && GET_CODE (XEXP (rtl, 0)) == REG
2349 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2350 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2351 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2352 /* This is a variable-sized structure. */
2353 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2354}
2355
2356/* Like use_variable except that it outputs the USEs after INSN
2357 instead of at the end of the insn-chain. */
2358
2359void
2360use_variable_after (rtl, insn)
2361 rtx rtl, insn;
2362{
2363 if (GET_CODE (rtl) == REG)
2364 /* This is a register variable. */
2365 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2366 else if (GET_CODE (rtl) == MEM
2367 && GET_CODE (XEXP (rtl, 0)) == REG
2368 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2369 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2370 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2371 /* This is a variable-sized structure. */
2372 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2373}
2374\f
/* Return max_parm_reg: one more than the largest register number used
   for the function's parameters (assign_parms records max_reg_num ()
   here after processing the parms).  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
2380
2381/* Return the first insn following those generated by `assign_parms'. */
2382
2383rtx
2384get_first_nonparm_insn ()
2385{
2386 if (last_parm_insn)
2387 return NEXT_INSN (last_parm_insn);
2388 return get_insns ();
2389}
2390
2391/* Return 1 if EXP returns an aggregate value, for which an address
2392 must be passed to the function or returned by the function. */
2393
2394int
2395aggregate_value_p (exp)
2396 tree exp;
2397{
2398 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2399 return 1;
2400 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2401 return 1;
2402 if (flag_pcc_struct_return
2403 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2404 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2405 return 1;
2406 return 0;
2407}
2408\f
2409/* Assign RTL expressions to the function's parameters.
2410 This may involve copying them into registers and using
2411 those registers as the RTL for them.
2412
2413 If SECOND_TIME is non-zero it means that this function is being
2414 called a second time. This is done by integrate.c when a function's
2415 compilation is deferred. We need to come back here in case the
2416 FUNCTION_ARG macro computes items needed for the rest of the compilation
2417 (such as changing which registers are fixed or caller-saved). But suppress
2418 writing any insns or setting DECL_RTL of anything in this case. */
2419
2420void
2421assign_parms (fndecl, second_time)
2422 tree fndecl;
2423 int second_time;
2424{
2425 register tree parm;
2426 register rtx entry_parm = 0;
2427 register rtx stack_parm = 0;
2428 CUMULATIVE_ARGS args_so_far;
2429 enum machine_mode passed_mode, nominal_mode;
2430 /* Total space needed so far for args on the stack,
2431 given as a constant and a tree-expression. */
2432 struct args_size stack_args_size;
2433 tree fntype = TREE_TYPE (fndecl);
2434 tree fnargs = DECL_ARGUMENTS (fndecl);
2435 /* This is used for the arg pointer when referring to stack args. */
2436 rtx internal_arg_pointer;
2437 /* This is a dummy PARM_DECL that we used for the function result if
2438 the function returns a structure. */
2439 tree function_result_decl = 0;
2440 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2441 int varargs_setup = 0;
2442
2443 /* Nonzero if the last arg is named `__builtin_va_alist',
2444 which is used on some machines for old-fashioned non-ANSI varargs.h;
2445 this should be stuck onto the stack as if it had arrived there. */
2446 int vararg
2447 = (fnargs
2448 && (parm = tree_last (fnargs)) != 0
2449 && DECL_NAME (parm)
2450 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2451 "__builtin_va_alist")));
2452
2453 /* Nonzero if function takes extra anonymous args.
2454 This means the last named arg must be on the stack
2455 right before the anonymous ones. */
2456 int stdarg
2457 = (TYPE_ARG_TYPES (fntype) != 0
2458 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2459 != void_type_node));
2460
2461 /* If the reg that the virtual arg pointer will be translated into is
2462 not a fixed reg or is the stack pointer, make a copy of the virtual
2463 arg pointer, and address parms via the copy. The frame pointer is
2464 considered fixed even though it is not marked as such.
2465
2466 The second time through, simply use ap to avoid generating rtx. */
2467
2468 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2469 || ! (fixed_regs[ARG_POINTER_REGNUM]
2470 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2471 && ! second_time)
2472 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2473 else
2474 internal_arg_pointer = virtual_incoming_args_rtx;
2475 current_function_internal_arg_pointer = internal_arg_pointer;
2476
2477 stack_args_size.constant = 0;
2478 stack_args_size.var = 0;
2479
2480 /* If struct value address is treated as the first argument, make it so. */
2481 if (aggregate_value_p (DECL_RESULT (fndecl))
2482 && ! current_function_returns_pcc_struct
2483 && struct_value_incoming_rtx == 0)
2484 {
2485 tree type = build_pointer_type (fntype);
2486
2487 function_result_decl = build_decl (PARM_DECL, 0, type);
2488
2489 DECL_ARG_TYPE (function_result_decl) = type;
2490 TREE_CHAIN (function_result_decl) = fnargs;
2491 fnargs = function_result_decl;
2492 }
2493
2494 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2495 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2496
2497#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2498 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, 0);
2499#else
2500 INIT_CUMULATIVE_ARGS (args_so_far, fntype, 0);
2501#endif
2502
2503 /* We haven't yet found an argument that we must push and pretend the
2504 caller did. */
2505 current_function_pretend_args_size = 0;
2506
2507 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2508 {
2509 int aggregate
2510 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2511 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2512 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2513 struct args_size stack_offset;
2514 struct args_size arg_size;
2515 int passed_pointer = 0;
2516 tree passed_type = DECL_ARG_TYPE (parm);
2517
2518 /* Set LAST_NAMED if this is last named arg before some
2519 anonymous args. We treat it as if it were anonymous too. */
2520 int last_named = ((TREE_CHAIN (parm) == 0
2521 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2522 && (vararg || stdarg));
2523
2524 if (TREE_TYPE (parm) == error_mark_node
2525 /* This can happen after weird syntax errors
2526 or if an enum type is defined among the parms. */
2527 || TREE_CODE (parm) != PARM_DECL
2528 || passed_type == NULL)
2529 {
2530 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2531 TREE_USED (parm) = 1;
2532 continue;
2533 }
2534
2535 /* For varargs.h function, save info about regs and stack space
2536 used by the individual args, not including the va_alist arg. */
2537 if (vararg && last_named)
2538 current_function_args_info = args_so_far;
2539
2540 /* Find mode of arg as it is passed, and mode of arg
2541 as it should be during execution of this function. */
2542 passed_mode = TYPE_MODE (passed_type);
2543 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2544
2545#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2546 /* See if this arg was passed by invisible reference. */
2547 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2548 passed_type, ! last_named))
2549 {
2550 passed_type = build_pointer_type (passed_type);
2551 passed_pointer = 1;
2552 passed_mode = nominal_mode = Pmode;
2553 }
2554#endif
2555
2556 /* Let machine desc say which reg (if any) the parm arrives in.
2557 0 means it arrives on the stack. */
2558#ifdef FUNCTION_INCOMING_ARG
2559 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2560 passed_type, ! last_named);
2561#else
2562 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2563 passed_type, ! last_named);
2564#endif
2565
2566#ifdef SETUP_INCOMING_VARARGS
2567 /* If this is the last named parameter, do any required setup for
2568 varargs or stdargs. We need to know about the case of this being an
2569 addressable type, in which case we skip the registers it
2570 would have arrived in.
2571
2572 For stdargs, LAST_NAMED will be set for two parameters, the one that
2573 is actually the last named, and the dummy parameter. We only
2574 want to do this action once.
2575
2576 Also, indicate when RTL generation is to be suppressed. */
2577 if (last_named && !varargs_setup)
2578 {
2579 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2580 current_function_pretend_args_size,
2581 second_time);
2582 varargs_setup = 1;
2583 }
2584#endif
2585
2586 /* Determine parm's home in the stack,
2587 in case it arrives in the stack or we should pretend it did.
2588
2589 Compute the stack position and rtx where the argument arrives
2590 and its size.
2591
2592 There is one complexity here: If this was a parameter that would
2593 have been passed in registers, but wasn't only because it is
2594 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2595 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2596 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2597 0 as it was the previous time. */
2598
2599 locate_and_pad_parm (passed_mode, passed_type,
2600#ifdef STACK_PARMS_IN_REG_PARM_AREA
2601 1,
2602#else
2603#ifdef FUNCTION_INCOMING_ARG
2604 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2605 passed_type,
2606 (! last_named
2607 || varargs_setup)) != 0,
2608#else
2609 FUNCTION_ARG (args_so_far, passed_mode,
2610 passed_type,
2611 ! last_named || varargs_setup) != 0,
2612#endif
2613#endif
2614 fndecl, &stack_args_size, &stack_offset, &arg_size);
2615
2616 if (! second_time)
2617 {
2618 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2619
2620 if (offset_rtx == const0_rtx)
2621 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2622 else
2623 stack_parm = gen_rtx (MEM, passed_mode,
2624 gen_rtx (PLUS, Pmode,
2625 internal_arg_pointer, offset_rtx));
2626
2627 /* If this is a memory ref that contains aggregate components,
2628 mark it as such for cse and loop optimize. */
2629 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2630 }
2631
2632 /* If this parameter was passed both in registers and in the stack,
2633 use the copy on the stack. */
2634 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2635 entry_parm = 0;
2636
2637 /* If this parm was passed part in regs and part in memory,
2638 pretend it arrived entirely in memory
2639 by pushing the register-part onto the stack.
2640
2641 In the special case of a DImode or DFmode that is split,
2642 we could put it together in a pseudoreg directly,
2643 but for now that's not worth bothering with. */
2644
2645 if (entry_parm)
2646 {
2647 int nregs = 0;
2648#ifdef FUNCTION_ARG_PARTIAL_NREGS
2649 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2650 passed_type, ! last_named);
2651#endif
2652
2653 if (nregs > 0)
2654 {
2655 current_function_pretend_args_size
2656 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2657 / (PARM_BOUNDARY / BITS_PER_UNIT)
2658 * (PARM_BOUNDARY / BITS_PER_UNIT));
2659
2660 if (! second_time)
2661 move_block_from_reg (REGNO (entry_parm),
2662 validize_mem (stack_parm), nregs);
2663 entry_parm = stack_parm;
2664 }
2665 }
2666
2667 /* If we didn't decide this parm came in a register,
2668 by default it came on the stack. */
2669 if (entry_parm == 0)
2670 entry_parm = stack_parm;
2671
2672 /* Record permanently how this parm was passed. */
2673 if (! second_time)
2674 DECL_INCOMING_RTL (parm) = entry_parm;
2675
2676 /* If there is actually space on the stack for this parm,
2677 count it in stack_args_size; otherwise set stack_parm to 0
2678 to indicate there is no preallocated stack slot for the parm. */
2679
2680 if (entry_parm == stack_parm
2681#ifdef REG_PARM_STACK_SPACE
2682 /* On some machines, even if a parm value arrives in a register
2683 there is still an (uninitialized) stack slot allocated for it. */
2684 || REG_PARM_STACK_SPACE (fndecl) > 0
2685#endif
2686 )
2687 {
2688 stack_args_size.constant += arg_size.constant;
2689 if (arg_size.var)
2690 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2691 }
2692 else
2693 /* No stack slot was pushed for this parm. */
2694 stack_parm = 0;
2695
2696 /* Update info on where next arg arrives in registers. */
2697
2698 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2699 passed_type, ! last_named);
2700
2701 /* If this is our second time through, we are done with this parm. */
2702 if (second_time)
2703 continue;
2704
2705 /* Now adjust STACK_PARM to the mode and precise location
2706 where this parameter should live during execution,
2707 if we discover that it must live in the stack during execution.
2708 To make debuggers happier on big-endian machines, we store
2709 the value in the last bytes of the space available. */
2710
2711 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2712 && stack_parm != 0)
2713 {
2714 rtx offset_rtx;
2715
2716#if BYTES_BIG_ENDIAN
2717 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2718 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2719 - GET_MODE_SIZE (nominal_mode));
2720#endif
2721
2722 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2723 if (offset_rtx == const0_rtx)
2724 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2725 else
2726 stack_parm = gen_rtx (MEM, nominal_mode,
2727 gen_rtx (PLUS, Pmode,
2728 internal_arg_pointer, offset_rtx));
2729
2730 /* If this is a memory ref that contains aggregate components,
2731 mark it as such for cse and loop optimize. */
2732 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2733 }
2734
2735 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2736 in the mode in which it arrives.
2737 STACK_PARM is an RTX for a stack slot where the parameter can live
2738 during the function (in case we want to put it there).
2739 STACK_PARM is 0 if no stack slot was pushed for it.
2740
2741 Now output code if necessary to convert ENTRY_PARM to
2742 the type in which this function declares it,
2743 and store that result in an appropriate place,
2744 which may be a pseudo reg, may be STACK_PARM,
2745 or may be a local stack slot if STACK_PARM is 0.
2746
2747 Set DECL_RTL to that place. */
2748
2749 if (nominal_mode == BLKmode)
2750 {
2751 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2752 if (GET_CODE (entry_parm) == REG)
2753 {
2754 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2755 UNITS_PER_WORD);
2756
2757 /* Note that we will be storing an integral number of words.
2758 So we have to be careful to ensure that we allocate an
2759 integral number of words. We do this below in the
2760 assign_stack_local if space was not allocated in the argument
2761 list. If it was, this will not work if PARM_BOUNDARY is not
2762 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2763 if it becomes a problem. */
2764
2765 if (stack_parm == 0)
2766 stack_parm
2767 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2768 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2769 abort ();
2770
2771 move_block_from_reg (REGNO (entry_parm),
2772 validize_mem (stack_parm),
2773 size_stored / UNITS_PER_WORD);
2774 }
2775 DECL_RTL (parm) = stack_parm;
2776 }
2777 else if (! (
2778#if 0 /* This change was turned off because it makes compilation bigger. */
2779 !optimize
2780#else /* It's not clear why the following was replaced. */
2781 /* Obsoleted by preceeding line. */
2782 (obey_regdecls && ! TREE_REGDECL (parm)
2783 && ! TREE_INLINE (fndecl))
2784#endif
2785 /* layout_decl may set this. */
2786 || TREE_ADDRESSABLE (parm)
2787 || TREE_SIDE_EFFECTS (parm)
2788 /* If -ffloat-store specified, don't put explicit
2789 float variables into registers. */
2790 || (flag_float_store
2791 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2792 /* Always assign pseudo to structure return or item passed
2793 by invisible reference. */
2794 || passed_pointer || parm == function_result_decl)
2795 {
2796 /* Store the parm in a pseudoregister during the function. */
2797 register rtx parmreg = gen_reg_rtx (nominal_mode);
2798
2799 REG_USERVAR_P (parmreg) = 1;
2800
2801 /* If this was an item that we received a pointer to, set DECL_RTL
2802 appropriately. */
2803 if (passed_pointer)
2804 {
2805 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2806 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2807 }
2808 else
2809 DECL_RTL (parm) = parmreg;
2810
2811 /* Copy the value into the register. */
2812 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
86f8eff3
RK
2813 {
2814 /* If ENTRY_PARM is a hard register, it might be in a register
2815 not valid for operating in its mode (e.g., an odd-numbered
2816 register for a DFmode). In that case, moves are the only
2817 thing valid, so we can't do a convert from there. This
2818 occurs when the calling sequence allow such misaligned
2819 usages. */
2820 if (GET_CODE (entry_parm) == REG
2821 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2822 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2823 GET_MODE (entry_parm)))
2824 convert_move (parmreg, copy_to_reg (entry_parm));
2825 else
2826 convert_move (parmreg, validize_mem (entry_parm), 0);
2827 }
6f086dfc
RS
2828 else
2829 emit_move_insn (parmreg, validize_mem (entry_parm));
2830
2831 /* In any case, record the parm's desired stack location
2832 in case we later discover it must live in the stack. */
2833 if (REGNO (parmreg) >= nparmregs)
2834 {
2835 rtx *new;
2836 nparmregs = REGNO (parmreg) + 5;
2837 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2838 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2839 parm_reg_stack_loc = new;
2840 }
2841 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2842
2843 /* Mark the register as eliminable if we did no conversion
2844 and it was copied from memory at a fixed offset,
2845 and the arg pointer was not copied to a pseudo-reg.
2846 If the arg pointer is a pseudo reg or the offset formed
2847 an invalid address, such memory-equivalences
2848 as we make here would screw up life analysis for it. */
2849 if (nominal_mode == passed_mode
2850 && GET_CODE (entry_parm) == MEM
2851 && stack_offset.var == 0
2852 && reg_mentioned_p (virtual_incoming_args_rtx,
2853 XEXP (entry_parm, 0)))
2854 REG_NOTES (get_last_insn ())
2855 = gen_rtx (EXPR_LIST, REG_EQUIV,
2856 entry_parm, REG_NOTES (get_last_insn ()));
2857
2858 /* For pointer data type, suggest pointer register. */
2859 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2860 mark_reg_pointer (parmreg);
2861 }
2862 else
2863 {
2864 /* Value must be stored in the stack slot STACK_PARM
2865 during function execution. */
2866
2867 if (passed_mode != nominal_mode)
86f8eff3
RK
2868 {
2869 /* Conversion is required. */
2870 if (GET_CODE (entry_parm) == REG
2871 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2872 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2873 entry_parm = copy_to_reg (entry_parm);
2874
2875 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2876 }
6f086dfc
RS
2877
2878 if (entry_parm != stack_parm)
2879 {
2880 if (stack_parm == 0)
2881 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2882 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2883 emit_move_insn (validize_mem (stack_parm),
2884 validize_mem (entry_parm));
2885 }
2886
2887 DECL_RTL (parm) = stack_parm;
2888 }
2889
2890 /* If this "parameter" was the place where we are receiving the
2891 function's incoming structure pointer, set up the result. */
2892 if (parm == function_result_decl)
2893 DECL_RTL (DECL_RESULT (fndecl))
2894 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
2895
2896 if (TREE_THIS_VOLATILE (parm))
2897 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
2898 if (TREE_READONLY (parm))
2899 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
2900 }
2901
2902 max_parm_reg = max_reg_num ();
2903 last_parm_insn = get_last_insn ();
2904
2905 current_function_args_size = stack_args_size.constant;
2906
2907 /* Adjust function incoming argument size for alignment and
2908 minimum length. */
2909
2910#ifdef REG_PARM_STACK_SPACE
2911 current_function_args_size = MAX (current_function_args_size,
2912 REG_PARM_STACK_SPACE (fndecl));
2913#endif
2914
2915#ifdef STACK_BOUNDARY
2916#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2917
2918 current_function_args_size
2919 = ((current_function_args_size + STACK_BYTES - 1)
2920 / STACK_BYTES) * STACK_BYTES;
2921#endif
2922
2923#ifdef ARGS_GROW_DOWNWARD
2924 current_function_arg_offset_rtx
2925 = (stack_args_size.var == 0 ? gen_rtx (CONST_INT, VOIDmode,
2926 -stack_args_size.constant)
2927 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
2928 size_int (-stack_args_size.constant)),
2929 0, VOIDmode, 0));
2930#else
2931 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2932#endif
2933
2934 /* See how many bytes, if any, of its args a function should try to pop
2935 on return. */
2936
2937 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
2938 current_function_args_size);
2939
2940 /* For stdarg.h function, save info about regs and stack space
2941 used by the named args. */
2942
2943 if (stdarg)
2944 current_function_args_info = args_so_far;
2945
2946 /* Set the rtx used for the function return value. Put this in its
2947 own variable so any optimizers that need this information don't have
2948 to include tree.h. Do this here so it gets done when an inlined
2949 function gets output. */
2950
2951 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
2952}
2953\f
2954/* Compute the size and offset from the start of the stacked arguments for a
2955 parm passed in mode PASSED_MODE and with type TYPE.
2956
2957 INITIAL_OFFSET_PTR points to the current offset into the stacked
2958 arguments.
2959
2960 The starting offset and size for this parm are returned in *OFFSET_PTR
2961 and *ARG_SIZE_PTR, respectively.
2962
2963 IN_REGS is non-zero if the argument will be passed in registers. It will
2964 never be set if REG_PARM_STACK_SPACE is not defined.
2965
2966 FNDECL is the function in which the argument was defined.
2967
2968 There are two types of rounding that are done. The first, controlled by
2969 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
2970 list to be aligned to the specific boundary (in bits). This rounding
2971 affects the initial and starting offsets, but not the argument size.
2972
2973 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
2974 optionally rounds the size of the parm to PARM_BOUNDARY. The
2975 initial offset is not affected by this rounding, while the size always
2976 is and the starting offset may be. */
2977
2978/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
2979 initial_offset_ptr is positive because locate_and_pad_parm's
2980 callers pass in the total size of args so far as
2981 initial_offset_ptr. arg_size_ptr is always positive.*/
2982
2983static void pad_to_arg_alignment (), pad_below ();
2984
2985void
2986locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
2987 initial_offset_ptr, offset_ptr, arg_size_ptr)
2988 enum machine_mode passed_mode;
2989 tree type;
2990 int in_regs;
2991 tree fndecl;
2992 struct args_size *initial_offset_ptr;
2993 struct args_size *offset_ptr;
2994 struct args_size *arg_size_ptr;
2995{
2996 tree sizetree
2997 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
2998 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
2999 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3000 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3001 int reg_parm_stack_space = 0;
3002
3003#ifdef REG_PARM_STACK_SPACE
3004 /* If we have found a stack parm before we reach the end of the
3005 area reserved for registers, skip that area. */
3006 if (! in_regs)
3007 {
3008 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3009 if (reg_parm_stack_space > 0)
3010 {
3011 if (initial_offset_ptr->var)
3012 {
3013 initial_offset_ptr->var
3014 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3015 size_int (reg_parm_stack_space));
3016 initial_offset_ptr->constant = 0;
3017 }
3018 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3019 initial_offset_ptr->constant = reg_parm_stack_space;
3020 }
3021 }
3022#endif /* REG_PARM_STACK_SPACE */
3023
3024 arg_size_ptr->var = 0;
3025 arg_size_ptr->constant = 0;
3026
3027#ifdef ARGS_GROW_DOWNWARD
3028 if (initial_offset_ptr->var)
3029 {
3030 offset_ptr->constant = 0;
3031 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3032 initial_offset_ptr->var);
3033 }
3034 else
3035 {
3036 offset_ptr->constant = - initial_offset_ptr->constant;
3037 offset_ptr->var = 0;
3038 }
3039 if (where_pad == upward
3040 && (TREE_CODE (sizetree) != INTEGER_CST
3041 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3042 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3043 SUB_PARM_SIZE (*offset_ptr, sizetree);
3044 pad_to_arg_alignment (offset_ptr, boundary);
3045 if (initial_offset_ptr->var)
3046 {
3047 arg_size_ptr->var = size_binop (MINUS_EXPR,
3048 size_binop (MINUS_EXPR,
3049 integer_zero_node,
3050 initial_offset_ptr->var),
3051 offset_ptr->var);
3052 }
3053 else
3054 {
3055 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3056 offset_ptr->constant);
3057 }
3058/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3059 if (where_pad == downward)
3060 pad_below (arg_size_ptr, passed_mode, sizetree);
3061#else /* !ARGS_GROW_DOWNWARD */
3062 pad_to_arg_alignment (initial_offset_ptr, boundary);
3063 *offset_ptr = *initial_offset_ptr;
3064 if (where_pad == downward)
3065 pad_below (offset_ptr, passed_mode, sizetree);
3066
3067#ifdef PUSH_ROUNDING
3068 if (passed_mode != BLKmode)
3069 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3070#endif
3071
3072 if (where_pad != none
3073 && (TREE_CODE (sizetree) != INTEGER_CST
3074 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3075 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3076
3077 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3078#endif /* ARGS_GROW_DOWNWARD */
3079}
3080
3081static void
3082pad_to_arg_alignment (offset_ptr, boundary)
3083 struct args_size *offset_ptr;
3084 int boundary;
3085{
3086 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3087
3088 if (boundary > BITS_PER_UNIT)
3089 {
3090 if (offset_ptr->var)
3091 {
3092 offset_ptr->var =
3093#ifdef ARGS_GROW_DOWNWARD
3094 round_down
3095#else
3096 round_up
3097#endif
3098 (ARGS_SIZE_TREE (*offset_ptr),
3099 boundary / BITS_PER_UNIT);
3100 offset_ptr->constant = 0; /*?*/
3101 }
3102 else
3103 offset_ptr->constant =
3104#ifdef ARGS_GROW_DOWNWARD
3105 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3106#else
3107 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3108#endif
3109 }
3110}
3111
3112static void
3113pad_below (offset_ptr, passed_mode, sizetree)
3114 struct args_size *offset_ptr;
3115 enum machine_mode passed_mode;
3116 tree sizetree;
3117{
3118 if (passed_mode != BLKmode)
3119 {
3120 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3121 offset_ptr->constant
3122 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3123 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3124 - GET_MODE_SIZE (passed_mode));
3125 }
3126 else
3127 {
3128 if (TREE_CODE (sizetree) != INTEGER_CST
3129 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3130 {
3131 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3132 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3133 /* Add it in. */
3134 ADD_PARM_SIZE (*offset_ptr, s2);
3135 SUB_PARM_SIZE (*offset_ptr, sizetree);
3136 }
3137 }
3138}
3139
3140static tree
3141round_down (value, divisor)
3142 tree value;
3143 int divisor;
3144{
3145 return size_binop (MULT_EXPR,
3146 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3147 size_int (divisor));
3148}
3149\f
3150/* Walk the tree of blocks describing the binding levels within a function
3151 and warn about uninitialized variables.
3152 This is done after calling flow_analysis and before global_alloc
3153 clobbers the pseudo-regs to hard regs. */
3154
3155void
3156uninitialized_vars_warning (block)
3157 tree block;
3158{
3159 register tree decl, sub;
3160 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3161 {
3162 if (TREE_CODE (decl) == VAR_DECL
3163 /* These warnings are unreliable for and aggregates
3164 because assigning the fields one by one can fail to convince
3165 flow.c that the entire aggregate was initialized.
3166 Unions are troublesome because members may be shorter. */
3167 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3168 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3169 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3170 && DECL_RTL (decl) != 0
3171 && GET_CODE (DECL_RTL (decl)) == REG
3172 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3173 warning_with_decl (decl,
3174 "`%s' may be used uninitialized in this function");
3175 if (TREE_CODE (decl) == VAR_DECL
3176 && DECL_RTL (decl) != 0
3177 && GET_CODE (DECL_RTL (decl)) == REG
3178 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3179 warning_with_decl (decl,
3180 "variable `%s' may be clobbered by `longjmp'");
3181 }
3182 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3183 uninitialized_vars_warning (sub);
3184}
3185
3186/* Do the appropriate part of uninitialized_vars_warning
3187 but for arguments instead of local variables. */
3188
3189void
3190setjmp_args_warning (block)
3191 tree block;
3192{
3193 register tree decl;
3194 for (decl = DECL_ARGUMENTS (current_function_decl);
3195 decl; decl = TREE_CHAIN (decl))
3196 if (DECL_RTL (decl) != 0
3197 && GET_CODE (DECL_RTL (decl)) == REG
3198 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3199 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3200}
3201
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */
3204
void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  /* Demote to the stack every register-allocated variable in this
     binding contour, unless the user explicitly asked for a register
     (or, with NON_SAVING_SETJMP, even then).  */
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! TREE_REGDECL (decl)))
      put_var_into_stack (decl);
  /* Recurse over nested binding contours.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
3227\f
3228/* Like the previous function, but for args instead of local variables. */
3229
3230void
3231setjmp_protect_args ()
3232{
3233 register tree decl, sub;
3234 for (decl = DECL_ARGUMENTS (current_function_decl);
3235 decl; decl = TREE_CHAIN (decl))
3236 if ((TREE_CODE (decl) == VAR_DECL
3237 || TREE_CODE (decl) == PARM_DECL)
3238 && DECL_RTL (decl) != 0
3239 && GET_CODE (DECL_RTL (decl)) == REG
3240 && (
3241 /* If longjmp doesn't restore the registers,
3242 don't put anything in them. */
3243#ifdef NON_SAVING_SETJMP
3244 NON_SAVING_SETJMP
3245 ||
3246#endif
3247 ! TREE_REGDECL (decl)))
3248 put_var_into_stack (decl);
3249}
3250\f
3251/* Return the context-pointer register corresponding to DECL,
3252 or 0 if it does not need one. */
3253
3254rtx
3255lookup_static_chain (decl)
3256 tree decl;
3257{
3258 tree context = decl_function_context (decl);
3259 tree link;
3260
3261 if (context == 0)
3262 return 0;
3263
3264 /* We treat inline_function_decl as an alias for the current function
3265 because that is the inline function whose vars, types, etc.
3266 are being merged into the current function.
3267 See expand_inline_function. */
3268 if (context == current_function_decl || context == inline_function_decl)
3269 return virtual_stack_vars_rtx;
3270
3271 for (link = context_display; link; link = TREE_CHAIN (link))
3272 if (TREE_PURPOSE (link) == context)
3273 return RTL_EXPR_RTL (TREE_VALUE (link));
3274
3275 abort ();
3276}
3277\f
3278/* Convert a stack slot address ADDR for variable VAR
3279 (from a containing function)
3280 into an address valid in this function (using a static chain). */
3281
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the saved compilation state of the containing function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;			/* Deliberately shadows the parameter.  */

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      /* The save slot itself lives in the outer frame, so its address
	 must also be fixed up, recursively.  */
      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
3363\f
3364/* Return the address of the trampoline for entering nested fn FUNCTION.
3365 If necessary, allocate a trampoline (in the stack frame)
3366 and emit rtl to initialize its contents (at entry to this function). */
3367
3368rtx
3369trampoline_address (function)
3370 tree function;
3371{
3372 tree link;
3373 tree rtlexp;
3374 rtx tramp;
3375 struct function *fp;
3376 tree fn_context;
3377
3378 /* Find an existing trampoline and return it. */
3379 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3380 if (TREE_PURPOSE (link) == function)
3381 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3382 for (fp = outer_function_chain; fp; fp = fp->next)
3383 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3384 if (TREE_PURPOSE (link) == function)
3385 {
3386 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3387 function);
3388 return round_trampoline_addr (tramp);
3389 }
3390
3391 /* None exists; we must make one. */
3392
3393 /* Find the `struct function' for the function containing FUNCTION. */
3394 fp = 0;
3395 fn_context = decl_function_context (function);
3396 if (fn_context != current_function_decl)
3397 for (fp = outer_function_chain; fp; fp = fp->next)
3398 if (fp->decl == fn_context)
3399 break;
3400
3401 /* Allocate run-time space for this trampoline
3402 (usually in the defining function's stack frame). */
3403#ifdef ALLOCATE_TRAMPOLINE
3404 tramp = ALLOCATE_TRAMPOLINE (fp);
3405#else
3406 /* If rounding needed, allocate extra space
3407 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3408#ifdef TRAMPOLINE_ALIGNMENT
3409#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3410#else
3411#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3412#endif
3413 if (fp != 0)
3414 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3415 else
3416 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3417#endif
3418
3419 /* Record the trampoline for reuse and note it for later initialization
3420 by expand_function_end. */
3421 if (fp != 0)
3422 {
3423 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3424 rtlexp = make_node (RTL_EXPR);
3425 RTL_EXPR_RTL (rtlexp) = tramp;
3426 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3427 pop_obstacks ();
3428 }
3429 else
3430 {
3431 /* Make the RTL_EXPR node temporary, not momentary, so that the
3432 trampoline_list doesn't become garbage. */
3433 int momentary = suspend_momentary ();
3434 rtlexp = make_node (RTL_EXPR);
3435 resume_momentary (momentary);
3436
3437 RTL_EXPR_RTL (rtlexp) = tramp;
3438 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3439 }
3440
3441 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3442 return round_trampoline_addr (tramp);
3443}
3444
3445/* Given a trampoline address,
3446 round it to multiple of TRAMPOLINE_ALIGNMENT. */
3447
static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary:
     (TRAMP + ALIGN - 1) & -ALIGN, computed with emitted insns.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_ALIGNMENT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			gen_rtx (CONST_INT, VOIDmode, - TRAMPOLINE_ALIGNMENT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  /* Without TRAMPOLINE_ALIGNMENT, TRAMP is returned unchanged.  */
  return tramp;
}
3464\f
3465/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3466 and initialize static variables for generating RTL for the statements
3467 of the function. */
3468
void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  /* Receives decl_printable_name's secondary output; value unused here.  */
  char *junk;

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (0, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	current_function_returns_pcc_struct = 1;
      else
#endif
	current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}
3611
3612/* Indicate that the current function uses extra args
3613 not explicitly mentioned in the argument list in any fashion. */
3614
3615void
3616mark_varargs ()
3617{
3618 current_function_varargs = 1;
3619}
3620
3621/* Expand a call to __main at the beginning of a possible main function. */
3622
3623void
3624expand_main_function ()
3625{
3626#ifndef INIT_SECTION_ASM_OP
3627 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
3628 VOIDmode, 0);
3629#endif /* not INIT_SECTION_ASM_OP */
3630}
3631\f
3632/* Start the RTL for a new function, and set variables used for
3633 emitting RTL.
3634 SUBR is the FUNCTION_DECL node.
3635 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3636 the function's parameters, which must be run at any return statement. */
3637
3638void
3639expand_function_start (subr, parms_have_cleanups)
3640 tree subr;
3641 int parms_have_cleanups;
3642{
3643 register int i;
3644 tree tem;
3645 rtx last_ptr;
3646
3647 /* Make sure volatile mem refs aren't considered
3648 valid operands of arithmetic insns. */
3649 init_recog_no_volatile ();
3650
3651 /* If function gets a static chain arg, store it in the stack frame.
3652 Do this first, so it gets the first stack slot offset. */
3653 if (current_function_needs_context)
3654 emit_move_insn (assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0),
3655 static_chain_incoming_rtx);
3656
3657 /* If the parameters of this function need cleaning up, get a label
3658 for the beginning of the code which executes those cleanups. This must
3659 be done before doing anything with return_label. */
3660 if (parms_have_cleanups)
3661 cleanup_label = gen_label_rtx ();
3662 else
3663 cleanup_label = 0;
3664
3665 /* Make the label for return statements to jump to, if this machine
3666 does not have a one-instruction return and uses an epilogue,
3667 or if it returns a structure, or if it has parm cleanups. */
3668#ifdef HAVE_return
3669 if (cleanup_label == 0 && HAVE_return
3670 && ! current_function_returns_pcc_struct
3671 && ! (current_function_returns_struct && ! optimize))
3672 return_label = 0;
3673 else
3674 return_label = gen_label_rtx ();
3675#else
3676 return_label = gen_label_rtx ();
3677#endif
3678
3679 /* Initialize rtx used to return the value. */
3680 /* Do this before assign_parms so that we copy the struct value address
3681 before any library calls that assign parms might generate. */
3682
3683 /* Decide whether to return the value in memory or in a register. */
3684 if (aggregate_value_p (DECL_RESULT (subr)))
3685 {
3686 /* Returning something that won't go in a register. */
3687 register rtx value_address;
3688
3689#ifdef PCC_STATIC_STRUCT_RETURN
3690 if (current_function_returns_pcc_struct)
3691 {
3692 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3693 value_address = assemble_static_space (size);
3694 }
3695 else
3696#endif
3697 {
3698 /* Expect to be passed the address of a place to store the value.
3699 If it is passed as an argument, assign_parms will take care of
3700 it. */
3701 if (struct_value_incoming_rtx)
3702 {
3703 value_address = gen_reg_rtx (Pmode);
3704 emit_move_insn (value_address, struct_value_incoming_rtx);
3705 }
3706 }
3707 if (value_address)
3708 DECL_RTL (DECL_RESULT (subr))
3709 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
3710 value_address);
3711 }
3712 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3713 /* If return mode is void, this decl rtl should not be used. */
3714 DECL_RTL (DECL_RESULT (subr)) = 0;
3715 else if (parms_have_cleanups)
3716 /* If function will end with cleanup code for parms,
3717 compute the return values into a pseudo reg,
3718 which we will copy into the true return register
3719 after the cleanups are done. */
3720 DECL_RTL (DECL_RESULT (subr))
3721 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
3722 else
3723 /* Scalar, returned in a register. */
3724 {
3725#ifdef FUNCTION_OUTGOING_VALUE
3726 DECL_RTL (DECL_RESULT (subr))
3727 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3728#else
3729 DECL_RTL (DECL_RESULT (subr))
3730 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3731#endif
3732
3733 /* Mark this reg as the function's return value. */
3734 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
3735 {
3736 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
3737 /* Needed because we may need to move this to memory
3738 in case it's a named return value whose address is taken. */
3739 TREE_REGDECL (DECL_RESULT (subr)) = 1;
3740 }
3741 }
3742
3743 /* Initialize rtx for parameters and local variables.
3744 In some cases this requires emitting insns. */
3745
3746 assign_parms (subr, 0);
3747
3748 /* The following was moved from init_function_start.
3749 The move is supposed to make sdb output more accurate. */
3750 /* Indicate the beginning of the function body,
3751 as opposed to parm setup. */
3752 emit_note (0, NOTE_INSN_FUNCTION_BEG);
3753
3754 /* If doing stupid allocation, mark parms as born here. */
3755
3756 if (GET_CODE (get_last_insn ()) != NOTE)
3757 emit_note (0, NOTE_INSN_DELETED);
3758 parm_birth_insn = get_last_insn ();
3759
3760 if (obey_regdecls)
3761 {
3762 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
3763 use_variable (regno_reg_rtx[i]);
3764
3765 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
3766 use_variable (current_function_internal_arg_pointer);
3767 }
3768
3769 /* Fetch static chain values for containing functions. */
3770 tem = decl_function_context (current_function_decl);
3771 if (tem)
3772 last_ptr = copy_to_reg (static_chain_incoming_rtx);
3773 context_display = 0;
3774 while (tem)
3775 {
3776 tree rtlexp = make_node (RTL_EXPR);
3777
3778 RTL_EXPR_RTL (rtlexp) = last_ptr;
3779 context_display = tree_cons (tem, rtlexp, context_display);
3780 tem = decl_function_context (tem);
3781 if (tem == 0)
3782 break;
3783 /* Chain thru stack frames, assuming pointer to next lexical frame
3784 is found at the place we always store it. */
3785#ifdef FRAME_GROWS_DOWNWARD
3786 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
3787#endif
3788 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
3789 memory_address (Pmode, last_ptr)));
3790 }
3791
3792 /* After the display initializations is where the tail-recursion label
3793 should go, if we end up needing one. Ensure we have a NOTE here
3794 since some things (like trampolines) get placed before this. */
3795 tail_recursion_reentry = emit_note (0, NOTE_INSN_DELETED);
3796
3797 /* Evaluate now the sizes of any types declared among the arguments. */
3798 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
3799 expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0);
3800
3801 /* Make sure there is a line number after the function entry setup code. */
3802 force_next_line_note ();
3803}
3804\f
3805/* Generate RTL for the end of the current function.
3806 FILENAME and LINE are the current position in the source file. */
3807
3808/* It is up to language-specific callers to do cleanups for parameters. */
3809
3810 void
3811 expand_function_end (filename, line)
3812 char *filename;
3813 int line;
3814 {
3815 register int i;
3816 tree link;
3817 
  /* Trampoline template, built lazily below on first use and then
     reused for the rest of the compilation (hence `static').  */
3818 static rtx initial_trampoline;
3819 
3820#ifdef NON_SAVING_SETJMP
3821 /* Don't put any variables in registers if we call setjmp
3822 on a machine that fails to restore the registers. */
3823 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
3824 {
3825 setjmp_protect (DECL_INITIAL (current_function_decl));
3826 setjmp_protect_args ();
3827 }
3828#endif
3829 
3830 /* Save the argument pointer if a save area was made for it. */
  /* The save insn goes before TAIL_RECURSION_REENTRY so it is part of
     one-time function entry and is not repeated on tail recursion.  */
3831 if (arg_pointer_save_area)
3832 {
3833 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
3834 emit_insn_before (x, tail_recursion_reentry);
3835 }
3836 
3837 /* Initialize any trampolines required by this function. */
3838 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3839 {
3840 tree function = TREE_PURPOSE (link);
3841 rtx context = lookup_static_chain (function);
3842 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
3843 rtx seq;
3844 
3845 /* First make sure this compilation has a template for
3846 initializing trampolines. */
3847 if (initial_trampoline == 0)
86f8eff3
RK
3848 {
3849 end_temporary_allocation ();
3850 initial_trampoline
3851 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
3852 resume_temporary_allocation ();
3853 }
6f086dfc
RS
3854 
3855 /* Generate insns to initialize the trampoline. */
     /* Copy the template into this function's trampoline slot, then let
        the target macro patch in the nested function's address and its
        static-chain value.  */
3856 start_sequence ();
3857 tramp = change_address (initial_trampoline, BLKmode,
3858 round_trampoline_addr (XEXP (tramp, 0)));
3859 emit_block_move (tramp, initial_trampoline,
3860 gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_SIZE),
3861 FUNCTION_BOUNDARY / BITS_PER_UNIT);
3862 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
3863 XEXP (DECL_RTL (function), 0), context);
3864 seq = get_insns ();
3865 end_sequence ();
3866 
3867 /* Put those insns at entry to the containing function (this one). */
3868 emit_insns_before (seq, tail_recursion_reentry);
3869 }
3870 /* Clear the trampoline_list for the next function. */
3871 trampoline_list = 0;
3872 
3873#if 0 /* I think unused parms are legitimate enough. */
3874 /* Warn about unused parms. */
3875 if (warn_unused)
3876 {
3877 rtx decl;
3878 
3879 for (decl = DECL_ARGUMENTS (current_function_decl);
3880 decl; decl = TREE_CHAIN (decl))
3881 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
3882 warning_with_decl (decl, "unused parameter `%s'");
3883 }
3884#endif
3885 
3886 /* Delete handlers for nonlocal gotos if nothing uses them. */
3887 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
3888 delete_handlers ();
3889 
3890 /* End any sequences that failed to be closed due to syntax errors. */
  /* NOTE(review): end_sequence is called with an argument here but as
     end_sequence () above -- pre-ANSI code tolerates this, but confirm
     the expected signature.  */
3891 while (in_sequence_p ())
3892 end_sequence (0);
3893 
3894 /* Outside function body, can't compute type's actual size
3895 until next function's body starts. */
3896 immediate_size_expand--;
3897 
3898 /* If doing stupid register allocation,
3899 mark register parms as dying here. */
  /* These USEs parallel the identical ones emitted at function entry by
     expand_function_start, keeping the values live across the body.  */
3900 
3901 if (obey_regdecls)
3902 {
3903 rtx tem;
3904 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
3905 use_variable (regno_reg_rtx[i]);
3906 
3907 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
3908 
3909 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
3910 {
3911 use_variable (XEXP (tem, 0));
3912 use_variable_after (XEXP (tem, 0), parm_birth_insn);
3913 }
3914 
3915 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
3916 use_variable (current_function_internal_arg_pointer);
3917 }
3918 
  /* Settle any outstanding pending stack adjustment before marking the
     end of the function body.  */
3919 clear_pending_stack_adjust ();
3920 do_pending_stack_adjust ();
3921 
3922 /* Mark the end of the function body.
3923 If control reaches this insn, the function can drop through
3924 without returning a value. */
3925 emit_note (0, NOTE_INSN_FUNCTION_END);
3926 
3927 /* Output a linenumber for the end of the function.
3928 SDB depends on this. */
3929 emit_line_note_force (filename, line);
3930 
3931 /* Output the label for the actual return from the function,
3932 if one is expected. This happens either because a function epilogue
3933 is used instead of a return instruction, or because a return was done
3934 with a goto in order to run local cleanups, or because of pcc-style
3935 structure returning. */
3936 
3937 if (return_label)
3938 emit_label (return_label);
3939 
3940 /* If we had calls to alloca, and this machine needs
3941 an accurate stack pointer to exit the function,
3942 insert some code to save and restore the stack pointer. */
3943#ifdef EXIT_IGNORE_STACK
3944 if (! EXIT_IGNORE_STACK)
3945#endif
3946 if (current_function_calls_alloca)
3947 {
      /* Save SP into a pseudo just after the parms are born (entry),
         and restore it here at exit.  */
3948 rtx tem = gen_reg_rtx (Pmode);
3949 emit_insn_after (gen_rtx (SET, VOIDmode, tem, stack_pointer_rtx),
3950 parm_birth_insn);
3951 emit_insn (gen_rtx (SET, VOIDmode, stack_pointer_rtx, tem));
3952 }
3953 
3954 /* If scalar return value was computed in a pseudo-reg,
3955 copy that to the hard return register. */
3956 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
3957 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
3958 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
3959 >= FIRST_PSEUDO_REGISTER))
3960 {
3961 rtx real_decl_result;
3962 
3963#ifdef FUNCTION_OUTGOING_VALUE
3964 real_decl_result
3965 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
3966 current_function_decl);
3967#else
3968 real_decl_result
3969 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
3970 current_function_decl);
3971#endif
3972 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
3973 emit_move_insn (real_decl_result,
3974 DECL_RTL (DECL_RESULT (current_function_decl)));
      /* The USE keeps the hard return register's value live through the
         end of the function.  */
3975 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
3976 }
3977 
3978 /* If returning a structure, arrange to return the address of the value
3979 in a place where debuggers expect to find it.
3980 
3981 If returning a structure PCC style,
3982 the caller also depends on this value.
3983 And current_function_returns_pcc_struct is not necessarily set. */
3984 if (current_function_returns_struct
3985 || current_function_returns_pcc_struct)
3986 {
3987 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3988 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3989#ifdef FUNCTION_OUTGOING_VALUE
3990 rtx outgoing
3991 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
3992 current_function_decl);
3993#else
3994 rtx outgoing
3995 = FUNCTION_VALUE (build_pointer_type (type),
3996 current_function_decl);
3997#endif
3998 
3999 /* Mark this as a function return value so integrate will delete the
4000 assignment and USE below when inlining this function. */
4001 REG_FUNCTION_VALUE_P (outgoing) = 1;
4002 
4003 emit_move_insn (outgoing, value_address);
4004 use_variable (outgoing);
4005 }
4006 
4007 /* Output a return insn if we are using one.
4008 Otherwise, let the rtl chain end here, to drop through
4009 into the epilogue. */
4010 
4011#ifdef HAVE_return
4012 if (HAVE_return)
4013 {
4014 emit_jump_insn (gen_return ());
4015 emit_barrier ();
4016 }
4017#endif
4018 
4019 /* Fix up any gotos that jumped out to the outermost
4020 binding level of the function.
4021 Must follow emitting RETURN_LABEL. */
4022 
4023 /* If you have any cleanups to do at this point,
4024 and they need to create temporary variables,
4025 then you will lose. */
4026 fixup_gotos (0, 0, 0, get_insns (), 0);
4027}
This page took 0.386262 seconds and 5 git commands to generate.