]> gcc.gnu.org Git - gcc.git/blame - gcc/function.c
*** empty log message ***
[gcc.git] / gcc / function.c
CommitLineData
6f086dfc
RS
1/* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40#include "config.h"
41
42#include <stdio.h>
43
44#include "rtl.h"
45#include "tree.h"
46#include "flags.h"
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
56
57/* Round a value to the lowest integer less than it that is a multiple of
58 the required alignment. Avoid using division in case the value is
59 negative. Assume the alignment is a power of two. */
60#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
61
62/* Similar, but round to the next highest integer that meets the
63 alignment. */
64#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
65
66/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
67 during rtl generation. If they are different register numbers, this is
68 always true. It may also be true if
69 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
70 generation. See fix_lexical_addr for details. */
71
72#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
73#define NEED_SEPARATE_AP
74#endif
75
76/* Number of bytes of args popped by function being compiled on its return.
77 Zero if no bytes are to be popped.
78 May affect compilation of return insn or of function epilogue. */
79
80int current_function_pops_args;
81
82/* Nonzero if function being compiled needs to be given an address
83 where the value should be stored. */
84
85int current_function_returns_struct;
86
87/* Nonzero if function being compiled needs to
88 return the address of where it has put a structure value. */
89
90int current_function_returns_pcc_struct;
91
92/* Nonzero if function being compiled needs to be passed a static chain. */
93
94int current_function_needs_context;
95
96/* Nonzero if function being compiled can call setjmp. */
97
98int current_function_calls_setjmp;
99
100/* Nonzero if function being compiled can call longjmp. */
101
102int current_function_calls_longjmp;
103
104/* Nonzero if function being compiled receives nonlocal gotos
105 from nested functions. */
106
107int current_function_has_nonlocal_label;
108
109/* Nonzero if function being compiled contains nested functions. */
110
111int current_function_contains_functions;
112
113/* Nonzero if function being compiled can call alloca,
114 either as a subroutine or builtin. */
115
116int current_function_calls_alloca;
117
118/* Nonzero if the current function returns a pointer type */
119
120int current_function_returns_pointer;
121
122/* If some insns can be deferred to the delay slots of the epilogue, the
123 delay list for them is recorded here. */
124
125rtx current_function_epilogue_delay_list;
126
127/* If function's args have a fixed size, this is that size, in bytes.
128 Otherwise, it is -1.
129 May affect compilation of return insn or of function epilogue. */
130
131int current_function_args_size;
132
133/* # bytes the prologue should push and pretend that the caller pushed them.
134 The prologue must do this, but only if parms can be passed in registers. */
135
136int current_function_pretend_args_size;
137
138/* # of bytes of outgoing arguments required to be pushed by the prologue.
139 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
140 and no stack adjusts will be done on function calls. */
141
142int current_function_outgoing_args_size;
143
144/* This is the offset from the arg pointer to the place where the first
145 anonymous arg can be found, if there is one. */
146
147rtx current_function_arg_offset_rtx;
148
149/* Nonzero if current function uses varargs.h or equivalent.
150 Zero for functions that use stdarg.h. */
151
152int current_function_varargs;
153
154/* Quantities of various kinds of registers
155 used for the current function's args. */
156
157CUMULATIVE_ARGS current_function_args_info;
158
159/* Name of function now being compiled. */
160
161char *current_function_name;
162
163/* If non-zero, an RTL expression for that location at which the current
164 function returns its result. Always equal to
165 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
166 independently of the tree structures. */
167
168rtx current_function_return_rtx;
169
170/* Nonzero if the current function uses the constant pool. */
171
172int current_function_uses_const_pool;
173
174/* Nonzero if the current function uses pic_offset_table_rtx. */
175int current_function_uses_pic_offset_table;
176
177/* The arg pointer hard register, or the pseudo into which it was copied. */
178rtx current_function_internal_arg_pointer;
179
180/* The FUNCTION_DECL for an inline function currently being expanded. */
181tree inline_function_decl;
182
183/* Number of function calls seen so far in current function. */
184
185int function_call_count;
186
187/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
188 (labels to which there can be nonlocal gotos from nested functions)
189 in this function. */
190
191tree nonlocal_labels;
192
193/* RTX for stack slot that holds the current handler for nonlocal gotos.
194 Zero when function does not have nonlocal labels. */
195
196rtx nonlocal_goto_handler_slot;
197
198/* RTX for stack slot that holds the stack pointer value to restore
199 for a nonlocal goto.
200 Zero when function does not have nonlocal labels. */
201
202rtx nonlocal_goto_stack_level;
203
204/* Label that will go on parm cleanup code, if any.
205 Jumping to this label runs cleanup code for parameters, if
206 such code must be run. Following this code is the logical return label. */
207
208rtx cleanup_label;
209
210/* Label that will go on function epilogue.
211 Jumping to this label serves as a "return" instruction
212 on machines which require execution of the epilogue on all returns. */
213
214rtx return_label;
215
216/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
217 So we can mark them all live at the end of the function, if nonopt. */
218rtx save_expr_regs;
219
220/* List (chain of EXPR_LISTs) of all stack slots in this function.
221 Made for the sake of unshare_all_rtl. */
222rtx stack_slot_list;
223
224/* Chain of all RTL_EXPRs that have insns in them. */
225tree rtl_expr_chain;
226
227/* Label to jump back to for tail recursion, or 0 if we have
228 not yet needed one for this function. */
229rtx tail_recursion_label;
230
231/* Place after which to insert the tail_recursion_label if we need one. */
232rtx tail_recursion_reentry;
233
234/* Location at which to save the argument pointer if it will need to be
235 referenced. There are two cases where this is done: if nonlocal gotos
236 exist, or if vars stored at an offset from the argument pointer will be
237 needed by inner routines. */
238
239rtx arg_pointer_save_area;
240
241/* Offset to end of allocated area of stack frame.
242 If stack grows down, this is the address of the last stack slot allocated.
243 If stack grows up, this is the address for the next slot. */
244int frame_offset;
245
246/* List (chain of TREE_LISTs) of static chains for containing functions.
247 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
248 in an RTL_EXPR in the TREE_VALUE. */
249static tree context_display;
250
251/* List (chain of TREE_LISTs) of trampolines for nested functions.
252 The trampoline sets up the static chain and jumps to the function.
253 We supply the trampoline's address when the function's address is requested.
254
255 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
256 in an RTL_EXPR in the TREE_VALUE. */
257static tree trampoline_list;
258
259/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
260static rtx parm_birth_insn;
261
262#if 0
263/* Nonzero if a stack slot has been generated whose address is not
264 actually valid. It means that the generated rtl must all be scanned
265 to detect and correct the invalid addresses where they occur. */
266static int invalid_stack_slot;
267#endif
268
269/* Last insn of those whose job was to put parms into their nominal homes. */
270static rtx last_parm_insn;
271
272/* 1 + last pseudo register number used for loading a copy
273 of a parameter of this function. */
274static int max_parm_reg;
275
276/* Vector indexed by REGNO, containing location on stack in which
277 to put the parm which is nominally in pseudo register REGNO,
278 if we discover that that parm must go in the stack. */
279static rtx *parm_reg_stack_loc;
280
281#if 0 /* Turned off because 0 seems to work just as well. */
282/* Cleanup lists are required for binding levels regardless of whether
283 that binding level has cleanups or not. This node serves as the
284 cleanup list whenever an empty list is required. */
285static tree empty_cleanup_list;
286#endif
287
288/* Nonzero once virtual register instantiation has been done.
289 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
290static int virtuals_instantiated;
291
292/* Nonzero if we need to distinguish between the return value of this function
293 and the return value of a function called by this function. This helps
294 integrate.c */
295
296extern int rtx_equal_function_value_matters;
297
298void fixup_gotos ();
299
300static tree round_down ();
301static rtx round_trampoline_addr ();
302static rtx fixup_stack_1 ();
303static void fixup_var_refs ();
304static void fixup_var_refs_insns ();
305static void fixup_var_refs_1 ();
306static void optimize_bit_field ();
307static void instantiate_decls ();
308static void instantiate_decls_1 ();
309static int instantiate_virtual_regs_1 ();
310static rtx fixup_memory_subreg ();
311static rtx walk_fixup_memory_subreg ();
312\f
313/* In order to evaluate some expressions, such as function calls returning
314 structures in memory, we need to temporarily allocate stack locations.
315 We record each allocated temporary in the following structure.
316
317 Associated with each temporary slot is a nesting level. When we pop up
318 one level, all temporaries associated with the previous level are freed.
319 Normally, all temporaries are freed after the execution of the statement
320 in which they were created. However, if we are inside a ({...}) grouping,
321 the result may be in a temporary and hence must be preserved. If the
322 result could be in a temporary, we preserve it if we can determine which
323 one it is in. If we cannot determine which temporary may contain the
324 result, all temporaries are preserved. A temporary is preserved by
325 pretending it was allocated at the previous nesting level.
326
327 Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
329 free_temp_slots will not free them. */
330
/* One stack-frame temporary, kept on the `temp_slots' list so that
   freed slots can be reused by later statements at the same level.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The MEM rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};
346
347/* List of all temporaries allocated, both available and in use. */
348
349struct temp_slot *temp_slots;
350
351/* Current nesting level for temporaries. */
352
353int temp_slot_level;
354\f
355/* Pointer to chain of `struct function' for containing functions. */
356struct function *outer_function_chain;
357
358/* Given a function decl for a containing function,
359 return the `struct function' for it. */
360
361struct function *
362find_function_data (decl)
363 tree decl;
364{
365 struct function *p;
366 for (p = outer_function_chain; p; p = p->next)
367 if (p->decl == decl)
368 return p;
369 abort ();
370}
371
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  /* Allocate a fresh record and link it at the head of the chain of
     saved contexts for containing functions.  */
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Copy every piece of per-function global state into P, so that it
     can be restored verbatim by pop_function_context.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  /* The nested function starts with no pending variable fixups;
     put_var_into_stack may queue some here while it is being compiled.  */
  p->fixup_var_refs_queue = 0;

  /* Save the state of the other compiler subsystems, then reinitialize
     the RTL emitter for the nested function.  */
  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}
433
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  /* Unlink the containing function's saved record from the chain.  */
  outer_function_chain = p->next;

  /* Restore every piece of per-function global state saved by
     push_function_context.  */
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  /* Not restored from P: we just finished compiling a nested function,
     so the function we are returning to contains functions by definition.  */
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  /* Restore the state of the other compiler subsystems.  */
  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
505\f
506/* Allocate fixed slots in the stack frame of the current function. */
507
508/* Return size needed for stack frame based on slots so far allocated.
509 This size counts from zero. It is not rounded to STACK_BOUNDARY;
510 the caller may have to do that. */
511
512int
513get_frame_size ()
514{
515#ifdef FRAME_GROWS_DOWNWARD
516 return -frame_offset;
517#else
518 return frame_offset;
519#endif
520}
521
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Translate the ALIGN argument into a byte alignment.  */
  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      /* BLKmode implies no particular alignment; be maximally cautious.  */
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

  /* When the frame grows downward the new slot lies below the current
     offset, so the offset must be adjusted before the address is formed.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

  /* When the frame grows upward the slot occupies the old offset and
     the next slot begins after it.  */
#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  /* Record the slot so unshare_all_rtl can find every stack slot.  */
  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
598
599/* Assign a stack slot in a containing function.
600 First three arguments are same as in preceding function.
601 The last argument specifies the function to allocate in. */
602
603rtx
604assign_outer_stack_local (mode, size, align, function)
605 enum machine_mode mode;
606 int size;
607 int align;
608 struct function *function;
609{
610 register rtx x, addr;
611 int bigend_correction = 0;
612 int alignment;
613
614 /* Allocate in the memory associated with the function in whose frame
615 we are assigning. */
616 push_obstacks (function->function_obstack,
617 function->function_maybepermanent_obstack);
618
619 if (align == 0)
620 {
621 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
622 if (mode == BLKmode)
623 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
624 }
625 else if (align == -1)
626 {
627 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
628 size = CEIL_ROUND (size, alignment);
629 }
630 else
631 alignment = align / BITS_PER_UNIT;
632
6f086dfc
RS
633 /* Round frame offset to that alignment. */
634#ifdef FRAME_GROWS_DOWNWARD
635 frame_offset = FLOOR_ROUND (frame_offset, alignment);
636#else
637 frame_offset = CEIL_ROUND (frame_offset, alignment);
638#endif
639
640 /* On a big-endian machine, if we are allocating more space than we will use,
641 use the least significant bytes of those that are allocated. */
642#if BYTES_BIG_ENDIAN
643 if (mode != BLKmode)
644 bigend_correction = size - GET_MODE_SIZE (mode);
645#endif
646
647#ifdef FRAME_GROWS_DOWNWARD
648 function->frame_offset -= size;
649#endif
650 addr = plus_constant (virtual_stack_vars_rtx,
651 function->frame_offset + bigend_correction);
652#ifndef FRAME_GROWS_DOWNWARD
653 function->frame_offset += size;
654#endif
655
656 x = gen_rtx (MEM, mode, addr);
657
658 function->stack_slot_list
659 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
660
661 pop_obstacks ();
662
663 return x;
664}
665\f
666/* Allocate a temporary stack slot and record it for possible later
667 reuse.
668
669 MODE is the machine mode to be given to the returned rtx.
670
671 SIZE is the size in units of the space required. We do no rounding here
672 since assign_stack_local will do any required rounding.
673
674 KEEP is non-zero if this slot is to be retained after a call to
675 free_temp_slots. Automatic variables for a block are allocated with this
676 flag. */
677
678rtx
679assign_stack_temp (mode, size, keep)
680 enum machine_mode mode;
681 int size;
682 int keep;
683{
684 struct temp_slot *p, *best_p = 0;
685
686 /* First try to find an available, already-allocated temporary that is the
687 exact size we require. */
688 for (p = temp_slots; p; p = p->next)
689 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
690 break;
691
692 /* If we didn't find, one, try one that is larger than what we want. We
693 find the smallest such. */
694 if (p == 0)
695 for (p = temp_slots; p; p = p->next)
696 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
697 && (best_p == 0 || best_p->size > p->size))
698 best_p = p;
699
700 /* Make our best, if any, the one to use. */
701 if (best_p)
702 p = best_p;
703
704 /* If we still didn't find one, make a new temporary. */
705 if (p == 0)
706 {
707 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
708 p->size = size;
709 /* If the temp slot mode doesn't indicate the alignment,
710 use the largest possible, so no one will be disappointed. */
711 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
712 p->next = temp_slots;
713 temp_slots = p;
714 }
715
716 p->in_use = 1;
717 p->level = temp_slot_level;
718 p->keep = keep;
719 return p->slot;
720}
721\f
722/* If X could be a reference to a temporary slot, mark that slot as belonging
723 to the to one level higher. If X matched one of our slots, just mark that
724 one. Otherwise, we can't easily predict which it is, so upgrade all of
725 them. Kept slots need not be touched.
726
727 This is called when an ({...}) construct occurs and a statement
728 returns a value in memory. */
729
730void
731preserve_temp_slots (x)
732 rtx x;
733{
734 struct temp_slot *p;
735
736 /* If X is not in memory or is at a constant address, it cannot be in
737 a temporary slot. */
738 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
739 return;
740
741 /* First see if we can find a match. */
742 for (p = temp_slots; p; p = p->next)
743 if (p->in_use && x == p->slot)
744 {
745 p->level--;
746 return;
747 }
748
749 /* Otherwise, preserve all non-kept slots at this level. */
750 for (p = temp_slots; p; p = p->next)
751 if (p->in_use && p->level == temp_slot_level && ! p->keep)
752 p->level--;
753}
754
755/* Free all temporaries used so far. This is normally called at the end
756 of generating code for a statement. */
757
758void
759free_temp_slots ()
760{
761 struct temp_slot *p;
762
763 for (p = temp_slots; p; p = p->next)
764 if (p->in_use && p->level == temp_slot_level && ! p->keep)
765 p->in_use = 0;
766}
767
768/* Push deeper into the nesting level for stack temporaries. */
769
770void
771push_temp_slots ()
772{
773 /* For GNU C++, we must allow a sequence to be emitted anywhere in
774 the level where the sequence was started. By not changing levels
775 when the compiler is inside a sequence, the temporaries for the
776 sequence and the temporaries will not unwittingly conflict with
777 the temporaries for other sequences and/or code at that level. */
778 if (in_sequence_p ())
779 return;
780
781 temp_slot_level++;
782}
783
784/* Pop a temporary nesting level. All slots in use in the current level
785 are freed. */
786
787void
788pop_temp_slots ()
789{
790 struct temp_slot *p;
791
792 /* See comment in push_temp_slots about why we don't change levels
793 in sequences. */
794 if (in_sequence_p ())
795 return;
796
797 for (p = temp_slots; p; p = p->next)
798 if (p->in_use && p->level == temp_slot_level)
799 p->in_use = 0;
800
801 temp_slot_level--;
802}
803\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  /* Nothing to move if the object is not in a register.  */
  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      /* The variable belongs to a containing function: allocate the slot
	 in that function's frame.  Reuse the slot reserved for this
	 register if it is a parm pseudo.  */
      if (REGNO (reg) < function->max_parm_reg)
	new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_outer_stack_local (GET_MODE (reg),
					GET_MODE_SIZE (GET_MODE (reg)),
					0, function);
    }
  else
    {
      /* Local variable: allocate the slot in the current frame,
	 likewise preferring a reserved parm slot.  */
      if (REGNO (reg) < max_parm_reg)
	new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_stack_local (GET_MODE (reg),
				  GET_MODE_SIZE (GET_MODE (reg)),
				  0);
    }

  /* Mutate REG in place from a REG into a MEM at the new address, so
     every rtx that already shares REG sees the change automatically.  */
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.
	 Allocate the queue entry on the owning function's obstack so it
	 survives until that function's compilation is resumed.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);
      temp
	= (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
893\f
/* Fix up all references to VAR, which has just been moved from a register
   into memory: scan the main insn chain, every pending sequence, and
   every saved RTL_EXPR sequence, repairing each reference.  */

static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.
     The chain is "toplevel" only when no sequence is pending.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      /* NOTE(review): const0_rtx appears to mark an RTL_EXPR whose
	 sequence has already been consumed — confirm against stmt.c.  */
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, seq, 0);
	  end_sequence ();
	}
    }
}
929\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  /* The rtx being replaced.  */
  rtx old;
  /* Its replacement, or 0 if none has been chosen yet.  */
  rtx new;
  /* Next record in the list.  */
  struct fixup_replacement *next;
};
942
943/* REPLACEMENTS is a pointer to a list of the above structures and X is
944 some part of an insn. Return a struct fixup_replacement whose OLD
945 value is equal to X. Allocate a new structure if no such entry exists. */
946
947static struct fixup_replacement *
948find_replacement (replacements, x)
949 struct fixup_replacement **replacements;
950 rtx x;
951{
952 struct fixup_replacement *p;
953
954 /* See if we have already replaced this. */
955 for (p = *replacements; p && p->old != x; p = p->next)
956 ;
957
958 if (p == 0)
959 {
960 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
961 p->old = x;
962 p->new = 0;
963 p->next = *replacements;
964 *replacements = p;
965 }
966
967 return p;
968}
969
970/* Scan the insn-chain starting with INSN for refs to VAR
971 and fix them up. TOPLEVEL is nonzero if this chain is the
972 main chain of insns for the current function. */
973
static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      /* Fetch NEXT up front: INSN may be deleted below.  */
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	  || GET_CODE (insn) == JUMP_INSN)
	{
	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  if (toplevel
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == var
	      && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      next = delete_insn (insn);
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      struct fixup_replacement *replacements = 0;

	      fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      /* We can not separate USE insns from the CALL_INSN
			 that they belong to.  If this is a CALL_INSN, insert
			 the move insn before the USE insns preceding it
			 instead of immediately before the insn.  */
		      if (GET_CODE (insn) == CALL_INSN)
			{
			  insert_before = insn;
			  while (GET_CODE (PREV_INSN (insert_before)) == INSN
				 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
			    insert_before = PREV_INSN (insert_before);
			}
		      else
			insert_before = insn;

		      /* Load the replacement pseudo from the fixed-up
			 original location.  */
		      emit_insn_before (gen_move_insn (replacements->new,
						       replacements->old),
					insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
	}
      insn = next;
    }
}
1062\f
1063/* VAR is a MEM that used to be a pseudo register. See if the rtx expression
1064 at *LOC in INSN needs to be changed.
1065
1066 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1067 contain a list of original rtx's and replacements. If we find that we need
1068 to modify this insn by replacing a memory reference with a pseudo or by
1069 making a new MEM to implement a SUBREG, we consult that list to see if
1070 we have already chosen a replacement. If none has already been allocated,
1071 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1072 or the SUBREG, as appropriate, to the pseudo. */
1073
1074static void
1075fixup_var_refs_1 (var, loc, insn, replacements)
1076 register rtx var;
1077 register rtx *loc;
1078 rtx insn;
1079 struct fixup_replacement **replacements;
1080{
1081 register int i;
1082 register rtx x = *loc;
1083 RTX_CODE code = GET_CODE (x);
1084 register char *fmt;
1085 register rtx tem, tem1;
1086 struct fixup_replacement *replacement;
1087
1088 switch (code)
1089 {
1090 case MEM:
1091 if (var == x)
1092 {
1093 /* If we already have a replacement, use it. Otherwise,
1094 try to fix up this address in case it is invalid. */
1095
1096 replacement = find_replacement (replacements, var);
1097 if (replacement->new)
1098 {
1099 *loc = replacement->new;
1100 return;
1101 }
1102
1103 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1104
1105 /* Unless we are forcing memory to register, we can leave things
1106 the way they are if the insn is valid. */
1107
1108 INSN_CODE (insn) = -1;
1109 if (! flag_force_mem && recog_memoized (insn) >= 0)
1110 return;
1111
1112 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1113 return;
1114 }
1115
1116 /* If X contains VAR, we need to unshare it here so that we update
1117 each occurrence separately. But all identical MEMs in one insn
1118 must be replaced with the same rtx because of the possibility of
1119 MATCH_DUPs. */
1120
1121 if (reg_mentioned_p (var, x))
1122 {
1123 replacement = find_replacement (replacements, x);
1124 if (replacement->new == 0)
1125 replacement->new = copy_most_rtx (x, var);
1126
1127 *loc = x = replacement->new;
1128 }
1129 break;
1130
1131 case REG:
1132 case CC0:
1133 case PC:
1134 case CONST_INT:
1135 case CONST:
1136 case SYMBOL_REF:
1137 case LABEL_REF:
1138 case CONST_DOUBLE:
1139 return;
1140
1141 case SIGN_EXTRACT:
1142 case ZERO_EXTRACT:
1143 /* Note that in some cases those types of expressions are altered
1144 by optimize_bit_field, and do not survive to get here. */
1145 if (XEXP (x, 0) == var
1146 || (GET_CODE (XEXP (x, 0)) == SUBREG
1147 && SUBREG_REG (XEXP (x, 0)) == var))
1148 {
1149 /* Get TEM as a valid MEM in the mode presently in the insn.
1150
1151 We don't worry about the possibility of MATCH_DUP here; it
1152 is highly unlikely and would be tricky to handle. */
1153
1154 tem = XEXP (x, 0);
1155 if (GET_CODE (tem) == SUBREG)
1156 tem = fixup_memory_subreg (tem, insn, 1);
1157 tem = fixup_stack_1 (tem, insn);
1158
1159 /* Unless we want to load from memory, get TEM into the proper mode
1160 for an extract from memory. This can only be done if the
1161 extract is at a constant position and length. */
1162
1163 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1164 && GET_CODE (XEXP (x, 2)) == CONST_INT
1165 && ! mode_dependent_address_p (XEXP (tem, 0))
1166 && ! MEM_VOLATILE_P (tem))
1167 {
1168 enum machine_mode wanted_mode = VOIDmode;
1169 enum machine_mode is_mode = GET_MODE (tem);
1170 int width = INTVAL (XEXP (x, 1));
1171 int pos = INTVAL (XEXP (x, 2));
1172
1173#ifdef HAVE_extzv
1174 if (GET_CODE (x) == ZERO_EXTRACT)
1175 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1176#endif
1177#ifdef HAVE_extv
1178 if (GET_CODE (x) == SIGN_EXTRACT)
1179 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1180#endif
1181 /* If we have a narrower mode, we can do someting. */
1182 if (wanted_mode != VOIDmode
1183 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1184 {
1185 int offset = pos / BITS_PER_UNIT;
1186 rtx old_pos = XEXP (x, 2);
1187 rtx newmem;
1188
1189 /* If the bytes and bits are counted differently, we
1190 must adjust the offset. */
1191#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1192 offset = (GET_MODE_SIZE (is_mode)
1193 - GET_MODE_SIZE (wanted_mode) - offset);
1194#endif
1195
1196 pos %= GET_MODE_BITSIZE (wanted_mode);
1197
1198 newmem = gen_rtx (MEM, wanted_mode,
1199 plus_constant (XEXP (tem, 0), offset));
1200 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1201 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1202 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1203
1204 /* Make the change and see if the insn remains valid. */
1205 INSN_CODE (insn) = -1;
1206 XEXP (x, 0) = newmem;
1207 XEXP (x, 2) = gen_rtx (CONST_INT, VOIDmode, pos);
1208
1209 if (recog_memoized (insn) >= 0)
1210 return;
1211
1212 /* Otherwise, restore old position. XEXP (x, 0) will be
1213 restored later. */
1214 XEXP (x, 2) = old_pos;
1215 }
1216 }
1217
1218 /* If we get here, the bitfield extract insn can't accept a memory
1219 reference. Copy the input into a register. */
1220
1221 tem1 = gen_reg_rtx (GET_MODE (tem));
1222 emit_insn_before (gen_move_insn (tem1, tem), insn);
1223 XEXP (x, 0) = tem1;
1224 return;
1225 }
1226 break;
1227
1228 case SUBREG:
1229 if (SUBREG_REG (x) == var)
1230 {
1231 /* If this SUBREG makes VAR wider, it has become a paradoxical
1232 SUBREG with VAR in memory, but these aren't allowed at this
1233 stage of the compilation. So load VAR into a pseudo and take
1234 a SUBREG of that pseudo. */
1235 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1236 {
1237 replacement = find_replacement (replacements, var);
1238 if (replacement->new == 0)
1239 replacement->new = gen_reg_rtx (GET_MODE (var));
1240 SUBREG_REG (x) = replacement->new;
1241 return;
1242 }
1243
1244 /* See if we have already found a replacement for this SUBREG.
1245 If so, use it. Otherwise, make a MEM and see if the insn
1246 is recognized. If not, or if we should force MEM into a register,
1247 make a pseudo for this SUBREG. */
1248 replacement = find_replacement (replacements, x);
1249 if (replacement->new)
1250 {
1251 *loc = replacement->new;
1252 return;
1253 }
1254
1255 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1256
1257 if (! flag_force_mem && recog_memoized (insn) >= 0)
1258 return;
1259
1260 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1261 return;
1262 }
1263 break;
1264
1265 case SET:
1266 /* First do special simplification of bit-field references. */
1267 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1268 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1269 optimize_bit_field (x, insn, 0);
1270 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1271 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1272 optimize_bit_field (x, insn, 0);
1273
1274 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1275 insn into a pseudo and store the low part of the pseudo into VAR. */
1276 if (GET_CODE (SET_DEST (x)) == SUBREG
1277 && SUBREG_REG (SET_DEST (x)) == var
1278 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1279 > GET_MODE_SIZE (GET_MODE (var))))
1280 {
1281 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1282 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1283 tem)),
1284 insn);
1285 break;
1286 }
1287
1288 {
1289 rtx dest = SET_DEST (x);
1290 rtx src = SET_SRC (x);
1291 rtx outerdest = dest;
1292
1293 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1294 || GET_CODE (dest) == SIGN_EXTRACT
1295 || GET_CODE (dest) == ZERO_EXTRACT)
1296 dest = XEXP (dest, 0);
1297
1298 if (GET_CODE (src) == SUBREG)
1299 src = XEXP (src, 0);
1300
1301 /* If VAR does not appear at the top level of the SET
1302 just scan the lower levels of the tree. */
1303
1304 if (src != var && dest != var)
1305 break;
1306
1307 /* We will need to rerecognize this insn. */
1308 INSN_CODE (insn) = -1;
1309
1310#ifdef HAVE_insv
1311 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1312 {
1313 /* Since this case will return, ensure we fixup all the
1314 operands here. */
1315 fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
1316 fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
1317 fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);
1318
1319 tem = XEXP (outerdest, 0);
1320
1321 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1322 that may appear inside a ZERO_EXTRACT.
1323 This was legitimate when the MEM was a REG. */
1324 if (GET_CODE (tem) == SUBREG
1325 && SUBREG_REG (tem) == var)
1326 tem = fixup_memory_subreg (tem, insn, 1);
1327 else
1328 tem = fixup_stack_1 (tem, insn);
1329
1330 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1331 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1332 && ! mode_dependent_address_p (XEXP (tem, 0))
1333 && ! MEM_VOLATILE_P (tem))
1334 {
1335 enum machine_mode wanted_mode
1336 = insn_operand_mode[(int) CODE_FOR_insv][0];
1337 enum machine_mode is_mode = GET_MODE (tem);
1338 int width = INTVAL (XEXP (outerdest, 1));
1339 int pos = INTVAL (XEXP (outerdest, 2));
1340
1341 /* If we have a narrower mode, we can do someting. */
1342 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1343 {
1344 int offset = pos / BITS_PER_UNIT;
1345 rtx old_pos = XEXP (outerdest, 2);
1346 rtx newmem;
1347
1348#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1349 offset = (GET_MODE_SIZE (is_mode)
1350 - GET_MODE_SIZE (wanted_mode) - offset);
1351#endif
1352
1353 pos %= GET_MODE_BITSIZE (wanted_mode);
1354
1355 newmem = gen_rtx (MEM, wanted_mode,
1356 plus_constant (XEXP (tem, 0), offset));
1357 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1358 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1359 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1360
1361 /* Make the change and see if the insn remains valid. */
1362 INSN_CODE (insn) = -1;
1363 XEXP (outerdest, 0) = newmem;
1364 XEXP (outerdest, 2) = gen_rtx (CONST_INT, VOIDmode, pos);
1365
1366 if (recog_memoized (insn) >= 0)
1367 return;
1368
1369 /* Otherwise, restore old position. XEXP (x, 0) will be
1370 restored later. */
1371 XEXP (outerdest, 2) = old_pos;
1372 }
1373 }
1374
1375 /* If we get here, the bit-field store doesn't allow memory
1376 or isn't located at a constant position. Load the value into
1377 a register, do the store, and put it back into memory. */
1378
1379 tem1 = gen_reg_rtx (GET_MODE (tem));
1380 emit_insn_before (gen_move_insn (tem1, tem), insn);
1381 emit_insn_after (gen_move_insn (tem, tem1), insn);
1382 XEXP (outerdest, 0) = tem1;
1383 return;
1384 }
1385#endif
1386
1387 /* STRICT_LOW_PART is a no-op on memory references
1388 and it can cause combinations to be unrecognizable,
1389 so eliminate it. */
1390
1391 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
1392 SET_DEST (x) = XEXP (SET_DEST (x), 0);
1393
1394 /* A valid insn to copy VAR into or out of a register
1395 must be left alone, to avoid an infinite loop here.
1396 If the reference to VAR is by a subreg, fix that up,
1397 since SUBREG is not valid for a memref.
1398 Also fix up the address of the stack slot. */
1399
1400 if ((SET_SRC (x) == var
1401 || (GET_CODE (SET_SRC (x)) == SUBREG
1402 && SUBREG_REG (SET_SRC (x)) == var))
1403 && (GET_CODE (SET_DEST (x)) == REG
1404 || (GET_CODE (SET_DEST (x)) == SUBREG
1405 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1406 && recog_memoized (insn) >= 0)
1407 {
1408 replacement = find_replacement (replacements, SET_SRC (x));
1409 if (replacement->new)
1410 {
1411 SET_SRC (x) = replacement->new;
1412 return;
1413 }
1414 else if (GET_CODE (SET_SRC (x)) == SUBREG)
1415 SET_SRC (x) = replacement->new
1416 = fixup_memory_subreg (SET_SRC (x), insn, 0);
1417 else
1418 SET_SRC (x) = replacement->new
1419 = fixup_stack_1 (SET_SRC (x), insn);
1420 return;
1421 }
1422
1423 if ((SET_DEST (x) == var
1424 || (GET_CODE (SET_DEST (x)) == SUBREG
1425 && SUBREG_REG (SET_DEST (x)) == var))
1426 && (GET_CODE (SET_SRC (x)) == REG
1427 || (GET_CODE (SET_SRC (x)) == SUBREG
1428 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1429 && recog_memoized (insn) >= 0)
1430 {
1431 if (GET_CODE (SET_DEST (x)) == SUBREG)
1432 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
1433 else
1434 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
1435 return;
1436 }
1437
1438 /* Otherwise, storing into VAR must be handled specially
1439 by storing into a temporary and copying that into VAR
1440 with a new insn after this one. */
1441
1442 if (dest == var)
1443 {
1444 rtx temp;
1445 rtx fixeddest;
1446 tem = SET_DEST (x);
1447 /* STRICT_LOW_PART can be discarded, around a MEM. */
1448 if (GET_CODE (tem) == STRICT_LOW_PART)
1449 tem = XEXP (tem, 0);
1450 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
1451 if (GET_CODE (tem) == SUBREG)
1452 fixeddest = fixup_memory_subreg (tem, insn, 0);
1453 else
1454 fixeddest = fixup_stack_1 (tem, insn);
1455
1456 temp = gen_reg_rtx (GET_MODE (tem));
1457 emit_insn_after (gen_move_insn (fixeddest, temp), insn);
1458 SET_DEST (x) = temp;
1459 }
1460 }
1461 }
1462
1463 /* Nothing special about this RTX; fix its operands. */
1464
1465 fmt = GET_RTX_FORMAT (code);
1466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1467 {
1468 if (fmt[i] == 'e')
1469 fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
1470 if (fmt[i] == 'E')
1471 {
1472 register int j;
1473 for (j = 0; j < XVECLEN (x, i); j++)
1474 fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
1475 }
1476 }
1477}
1478\f
1479/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1480 return an rtx (MEM:m1 newaddr) which is equivalent.
1481 If any insns must be emitted to compute NEWADDR, put them before INSN.
1482
1483 UNCRITICAL nonzero means accept paradoxical subregs.
1484 This is used for subregs found inside of ZERO_EXTRACTs. */
1485
1486static rtx
1487fixup_memory_subreg (x, insn, uncritical)
1488 rtx x;
1489 rtx insn;
1490 int uncritical;
1491{
1492 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1493 rtx addr = XEXP (SUBREG_REG (x), 0);
1494 enum machine_mode mode = GET_MODE (x);
1495 rtx saved, result;
1496
1497 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1498 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1499 && ! uncritical)
1500 abort ();
1501
1502#if BYTES_BIG_ENDIAN
1503 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1504 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1505#endif
1506 addr = plus_constant (addr, offset);
1507 if (!flag_force_addr && memory_address_p (mode, addr))
1508 /* Shortcut if no insns need be emitted. */
1509 return change_address (SUBREG_REG (x), mode, addr);
1510 start_sequence ();
1511 result = change_address (SUBREG_REG (x), mode, addr);
1512 emit_insn_before (gen_sequence (), insn);
1513 end_sequence ();
1514 return result;
1515}
1516
1517/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1518 Replace subexpressions of X in place.
1519 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1520 Otherwise return X, with its contents possibly altered.
1521
1522 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1523
1524static rtx
1525walk_fixup_memory_subreg (x, insn)
1526 register rtx x;
1527 rtx insn;
1528{
1529 register enum rtx_code code;
1530 register char *fmt;
1531 register int i;
1532
1533 if (x == 0)
1534 return 0;
1535
1536 code = GET_CODE (x);
1537
1538 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1539 return fixup_memory_subreg (x, insn, 0);
1540
1541 /* Nothing special about this RTX; fix its operands. */
1542
1543 fmt = GET_RTX_FORMAT (code);
1544 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1545 {
1546 if (fmt[i] == 'e')
1547 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1548 if (fmt[i] == 'E')
1549 {
1550 register int j;
1551 for (j = 0; j < XVECLEN (x, i); j++)
1552 XVECEXP (x, i, j)
1553 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1554 }
1555 }
1556 return x;
1557}
1558\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.

   NOTE: this function is currently compiled out (#if 0); fixup_stack_1
   is called directly by the fixup routines above instead.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	    || GET_CODE (insn) == JUMP_INSN)
	  fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif
1580
1581/* For each memory ref within X, if it refers to a stack slot
1582 with an out of range displacement, put the address in a temp register
1583 (emitting new insns before INSN to load these registers)
1584 and alter the memory ref to use that register.
1585 Replace each such MEM rtx with a copy, to avoid clobberage. */
1586
1587static rtx
1588fixup_stack_1 (x, insn)
1589 rtx x;
1590 rtx insn;
1591{
1592 register int i;
1593 register RTX_CODE code = GET_CODE (x);
1594 register char *fmt;
1595
1596 if (code == MEM)
1597 {
1598 register rtx ad = XEXP (x, 0);
1599 /* If we have address of a stack slot but it's not valid
1600 (displacement is too large), compute the sum in a register. */
1601 if (GET_CODE (ad) == PLUS
1602 && GET_CODE (XEXP (ad, 0)) == REG
1603 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1604 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1605 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1606 {
1607 rtx temp, seq;
1608 if (memory_address_p (GET_MODE (x), ad))
1609 return x;
1610
1611 start_sequence ();
1612 temp = copy_to_reg (ad);
1613 seq = gen_sequence ();
1614 end_sequence ();
1615 emit_insn_before (seq, insn);
1616 return change_address (x, VOIDmode, temp);
1617 }
1618 return x;
1619 }
1620
1621 fmt = GET_RTX_FORMAT (code);
1622 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1623 {
1624 if (fmt[i] == 'e')
1625 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1626 if (fmt[i] == 'E')
1627 {
1628 register int j;
1629 for (j = 0; j < XVECLEN (x, i); j++)
1630 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1631 }
1632 }
1633 return x;
1634}
1635\f
1636/* Optimization: a bit-field instruction whose field
1637 happens to be a byte or halfword in memory
1638 can be changed to a move instruction.
1639
1640 We call here when INSN is an insn to examine or store into a bit-field.
1641 BODY is the SET-rtx to be altered.
1642
1643 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1644 (Currently this is called only from function.c, and EQUIV_MEM
1645 is always 0.) */
1646
static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;			/* nonzero if the bit-field is the SET_DEST */
  rtx seq = 0;			/* conversion insns to emit if we succeed */
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset = INTVAL (XEXP (bitfield, 2));
	  /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
	  offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
	      offset -= (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			 - MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (memref))));
#endif
	    }

	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      /* Storing into the field: narrow the source to match.  */
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      /* Extracting the field: widen or convert the new source
		 to match the destination.  */
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0)
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
1787\f
1788/* These routines are responsible for converting virtual register references
1789 to the actual hard register references once RTL generation is complete.
1790
1791 The following four variables are used for communication between the
1792 routines. They contain the offsets of the virtual registers from their
1793 respective hard registers. */
1794
static int in_arg_offset;	/* set from FIRST_PARM_OFFSET (fndecl) */
static int var_offset;		/* set from STARTING_FRAME_OFFSET */
static int dynamic_offset;	/* set from STACK_DYNAMIC_OFFSET (fndecl) */
static int out_arg_offset;	/* set from STACK_POINTER_OFFSET */
1799
1800/* In most machines, the stack pointer register is equivalent to the bottom
1801 of the stack. */
1802
#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
/* No outgoing-args area is accumulated, so dynamic allocation starts
   right at the stack pointer.  */
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
1836
1837/* Pass through the INSNS of function FNDECL and convert virtual register
1838 references to hard register references. */
1839
1840void
1841instantiate_virtual_regs (fndecl, insns)
1842 tree fndecl;
1843 rtx insns;
1844{
1845 rtx insn;
1846
1847 /* Compute the offsets to use for this function. */
1848 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1849 var_offset = STARTING_FRAME_OFFSET;
1850 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1851 out_arg_offset = STACK_POINTER_OFFSET;
1852
1853 /* Scan all variables and parameters of this function. For each that is
1854 in memory, instantiate all virtual registers if the result is a valid
1855 address. If not, we do it later. That will handle most uses of virtual
1856 regs on many machines. */
1857 instantiate_decls (fndecl, 1);
1858
1859 /* Initialize recognition, indicating that volatile is OK. */
1860 init_recog ();
1861
1862 /* Scan through all the insns, instantiating every virtual register still
1863 present. */
1864 for (insn = insns; insn; insn = NEXT_INSN (insn))
1865 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1866 || GET_CODE (insn) == CALL_INSN)
1867 {
1868 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1869 instantiate_virtual_regs_1 (&REG_NOTES (insn), 0, 0);
1870 }
1871
1872 /* Now instantiate the remaining register equivalences for debugging info.
1873 These will not be valid addresses. */
1874 instantiate_decls (fndecl, 0);
1875
1876 /* Indicate that, from now on, assign_stack_local should use
1877 frame_pointer_rtx. */
1878 virtuals_instantiated = 1;
1879}
1880
1881/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1882 all virtual registers in their DECL_RTL's.
1883
1884 If VALID_ONLY, do this only if the resulting address is still valid.
1885 Otherwise, always do it. */
1886
static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (TREE_INLINE (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM)
	instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
				    valid_only ? DECL_RTL (decl) : 0, 0);
#if 1 /* This is probably correct, but it seems to require fixes
	 elsewhere in order to work.  Let's fix them in 2.1.  */
      /* Also instantiate the address recorded as the parameter's
	 incoming home, when that home is in memory.  */
      if (DECL_INCOMING_RTL (decl)
	  && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM)
	instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
				    valid_only ? DECL_INCOMING_RTL (decl) : 0,
				    0);
#endif
    }

  /* Now process all variables defined in the function or its subblocks. */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (TREE_INLINE (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
1929
1930/* Subroutine of instantiate_decls: Process all decls in the given
1931 BLOCK node and all its subblocks. */
1932
1933static void
1934instantiate_decls_1 (let, valid_only)
1935 tree let;
1936 int valid_only;
1937{
1938 tree t;
1939
1940 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1941 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
1942 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
1943 valid_only ? DECL_RTL (t) : 0, 0);
1944
1945 /* Process all subblocks. */
1946 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1947 instantiate_decls_1 (t, valid_only);
1948}
1949\f
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;		/* Real register that replaces a virtual one.  */
  int offset;		/* Displacement to add along with NEW.  */
  rtx temp;
  rtx seq;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* None of these can contain a virtual register.  */
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.

	 The in_arg_offset, var_offset, dynamic_offset and out_arg_offset
	 values are presumably the file-scope displacements computed when
	 the virtual registers were instantiated (NOTE(review): defined
	 outside this window — confirm).  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  /* Compute SET_SRC minus the offset in a sequence emitted
	     just before OBJECT.  */
	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), 0);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), 0);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  if (!validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      /* Not a store to a virtual register: process both sides.  The
	 destination is handled recursively, the source via RESTART to
	 avoid recursion.  */
      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else
		{
		  /* Inner PLUS does not start with a virtual register;
		     process it as an ordinary subexpression.  */
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  /* Replace the virtual (or inner PLUS) with NEW and fold the
	     offset into the constant term, remembering OLD in case we
	     must back out.  */
	  old = XEXP (x, 0);
	  XEXP (x, 0) = new;
	  new = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with its
	     first operand.  */
	  if (new == const0_rtx
	      && validate_change (object, loc, XEXP (x, 0), 0))
	    return 1;

	  /* Next try to replace constant with new one.  */
	  if (!validate_change (object, &XEXP (x, 1), new, 0))
	    {
	      if (! extra_insns)
		{
		  /* Not allowed to emit insns: restore X and fail.  */
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
		  XEXP (x, 0) = old;

		  start_sequence ();
		  temp = force_operand (new, 0);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:	case UDIV:
    case MOD:	case UMOD:
    case AND:	case IOR:	case XOR:
    case LSHIFT:  case ASHIFT:	case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ROTATERT:
    case NE:	case EQ:
    case GE:	case GT:	case GEU:	case GTU:
    case LE:	case LT:	case LEU:	case LTU:
      /* Process operand 1 recursively (constants need no work),
	 then loop back to handle operand 0.  */
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of regno_reg_rtx.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case USE:
    case CLOBBER:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:	       case NOT:
    case PRE_DEC:  case PRE_INC:  case POST_DEC:  case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND:  case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      /* Compute the sum in a sequence before OBJECT and use the
		 resulting register instead.  */
	      start_sequence ();
	      temp = force_operand (temp, 0);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;
    }

  /* Scan all subexpressions: 'e' format fields are single rtx operands,
     'E' fields are rtx vectors.  Fail as soon as any operand fails.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
2302\f
2303/* Optimization: assuming this function does not receive nonlocal gotos,
2304 delete the handlers for such, as well as the insns to establish
2305 and disestablish them. */
2306
2307static void
2308delete_handlers ()
2309{
2310 rtx insn;
2311 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2312 {
2313 /* Delete the handler by turning off the flag that would
2314 prevent jump_optimize from deleting it.
2315 Also permit deletion of the nonlocal labels themselves
2316 if nothing local refers to them. */
2317 if (GET_CODE (insn) == CODE_LABEL)
2318 LABEL_PRESERVE_P (insn) = 0;
2319 if (GET_CODE (insn) == INSN
59257ff7
RK
2320 && ((nonlocal_goto_handler_slot != 0
2321 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2322 || (nonlocal_goto_stack_level != 0
2323 && reg_mentioned_p (nonlocal_goto_stack_level,
2324 PATTERN (insn)))))
6f086dfc
RS
2325 delete_insn (insn);
2326 }
2327}
2328
2329/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2330 of the current function. */
2331
2332rtx
2333nonlocal_label_rtx_list ()
2334{
2335 tree t;
2336 rtx x = 0;
2337
2338 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2339 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2340
2341 return x;
2342}
2343\f
2344/* Output a USE for any register use in RTL.
2345 This is used with -noreg to mark the extent of lifespan
2346 of any registers used in a user-visible variable's DECL_RTL. */
2347
2348void
2349use_variable (rtl)
2350 rtx rtl;
2351{
2352 if (GET_CODE (rtl) == REG)
2353 /* This is a register variable. */
2354 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2355 else if (GET_CODE (rtl) == MEM
2356 && GET_CODE (XEXP (rtl, 0)) == REG
2357 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2358 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2359 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2360 /* This is a variable-sized structure. */
2361 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2362}
2363
2364/* Like use_variable except that it outputs the USEs after INSN
2365 instead of at the end of the insn-chain. */
2366
2367void
2368use_variable_after (rtl, insn)
2369 rtx rtl, insn;
2370{
2371 if (GET_CODE (rtl) == REG)
2372 /* This is a register variable. */
2373 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2374 else if (GET_CODE (rtl) == MEM
2375 && GET_CODE (XEXP (rtl, 0)) == REG
2376 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2377 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2378 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2379 /* This is a variable-sized structure. */
2380 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2381}
2382\f
/* Return the value of max_parm_reg, which assign_parms sets from
   max_reg_num () after allocating the parameter pseudo-registers.  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
2388
2389/* Return the first insn following those generated by `assign_parms'. */
2390
2391rtx
2392get_first_nonparm_insn ()
2393{
2394 if (last_parm_insn)
2395 return NEXT_INSN (last_parm_insn);
2396 return get_insns ();
2397}
2398
2399/* Return 1 if EXP returns an aggregate value, for which an address
2400 must be passed to the function or returned by the function. */
2401
2402int
2403aggregate_value_p (exp)
2404 tree exp;
2405{
2406 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2407 return 1;
2408 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2409 return 1;
2410 if (flag_pcc_struct_return
2411 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2412 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2413 return 1;
2414 return 0;
2415}
2416\f
2417/* Assign RTL expressions to the function's parameters.
2418 This may involve copying them into registers and using
2419 those registers as the RTL for them.
2420
2421 If SECOND_TIME is non-zero it means that this function is being
2422 called a second time. This is done by integrate.c when a function's
2423 compilation is deferred. We need to come back here in case the
2424 FUNCTION_ARG macro computes items needed for the rest of the compilation
2425 (such as changing which registers are fixed or caller-saved). But suppress
2426 writing any insns or setting DECL_RTL of anything in this case. */
2427
2428void
2429assign_parms (fndecl, second_time)
2430 tree fndecl;
2431 int second_time;
2432{
2433 register tree parm;
2434 register rtx entry_parm = 0;
2435 register rtx stack_parm = 0;
2436 CUMULATIVE_ARGS args_so_far;
2437 enum machine_mode passed_mode, nominal_mode;
2438 /* Total space needed so far for args on the stack,
2439 given as a constant and a tree-expression. */
2440 struct args_size stack_args_size;
2441 tree fntype = TREE_TYPE (fndecl);
2442 tree fnargs = DECL_ARGUMENTS (fndecl);
2443 /* This is used for the arg pointer when referring to stack args. */
2444 rtx internal_arg_pointer;
2445 /* This is a dummy PARM_DECL that we used for the function result if
2446 the function returns a structure. */
2447 tree function_result_decl = 0;
2448 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2449 int varargs_setup = 0;
2450
2451 /* Nonzero if the last arg is named `__builtin_va_alist',
2452 which is used on some machines for old-fashioned non-ANSI varargs.h;
2453 this should be stuck onto the stack as if it had arrived there. */
2454 int vararg
2455 = (fnargs
2456 && (parm = tree_last (fnargs)) != 0
2457 && DECL_NAME (parm)
2458 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2459 "__builtin_va_alist")));
2460
2461 /* Nonzero if function takes extra anonymous args.
2462 This means the last named arg must be on the stack
2463 right before the anonymous ones. */
2464 int stdarg
2465 = (TYPE_ARG_TYPES (fntype) != 0
2466 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2467 != void_type_node));
2468
2469 /* If the reg that the virtual arg pointer will be translated into is
2470 not a fixed reg or is the stack pointer, make a copy of the virtual
2471 arg pointer, and address parms via the copy. The frame pointer is
2472 considered fixed even though it is not marked as such.
2473
2474 The second time through, simply use ap to avoid generating rtx. */
2475
2476 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2477 || ! (fixed_regs[ARG_POINTER_REGNUM]
2478 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2479 && ! second_time)
2480 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2481 else
2482 internal_arg_pointer = virtual_incoming_args_rtx;
2483 current_function_internal_arg_pointer = internal_arg_pointer;
2484
2485 stack_args_size.constant = 0;
2486 stack_args_size.var = 0;
2487
2488 /* If struct value address is treated as the first argument, make it so. */
2489 if (aggregate_value_p (DECL_RESULT (fndecl))
2490 && ! current_function_returns_pcc_struct
2491 && struct_value_incoming_rtx == 0)
2492 {
2493 tree type = build_pointer_type (fntype);
2494
2495 function_result_decl = build_decl (PARM_DECL, 0, type);
2496
2497 DECL_ARG_TYPE (function_result_decl) = type;
2498 TREE_CHAIN (function_result_decl) = fnargs;
2499 fnargs = function_result_decl;
2500 }
2501
2502 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2503 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2504
2505#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2506 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, 0);
2507#else
2508 INIT_CUMULATIVE_ARGS (args_so_far, fntype, 0);
2509#endif
2510
2511 /* We haven't yet found an argument that we must push and pretend the
2512 caller did. */
2513 current_function_pretend_args_size = 0;
2514
2515 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2516 {
2517 int aggregate
2518 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2519 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2520 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2521 struct args_size stack_offset;
2522 struct args_size arg_size;
2523 int passed_pointer = 0;
2524 tree passed_type = DECL_ARG_TYPE (parm);
2525
2526 /* Set LAST_NAMED if this is last named arg before some
2527 anonymous args. We treat it as if it were anonymous too. */
2528 int last_named = ((TREE_CHAIN (parm) == 0
2529 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2530 && (vararg || stdarg));
2531
2532 if (TREE_TYPE (parm) == error_mark_node
2533 /* This can happen after weird syntax errors
2534 or if an enum type is defined among the parms. */
2535 || TREE_CODE (parm) != PARM_DECL
2536 || passed_type == NULL)
2537 {
2538 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2539 TREE_USED (parm) = 1;
2540 continue;
2541 }
2542
2543 /* For varargs.h function, save info about regs and stack space
2544 used by the individual args, not including the va_alist arg. */
2545 if (vararg && last_named)
2546 current_function_args_info = args_so_far;
2547
2548 /* Find mode of arg as it is passed, and mode of arg
2549 as it should be during execution of this function. */
2550 passed_mode = TYPE_MODE (passed_type);
2551 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2552
2553#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2554 /* See if this arg was passed by invisible reference. */
2555 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2556 passed_type, ! last_named))
2557 {
2558 passed_type = build_pointer_type (passed_type);
2559 passed_pointer = 1;
2560 passed_mode = nominal_mode = Pmode;
2561 }
2562#endif
2563
2564 /* Let machine desc say which reg (if any) the parm arrives in.
2565 0 means it arrives on the stack. */
2566#ifdef FUNCTION_INCOMING_ARG
2567 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2568 passed_type, ! last_named);
2569#else
2570 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2571 passed_type, ! last_named);
2572#endif
2573
2574#ifdef SETUP_INCOMING_VARARGS
2575 /* If this is the last named parameter, do any required setup for
2576 varargs or stdargs. We need to know about the case of this being an
2577 addressable type, in which case we skip the registers it
2578 would have arrived in.
2579
2580 For stdargs, LAST_NAMED will be set for two parameters, the one that
2581 is actually the last named, and the dummy parameter. We only
2582 want to do this action once.
2583
2584 Also, indicate when RTL generation is to be suppressed. */
2585 if (last_named && !varargs_setup)
2586 {
2587 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2588 current_function_pretend_args_size,
2589 second_time);
2590 varargs_setup = 1;
2591 }
2592#endif
2593
2594 /* Determine parm's home in the stack,
2595 in case it arrives in the stack or we should pretend it did.
2596
2597 Compute the stack position and rtx where the argument arrives
2598 and its size.
2599
2600 There is one complexity here: If this was a parameter that would
2601 have been passed in registers, but wasn't only because it is
2602 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2603 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2604 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2605 0 as it was the previous time. */
2606
2607 locate_and_pad_parm (passed_mode, passed_type,
2608#ifdef STACK_PARMS_IN_REG_PARM_AREA
2609 1,
2610#else
2611#ifdef FUNCTION_INCOMING_ARG
2612 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2613 passed_type,
2614 (! last_named
2615 || varargs_setup)) != 0,
2616#else
2617 FUNCTION_ARG (args_so_far, passed_mode,
2618 passed_type,
2619 ! last_named || varargs_setup) != 0,
2620#endif
2621#endif
2622 fndecl, &stack_args_size, &stack_offset, &arg_size);
2623
2624 if (! second_time)
2625 {
2626 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2627
2628 if (offset_rtx == const0_rtx)
2629 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2630 else
2631 stack_parm = gen_rtx (MEM, passed_mode,
2632 gen_rtx (PLUS, Pmode,
2633 internal_arg_pointer, offset_rtx));
2634
2635 /* If this is a memory ref that contains aggregate components,
2636 mark it as such for cse and loop optimize. */
2637 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2638 }
2639
2640 /* If this parameter was passed both in registers and in the stack,
2641 use the copy on the stack. */
2642 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2643 entry_parm = 0;
2644
2645 /* If this parm was passed part in regs and part in memory,
2646 pretend it arrived entirely in memory
2647 by pushing the register-part onto the stack.
2648
2649 In the special case of a DImode or DFmode that is split,
2650 we could put it together in a pseudoreg directly,
2651 but for now that's not worth bothering with. */
2652
2653 if (entry_parm)
2654 {
2655 int nregs = 0;
2656#ifdef FUNCTION_ARG_PARTIAL_NREGS
2657 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2658 passed_type, ! last_named);
2659#endif
2660
2661 if (nregs > 0)
2662 {
2663 current_function_pretend_args_size
2664 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2665 / (PARM_BOUNDARY / BITS_PER_UNIT)
2666 * (PARM_BOUNDARY / BITS_PER_UNIT));
2667
2668 if (! second_time)
2669 move_block_from_reg (REGNO (entry_parm),
2670 validize_mem (stack_parm), nregs);
2671 entry_parm = stack_parm;
2672 }
2673 }
2674
2675 /* If we didn't decide this parm came in a register,
2676 by default it came on the stack. */
2677 if (entry_parm == 0)
2678 entry_parm = stack_parm;
2679
2680 /* Record permanently how this parm was passed. */
2681 if (! second_time)
2682 DECL_INCOMING_RTL (parm) = entry_parm;
2683
2684 /* If there is actually space on the stack for this parm,
2685 count it in stack_args_size; otherwise set stack_parm to 0
2686 to indicate there is no preallocated stack slot for the parm. */
2687
2688 if (entry_parm == stack_parm
2689#ifdef REG_PARM_STACK_SPACE
2690 /* On some machines, even if a parm value arrives in a register
2691 there is still an (uninitialized) stack slot allocated for it. */
2692 || REG_PARM_STACK_SPACE (fndecl) > 0
2693#endif
2694 )
2695 {
2696 stack_args_size.constant += arg_size.constant;
2697 if (arg_size.var)
2698 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2699 }
2700 else
2701 /* No stack slot was pushed for this parm. */
2702 stack_parm = 0;
2703
2704 /* Update info on where next arg arrives in registers. */
2705
2706 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2707 passed_type, ! last_named);
2708
2709 /* If this is our second time through, we are done with this parm. */
2710 if (second_time)
2711 continue;
2712
2713 /* Now adjust STACK_PARM to the mode and precise location
2714 where this parameter should live during execution,
2715 if we discover that it must live in the stack during execution.
2716 To make debuggers happier on big-endian machines, we store
2717 the value in the last bytes of the space available. */
2718
2719 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2720 && stack_parm != 0)
2721 {
2722 rtx offset_rtx;
2723
2724#if BYTES_BIG_ENDIAN
2725 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2726 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2727 - GET_MODE_SIZE (nominal_mode));
2728#endif
2729
2730 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2731 if (offset_rtx == const0_rtx)
2732 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2733 else
2734 stack_parm = gen_rtx (MEM, nominal_mode,
2735 gen_rtx (PLUS, Pmode,
2736 internal_arg_pointer, offset_rtx));
2737
2738 /* If this is a memory ref that contains aggregate components,
2739 mark it as such for cse and loop optimize. */
2740 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2741 }
2742
2743 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2744 in the mode in which it arrives.
2745 STACK_PARM is an RTX for a stack slot where the parameter can live
2746 during the function (in case we want to put it there).
2747 STACK_PARM is 0 if no stack slot was pushed for it.
2748
2749 Now output code if necessary to convert ENTRY_PARM to
2750 the type in which this function declares it,
2751 and store that result in an appropriate place,
2752 which may be a pseudo reg, may be STACK_PARM,
2753 or may be a local stack slot if STACK_PARM is 0.
2754
2755 Set DECL_RTL to that place. */
2756
2757 if (nominal_mode == BLKmode)
2758 {
2759 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2760 if (GET_CODE (entry_parm) == REG)
2761 {
2762 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2763 UNITS_PER_WORD);
2764
2765 /* Note that we will be storing an integral number of words.
2766 So we have to be careful to ensure that we allocate an
2767 integral number of words. We do this below in the
2768 assign_stack_local if space was not allocated in the argument
2769 list. If it was, this will not work if PARM_BOUNDARY is not
2770 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2771 if it becomes a problem. */
2772
2773 if (stack_parm == 0)
2774 stack_parm
2775 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2776 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2777 abort ();
2778
2779 move_block_from_reg (REGNO (entry_parm),
2780 validize_mem (stack_parm),
2781 size_stored / UNITS_PER_WORD);
2782 }
2783 DECL_RTL (parm) = stack_parm;
2784 }
2785 else if (! (
2786#if 0 /* This change was turned off because it makes compilation bigger. */
2787 !optimize
2788#else /* It's not clear why the following was replaced. */
b335c2cc 2789 /* Obsoleted by preceding line. */
6f086dfc
RS
2790 (obey_regdecls && ! TREE_REGDECL (parm)
2791 && ! TREE_INLINE (fndecl))
2792#endif
2793 /* layout_decl may set this. */
2794 || TREE_ADDRESSABLE (parm)
2795 || TREE_SIDE_EFFECTS (parm)
2796 /* If -ffloat-store specified, don't put explicit
2797 float variables into registers. */
2798 || (flag_float_store
2799 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2800 /* Always assign pseudo to structure return or item passed
2801 by invisible reference. */
2802 || passed_pointer || parm == function_result_decl)
2803 {
2804 /* Store the parm in a pseudoregister during the function. */
2805 register rtx parmreg = gen_reg_rtx (nominal_mode);
2806
2807 REG_USERVAR_P (parmreg) = 1;
2808
2809 /* If this was an item that we received a pointer to, set DECL_RTL
2810 appropriately. */
2811 if (passed_pointer)
2812 {
2813 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2814 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2815 }
2816 else
2817 DECL_RTL (parm) = parmreg;
2818
2819 /* Copy the value into the register. */
2820 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
86f8eff3
RK
2821 {
2822 /* If ENTRY_PARM is a hard register, it might be in a register
2823 not valid for operating in its mode (e.g., an odd-numbered
2824 register for a DFmode). In that case, moves are the only
2825 thing valid, so we can't do a convert from there. This
2826 occurs when the calling sequence allow such misaligned
2827 usages. */
2828 if (GET_CODE (entry_parm) == REG
2829 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2830 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2831 GET_MODE (entry_parm)))
2832 convert_move (parmreg, copy_to_reg (entry_parm));
2833 else
2834 convert_move (parmreg, validize_mem (entry_parm), 0);
2835 }
6f086dfc
RS
2836 else
2837 emit_move_insn (parmreg, validize_mem (entry_parm));
2838
2839 /* In any case, record the parm's desired stack location
2840 in case we later discover it must live in the stack. */
2841 if (REGNO (parmreg) >= nparmregs)
2842 {
2843 rtx *new;
2844 nparmregs = REGNO (parmreg) + 5;
2845 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2846 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2847 parm_reg_stack_loc = new;
2848 }
2849 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2850
2851 /* Mark the register as eliminable if we did no conversion
2852 and it was copied from memory at a fixed offset,
2853 and the arg pointer was not copied to a pseudo-reg.
2854 If the arg pointer is a pseudo reg or the offset formed
2855 an invalid address, such memory-equivalences
2856 as we make here would screw up life analysis for it. */
2857 if (nominal_mode == passed_mode
2858 && GET_CODE (entry_parm) == MEM
2859 && stack_offset.var == 0
2860 && reg_mentioned_p (virtual_incoming_args_rtx,
2861 XEXP (entry_parm, 0)))
2862 REG_NOTES (get_last_insn ())
2863 = gen_rtx (EXPR_LIST, REG_EQUIV,
2864 entry_parm, REG_NOTES (get_last_insn ()));
2865
2866 /* For pointer data type, suggest pointer register. */
2867 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2868 mark_reg_pointer (parmreg);
2869 }
2870 else
2871 {
2872 /* Value must be stored in the stack slot STACK_PARM
2873 during function execution. */
2874
2875 if (passed_mode != nominal_mode)
86f8eff3
RK
2876 {
2877 /* Conversion is required. */
2878 if (GET_CODE (entry_parm) == REG
2879 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2880 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2881 entry_parm = copy_to_reg (entry_parm);
2882
2883 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2884 }
6f086dfc
RS
2885
2886 if (entry_parm != stack_parm)
2887 {
2888 if (stack_parm == 0)
2889 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2890 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2891 emit_move_insn (validize_mem (stack_parm),
2892 validize_mem (entry_parm));
2893 }
2894
2895 DECL_RTL (parm) = stack_parm;
2896 }
2897
2898 /* If this "parameter" was the place where we are receiving the
2899 function's incoming structure pointer, set up the result. */
2900 if (parm == function_result_decl)
2901 DECL_RTL (DECL_RESULT (fndecl))
2902 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
2903
2904 if (TREE_THIS_VOLATILE (parm))
2905 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
2906 if (TREE_READONLY (parm))
2907 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
2908 }
2909
2910 max_parm_reg = max_reg_num ();
2911 last_parm_insn = get_last_insn ();
2912
2913 current_function_args_size = stack_args_size.constant;
2914
2915 /* Adjust function incoming argument size for alignment and
2916 minimum length. */
2917
2918#ifdef REG_PARM_STACK_SPACE
6f90e075 2919#ifndef MAYBE_REG_PARM_STACK_SPACE
6f086dfc
RS
2920 current_function_args_size = MAX (current_function_args_size,
2921 REG_PARM_STACK_SPACE (fndecl));
2922#endif
6f90e075 2923#endif
6f086dfc
RS
2924
2925#ifdef STACK_BOUNDARY
2926#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2927
2928 current_function_args_size
2929 = ((current_function_args_size + STACK_BYTES - 1)
2930 / STACK_BYTES) * STACK_BYTES;
2931#endif
2932
2933#ifdef ARGS_GROW_DOWNWARD
2934 current_function_arg_offset_rtx
2935 = (stack_args_size.var == 0 ? gen_rtx (CONST_INT, VOIDmode,
2936 -stack_args_size.constant)
2937 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
2938 size_int (-stack_args_size.constant)),
2939 0, VOIDmode, 0));
2940#else
2941 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2942#endif
2943
2944 /* See how many bytes, if any, of its args a function should try to pop
2945 on return. */
2946
2947 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
2948 current_function_args_size);
2949
2950 /* For stdarg.h function, save info about regs and stack space
2951 used by the named args. */
2952
2953 if (stdarg)
2954 current_function_args_info = args_so_far;
2955
2956 /* Set the rtx used for the function return value. Put this in its
2957 own variable so any optimizers that need this information don't have
2958 to include tree.h. Do this here so it gets done when an inlined
2959 function gets output. */
2960
2961 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
2962}
2963\f
2964/* Compute the size and offset from the start of the stacked arguments for a
2965 parm passed in mode PASSED_MODE and with type TYPE.
2966
2967 INITIAL_OFFSET_PTR points to the current offset into the stacked
2968 arguments.
2969
2970 The starting offset and size for this parm are returned in *OFFSET_PTR
2971 and *ARG_SIZE_PTR, respectively.
2972
2973 IN_REGS is non-zero if the argument will be passed in registers. It will
2974 never be set if REG_PARM_STACK_SPACE is not defined.
2975
2976 FNDECL is the function in which the argument was defined.
2977
2978 There are two types of rounding that are done. The first, controlled by
2979 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
2980 list to be aligned to the specific boundary (in bits). This rounding
2981 affects the initial and starting offsets, but not the argument size.
2982
2983 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
2984 optionally rounds the size of the parm to PARM_BOUNDARY. The
2985 initial offset is not affected by this rounding, while the size always
2986 is and the starting offset may be. */
2987
2988/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
2989 initial_offset_ptr is positive because locate_and_pad_parm's
2990 callers pass in the total size of args so far as
2991 initial_offset_ptr. arg_size_ptr is always positive.*/
2992
/* Forward declarations of the padding helpers defined below.  */
static void pad_to_arg_alignment (), pad_below ();

void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr)
     enum machine_mode passed_mode;	/* mode the parm is passed in */
     tree type;				/* type of the parm, or 0 if unknown */
     int in_regs;			/* nonzero if passed in registers */
     tree fndecl;			/* function the parm belongs to */
     struct args_size *initial_offset_ptr; /* in/out: offset into stacked args so far */
     struct args_size *offset_ptr;	/* out: starting offset for this parm */
     struct args_size *arg_size_ptr;	/* out: size this parm occupies */
{
  /* Size of the parm: from its type when known, else from its mode.  */
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  /* NOTE(review): BOUNDARY_IN_BYTES is not referenced below in this
     function; the byte conversion is done again in pad_to_arg_alignment.  */
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  int reg_parm_stack_space = 0;

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      size_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  /* Offsets are negative here; negate the accumulated size so far
     to get this parm's (pre-rounding) ending offset.  */
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  if (where_pad == upward
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  /* Move down past the parm itself, then align its start.  */
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  pad_to_arg_alignment (offset_ptr, boundary);
  /* The occupied size is the distance from the previous offset
     down to the (aligned) new one.  */
  if (initial_offset_ptr->var)
    {
      arg_size_ptr->var = size_binop (MINUS_EXPR,
				      size_binop (MINUS_EXPR,
						  integer_zero_node,
						  initial_offset_ptr->var),
				      offset_ptr->var);
    }
  else
    {
      arg_size_ptr->constant = (- initial_offset_ptr->constant -
				offset_ptr->constant);
    }
/*  ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
  if (where_pad == downward)
    pad_below (arg_size_ptr, passed_mode, sizetree);
#else /* !ARGS_GROW_DOWNWARD */
  /* Align the start of this parm, then account for below-padding,
     push rounding, and PARM_BOUNDARY rounding in the size.  */
  pad_to_arg_alignment (initial_offset_ptr, boundary);
  *offset_ptr = *initial_offset_ptr;
  if (where_pad == downward)
    pad_below (offset_ptr, passed_mode, sizetree);

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
3090
/* Round the offset in *OFFSET_PTR to a multiple of BOUNDARY bits:
   down when arguments grow downward, up otherwise.  Does nothing when
   BOUNDARY is no more than one storage unit.  */

static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  /* Variable offset: fold constant and variable parts into one
	     tree and round that.  */
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	}
      else
	offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
3121
3122static void
3123pad_below (offset_ptr, passed_mode, sizetree)
3124 struct args_size *offset_ptr;
3125 enum machine_mode passed_mode;
3126 tree sizetree;
3127{
3128 if (passed_mode != BLKmode)
3129 {
3130 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3131 offset_ptr->constant
3132 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3133 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3134 - GET_MODE_SIZE (passed_mode));
3135 }
3136 else
3137 {
3138 if (TREE_CODE (sizetree) != INTEGER_CST
3139 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3140 {
3141 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3142 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3143 /* Add it in. */
3144 ADD_PARM_SIZE (*offset_ptr, s2);
3145 SUB_PARM_SIZE (*offset_ptr, sizetree);
3146 }
3147 }
3148}
3149
3150static tree
3151round_down (value, divisor)
3152 tree value;
3153 int divisor;
3154{
3155 return size_binop (MULT_EXPR,
3156 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3157 size_int (divisor));
3158}
3159\f
3160/* Walk the tree of blocks describing the binding levels within a function
3161 and warn about uninitialized variables.
3162 This is done after calling flow_analysis and before global_alloc
3163 clobbers the pseudo-regs to hard regs. */
3164
3165void
3166uninitialized_vars_warning (block)
3167 tree block;
3168{
3169 register tree decl, sub;
3170 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3171 {
3172 if (TREE_CODE (decl) == VAR_DECL
3173 /* These warnings are unreliable for and aggregates
3174 because assigning the fields one by one can fail to convince
3175 flow.c that the entire aggregate was initialized.
3176 Unions are troublesome because members may be shorter. */
3177 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3178 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3179 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3180 && DECL_RTL (decl) != 0
3181 && GET_CODE (DECL_RTL (decl)) == REG
3182 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3183 warning_with_decl (decl,
3184 "`%s' may be used uninitialized in this function");
3185 if (TREE_CODE (decl) == VAR_DECL
3186 && DECL_RTL (decl) != 0
3187 && GET_CODE (DECL_RTL (decl)) == REG
3188 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3189 warning_with_decl (decl,
3190 "variable `%s' may be clobbered by `longjmp'");
3191 }
3192 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3193 uninitialized_vars_warning (sub);
3194}
3195
3196/* Do the appropriate part of uninitialized_vars_warning
3197 but for arguments instead of local variables. */
3198
3199void
3200setjmp_args_warning (block)
3201 tree block;
3202{
3203 register tree decl;
3204 for (decl = DECL_ARGUMENTS (current_function_decl);
3205 decl; decl = TREE_CHAIN (decl))
3206 if (DECL_RTL (decl) != 0
3207 && GET_CODE (DECL_RTL (decl)) == REG
3208 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3209 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3210}
3211
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'. */
3214
void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  /* Scan every variable bound in BLOCK; force register-resident ones
     into the stack unless the user asked for a register.  */
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variable because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! TREE_REGDECL (decl)))
      put_var_into_stack (decl);
  /* Recurse into nested binding contours.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
3243\f
3244/* Like the previous function, but for args instead of local variables. */
3245
void
setjmp_protect_args ()
{
  register tree decl, sub;
  /* Same policy as setjmp_protect, applied to the current function's
     parameters: move register-resident parms into the stack unless the
     user declared them `register'.  */
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! TREE_REGDECL (decl)))
      put_var_into_stack (decl);
}
3266\f
3267/* Return the context-pointer register corresponding to DECL,
3268 or 0 if it does not need one. */
3269
3270rtx
3271lookup_static_chain (decl)
3272 tree decl;
3273{
3274 tree context = decl_function_context (decl);
3275 tree link;
3276
3277 if (context == 0)
3278 return 0;
3279
3280 /* We treat inline_function_decl as an alias for the current function
3281 because that is the inline function whose vars, types, etc.
3282 are being merged into the current function.
3283 See expand_inline_function. */
3284 if (context == current_function_decl || context == inline_function_decl)
3285 return virtual_stack_vars_rtx;
3286
3287 for (link = context_display; link; link = TREE_CHAIN (link))
3288 if (TREE_PURPOSE (link) == context)
3289 return RTL_EXPR_RTL (TREE_VALUE (link));
3290
3291 abort ();
3292}
3293\f
3294/* Convert a stack slot address ADDR for variable VAR
3295 (from a containing function)
3296 into an address valid in this function (using a static chain). */
3297
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the saved state of the containing function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.
     Any other shape of address is not handled.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      /* Deliberately shadows the ADDR parameter, which is fully
	 decoded into BASEREG/DISPLACEMENT by this point.  */
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
3379\f
3380/* Return the address of the trampoline for entering nested fn FUNCTION.
3381 If necessary, allocate a trampoline (in the stack frame)
3382 and emit rtl to initialize its contents (at entry to this function). */
3383
rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  First check this
     function's list, then the lists of all enclosing functions.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  /* The trampoline lives in an outer frame; translate its
	     address into one valid in the current function.  */
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.
     FP stays 0 when that is the current function.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      /* Allocate the list node on the defining function's obstacks so it
	 lives as long as that function's other data.  */
      push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  /* Return the trampoline's address as seen from the current function,
     rounded to any required alignment.  */
  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
3460
3461/* Given a trampoline address,
3462 round it to multiple of TRAMPOLINE_ALIGNMENT. */
3463
static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary:
     tramp = (tramp + ALIGN - 1) & -ALIGN, computed in Pmode.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_ALIGNMENT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			gen_rtx (CONST_INT, VOIDmode, - TRAMPOLINE_ALIGNMENT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  /* Without TRAMPOLINE_ALIGNMENT, the address is returned unchanged.  */
  return tramp;
}
3480\f
3481/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3482 and initialize static variables for generating RTL for the statements
3483 of the function. */
3484
void
init_function_start (subr, filename, line)
     tree subr;			/* FUNCTION_DECL of the function to compile */
     char *filename;		/* source file, for the initial line note */
     int line;			/* source line, for the initial line note */
{
  /* Scratch for decl_printable_name's second (output) argument.  */
  char *junk;

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (0, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	current_function_returns_pcc_struct = 1;
      else
#endif
	current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}
3627
3628/* Indicate that the current function uses extra args
3629 not explicitly mentioned in the argument list in any fashion. */
3630
3631void
3632mark_varargs ()
3633{
3634 current_function_varargs = 1;
3635}
3636
3637/* Expand a call to __main at the beginning of a possible main function. */
3638
void
expand_main_function ()
{
  /* Emit the call only when the target has no .init-section mechanism,
     or explicitly asks for __main to be invoked anyway.  */
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
		     VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
}
3647\f
3648/* Start the RTL for a new function, and set variables used for
3649 emitting RTL.
3650 SUBR is the FUNCTION_DECL node.
3651 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3652 the function's parameters, which must be run at any return statement. */
3653
3654void
3655expand_function_start (subr, parms_have_cleanups)
3656 tree subr;
3657 int parms_have_cleanups;
3658{
3659 register int i;
3660 tree tem;
3661 rtx last_ptr;
3662
3663 /* Make sure volatile mem refs aren't considered
3664 valid operands of arithmetic insns. */
3665 init_recog_no_volatile ();
3666
3667 /* If function gets a static chain arg, store it in the stack frame.
3668 Do this first, so it gets the first stack slot offset. */
3669 if (current_function_needs_context)
3670 emit_move_insn (assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0),
3671 static_chain_incoming_rtx);
3672
3673 /* If the parameters of this function need cleaning up, get a label
3674 for the beginning of the code which executes those cleanups. This must
3675 be done before doing anything with return_label. */
3676 if (parms_have_cleanups)
3677 cleanup_label = gen_label_rtx ();
3678 else
3679 cleanup_label = 0;
3680
3681 /* Make the label for return statements to jump to, if this machine
3682 does not have a one-instruction return and uses an epilogue,
3683 or if it returns a structure, or if it has parm cleanups. */
3684#ifdef HAVE_return
3685 if (cleanup_label == 0 && HAVE_return
3686 && ! current_function_returns_pcc_struct
3687 && ! (current_function_returns_struct && ! optimize))
3688 return_label = 0;
3689 else
3690 return_label = gen_label_rtx ();
3691#else
3692 return_label = gen_label_rtx ();
3693#endif
3694
3695 /* Initialize rtx used to return the value. */
3696 /* Do this before assign_parms so that we copy the struct value address
3697 before any library calls that assign parms might generate. */
3698
3699 /* Decide whether to return the value in memory or in a register. */
3700 if (aggregate_value_p (DECL_RESULT (subr)))
3701 {
3702 /* Returning something that won't go in a register. */
3703 register rtx value_address;
3704
3705#ifdef PCC_STATIC_STRUCT_RETURN
3706 if (current_function_returns_pcc_struct)
3707 {
3708 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3709 value_address = assemble_static_space (size);
3710 }
3711 else
3712#endif
3713 {
3714 /* Expect to be passed the address of a place to store the value.
3715 If it is passed as an argument, assign_parms will take care of
3716 it. */
3717 if (struct_value_incoming_rtx)
3718 {
3719 value_address = gen_reg_rtx (Pmode);
3720 emit_move_insn (value_address, struct_value_incoming_rtx);
3721 }
3722 }
3723 if (value_address)
3724 DECL_RTL (DECL_RESULT (subr))
3725 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
3726 value_address);
3727 }
3728 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3729 /* If return mode is void, this decl rtl should not be used. */
3730 DECL_RTL (DECL_RESULT (subr)) = 0;
3731 else if (parms_have_cleanups)
3732 /* If function will end with cleanup code for parms,
3733 compute the return values into a pseudo reg,
3734 which we will copy into the true return register
3735 after the cleanups are done. */
3736 DECL_RTL (DECL_RESULT (subr))
3737 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
3738 else
3739 /* Scalar, returned in a register. */
3740 {
3741#ifdef FUNCTION_OUTGOING_VALUE
3742 DECL_RTL (DECL_RESULT (subr))
3743 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3744#else
3745 DECL_RTL (DECL_RESULT (subr))
3746 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3747#endif
3748
3749 /* Mark this reg as the function's return value. */
3750 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
3751 {
3752 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
3753 /* Needed because we may need to move this to memory
3754 in case it's a named return value whose address is taken. */
3755 TREE_REGDECL (DECL_RESULT (subr)) = 1;
3756 }
3757 }
3758
3759 /* Initialize rtx for parameters and local variables.
3760 In some cases this requires emitting insns. */
3761
3762 assign_parms (subr, 0);
3763
3764 /* The following was moved from init_function_start.
3765 The move is supposed to make sdb output more accurate. */
3766 /* Indicate the beginning of the function body,
3767 as opposed to parm setup. */
3768 emit_note (0, NOTE_INSN_FUNCTION_BEG);
3769
3770 /* If doing stupid allocation, mark parms as born here. */
3771
3772 if (GET_CODE (get_last_insn ()) != NOTE)
3773 emit_note (0, NOTE_INSN_DELETED);
3774 parm_birth_insn = get_last_insn ();
3775
3776 if (obey_regdecls)
3777 {
3778 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
3779 use_variable (regno_reg_rtx[i]);
3780
3781 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
3782 use_variable (current_function_internal_arg_pointer);
3783 }
3784
3785 /* Fetch static chain values for containing functions. */
3786 tem = decl_function_context (current_function_decl);
3787 if (tem)
3788 last_ptr = copy_to_reg (static_chain_incoming_rtx);
3789 context_display = 0;
3790 while (tem)
3791 {
3792 tree rtlexp = make_node (RTL_EXPR);
3793
3794 RTL_EXPR_RTL (rtlexp) = last_ptr;
3795 context_display = tree_cons (tem, rtlexp, context_display);
3796 tem = decl_function_context (tem);
3797 if (tem == 0)
3798 break;
3799 /* Chain thru stack frames, assuming pointer to next lexical frame
3800 is found at the place we always store it. */
3801#ifdef FRAME_GROWS_DOWNWARD
3802 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
3803#endif
3804 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
3805 memory_address (Pmode, last_ptr)));
3806 }
3807
3808 /* After the display initializations is where the tail-recursion label
3809 should go, if we end up needing one. Ensure we have a NOTE here
3810 since some things (like trampolines) get placed before this. */
3811 tail_recursion_reentry = emit_note (0, NOTE_INSN_DELETED);
3812
3813 /* Evaluate now the sizes of any types declared among the arguments. */
3814 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
3815 expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0);
3816
3817 /* Make sure there is a line number after the function entry setup code. */
3818 force_next_line_note ();
3819}
3820\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file,
   used to emit the final line note (SDB requires one here).

   Emits, in order: the arg-pointer restore and trampoline
   initializations (inserted back at function entry, before
   TAIL_RECURSION_REENTRY), the NOTE_INSN_FUNCTION_END note, the
   return label, stack save/restore around alloca, the copy of a
   pseudo-reg return value into the hard return register, and the
   struct-value-address setup.  */

/* It is up to language-specific callers to do cleanups for parameters.  */

void
expand_function_end (filename, line)
     char *filename;
     int line;
{
  register int i;
  tree link;

  /* Shared MEM holding the trampoline template for this whole
     compilation; built lazily the first time a trampoline is needed.  */
  static rtx initial_trampoline;

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      setjmp_protect (DECL_INITIAL (current_function_decl));
      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.
     The move is placed at function entry (before tail_recursion_reentry),
     not here at the end.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.
     Each element of trampoline_list pairs a nested function
     (TREE_PURPOSE) with an RTL_EXPR holding its trampoline slot
     (TREE_VALUE).  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  /* Allocate the template in the permanent obstack so it
	     survives across functions.  */
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}

      /* Generate insns to initialize the trampoline: copy the template
	 into the (alignment-rounded) slot, then let the target macro
	 patch in the nested function's address and static chain.  */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
			      round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_SIZE),
		       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
			     XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* Clear the trampoline_list for the next function.  */
  trampoline_list = 0;

#if 0 /* I think unused parms are legitimate enough.  */
  /* Warn about unused parms.  */
  if (warn_unused)
    {
      rtx decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
	  warning_with_decl (decl, "unused parameter `%s'");
    }
#endif

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence (0);

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      /* Emit a USE for every parm pseudo so stupid allocation keeps
	 it live to the end of the function.  */
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  /* Any pending stack adjustment is dead at function end; cancel it
     rather than emitting it.  */
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (0, NOTE_INSN_FUNCTION_END);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	/* Save the stack pointer at function entry and restore it
	   here, after the return label, so the epilogue sees the
	   pre-alloca value.  */
	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, 0);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      /* The USE keeps the hard return register live up to the exit.  */
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  fixup_gotos (0, 0, 0, get_insns (), 0);
}
This page took 0.388182 seconds and 5 git commands to generate.