]> gcc.gnu.org Git - gcc.git/blame - gcc/function.c
entered into RCS
[gcc.git] / gcc / function.c
CommitLineData
6f086dfc
RS
1/* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40#include "config.h"
41
42#include <stdio.h>
43
44#include "rtl.h"
45#include "tree.h"
46#include "flags.h"
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
56
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
75
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0 /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

/* Forward declarations for the static helpers defined later in this file.
   (Pre-ANSI style: no prototypes, just return types.)  */
static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
312\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx (a MEM) used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
354\f
355/* Pointer to chain of `struct function' for containing functions. */
356struct function *outer_function_chain;
357
358/* Given a function decl for a containing function,
359 return the `struct function' for it. */
360
361struct function *
362find_function_data (decl)
363 tree decl;
364{
365 struct function *p;
366 for (p = outer_function_chain; p; p = p->next)
367 if (p->decl == decl)
368 return p;
369 abort ();
370}
371
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  /* Allocate a record for all the compilation status of the current
     function and push it onto the chain of enclosing functions.  */
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Copy every `current_function_*' global (and the related file-static
     state) into the record, so pop_function_context can restore it.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  /* No variables of ours have been moved to the stack by the nested
     function yet; put_var_into_stack will queue them here if needed.  */
  p->fixup_var_refs_queue = 0;

  /* Save the status of the other compilation subsystems, then
     reinitialize rtl emission for the nested function.  */
  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}
433
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  /* Restore every global that push_function_context saved.  */
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  /* The function we just finished compiling was nested inside this one.  */
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
505\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  /* frame_offset is negative when the frame grows downward; the size
     needed is its magnitude.  */
  return -frame_offset;
#else
  return frame_offset;
#endif
}
521
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Translate the ALIGN convention into a byte alignment.  */
  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  /* Record the slot so unshare_all_rtl can find it.  */
  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
598
599/* Assign a stack slot in a containing function.
600 First three arguments are same as in preceding function.
601 The last argument specifies the function to allocate in. */
602
603rtx
604assign_outer_stack_local (mode, size, align, function)
605 enum machine_mode mode;
606 int size;
607 int align;
608 struct function *function;
609{
610 register rtx x, addr;
611 int bigend_correction = 0;
612 int alignment;
613
614 /* Allocate in the memory associated with the function in whose frame
615 we are assigning. */
616 push_obstacks (function->function_obstack,
617 function->function_maybepermanent_obstack);
618
619 if (align == 0)
620 {
621 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
622 if (mode == BLKmode)
623 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
624 }
625 else if (align == -1)
626 {
627 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
628 size = CEIL_ROUND (size, alignment);
629 }
630 else
631 alignment = align / BITS_PER_UNIT;
632
6f086dfc
RS
633 /* Round frame offset to that alignment. */
634#ifdef FRAME_GROWS_DOWNWARD
635 frame_offset = FLOOR_ROUND (frame_offset, alignment);
636#else
637 frame_offset = CEIL_ROUND (frame_offset, alignment);
638#endif
639
640 /* On a big-endian machine, if we are allocating more space than we will use,
641 use the least significant bytes of those that are allocated. */
642#if BYTES_BIG_ENDIAN
643 if (mode != BLKmode)
644 bigend_correction = size - GET_MODE_SIZE (mode);
645#endif
646
647#ifdef FRAME_GROWS_DOWNWARD
648 function->frame_offset -= size;
649#endif
650 addr = plus_constant (virtual_stack_vars_rtx,
651 function->frame_offset + bigend_correction);
652#ifndef FRAME_GROWS_DOWNWARD
653 function->frame_offset += size;
654#endif
655
656 x = gen_rtx (MEM, mode, addr);
657
658 function->stack_slot_list
659 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
660
661 pop_obstacks ();
662
663 return x;
664}
665\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
721\f
722/* If X could be a reference to a temporary slot, mark that slot as belonging
723 to the to one level higher. If X matched one of our slots, just mark that
724 one. Otherwise, we can't easily predict which it is, so upgrade all of
725 them. Kept slots need not be touched.
726
727 This is called when an ({...}) construct occurs and a statement
728 returns a value in memory. */
729
730void
731preserve_temp_slots (x)
732 rtx x;
733{
734 struct temp_slot *p;
735
736 /* If X is not in memory or is at a constant address, it cannot be in
737 a temporary slot. */
738 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
739 return;
740
741 /* First see if we can find a match. */
742 for (p = temp_slots; p; p = p->next)
743 if (p->in_use && x == p->slot)
744 {
745 p->level--;
746 return;
747 }
748
749 /* Otherwise, preserve all non-kept slots at this level. */
750 for (p = temp_slots; p; p = p->next)
751 if (p->in_use && p->level == temp_slot_level && ! p->keep)
752 p->level--;
753}
754
755/* Free all temporaries used so far. This is normally called at the end
756 of generating code for a statement. */
757
758void
759free_temp_slots ()
760{
761 struct temp_slot *p;
762
763 for (p = temp_slots; p; p = p->next)
764 if (p->in_use && p->level == temp_slot_level && ! p->keep)
765 p->in_use = 0;
766}
767
768/* Push deeper into the nesting level for stack temporaries. */
769
770void
771push_temp_slots ()
772{
773 /* For GNU C++, we must allow a sequence to be emitted anywhere in
774 the level where the sequence was started. By not changing levels
775 when the compiler is inside a sequence, the temporaries for the
776 sequence and the temporaries will not unwittingly conflict with
777 the temporaries for other sequences and/or code at that level. */
778 if (in_sequence_p ())
779 return;
780
781 temp_slot_level++;
782}
783
784/* Pop a temporary nesting level. All slots in use in the current level
785 are freed. */
786
787void
788pop_temp_slots ()
789{
790 struct temp_slot *p;
791
792 /* See comment in push_temp_slots about why we don't change levels
793 in sequences. */
794 if (in_sequence_p ())
795 return;
796
797 for (p = temp_slots; p; p = p->next)
798 if (p->in_use && p->level == temp_slot_level)
799 p->in_use = 0;
800
801 temp_slot_level--;
802}
803\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  if (GET_CODE (reg) != REG)
    return;

  /* Pick a stack slot: reuse the parm's preassigned home if REG is a
     parameter copy, otherwise allocate a fresh slot — in the containing
     function's frame if the variable is inherited from one.  */
  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (GET_MODE (reg),
                                        GET_MODE_SIZE (GET_MODE (reg)),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (GET_MODE (reg),
                                  GET_MODE_SIZE (GET_MODE (reg)),
                                  0);
    }

  /* Turn REG itself into a MEM in place, so existing rtl that shares
     this rtx sees the new location.  Clear the REG-only flag first.  */
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);
      temp
        = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
893\f
/* Fix up all references to VAR, which has just been moved from a
   register to a stack slot, throughout the insns emitted so far:
   the main insn chain, all pending sequences, and all waiting
   RTL_EXPR sequences.  */

static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      /* const0_rtx marks an RTL_EXPR whose sequence was already used.  */
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, seq, 0);
          end_sequence ();
        }
    }
}
929\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  /* The rtx being replaced.  */
  rtx old;
  /* The rtx to substitute for OLD; 0 until a replacement is chosen.  */
  rtx new;
  /* Next entry in the list.  */
  struct fixup_replacement *next;
};
942
943/* REPLACEMENTS is a pointer to a list of the above structures and X is
944 some part of an insn. Return a struct fixup_replacement whose OLD
945 value is equal to X. Allocate a new structure if no such entry exists. */
946
947static struct fixup_replacement *
948find_replacement (replacements, x)
949 struct fixup_replacement **replacements;
950 rtx x;
951{
952 struct fixup_replacement *p;
953
954 /* See if we have already replaced this. */
955 for (p = *replacements; p && p->old != x; p = p->next)
956 ;
957
958 if (p == 0)
959 {
960 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
961 p->old = x;
962 p->new = 0;
963 p->next = *replacements;
964 *replacements = p;
965 }
966
967 return p;
968}
969
970/* Scan the insn-chain starting with INSN for refs to VAR
971 and fix them up. TOPLEVEL is nonzero if this chain is the
972 main chain of insns for the current function. */
973
974static void
975fixup_var_refs_insns (var, insn, toplevel)
976 rtx var;
977 rtx insn;
978 int toplevel;
979{
980 while (insn)
981 {
982 rtx next = NEXT_INSN (insn);
983 rtx note;
984 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
985 || GET_CODE (insn) == JUMP_INSN)
986 {
987 /* The insn to load VAR from a home in the arglist
988 is now a no-op. When we see it, just delete it. */
989 if (toplevel
990 && GET_CODE (PATTERN (insn)) == SET
991 && SET_DEST (PATTERN (insn)) == var
992 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
993 {
994 next = delete_insn (insn);
995 if (insn == last_parm_insn)
996 last_parm_insn = PREV_INSN (next);
997 }
998 else
999 {
1000 /* See if we have to do anything to INSN now that VAR is in
1001 memory. If it needs to be loaded into a pseudo, use a single
1002 pseudo for the entire insn in case there is a MATCH_DUP
1003 between two operands. We pass a pointer to the head of
1004 a list of struct fixup_replacements. If fixup_var_refs_1
1005 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1006 it will record them in this list.
1007
1008 If it allocated a pseudo for any replacement, we copy into
1009 it here. */
1010
1011 struct fixup_replacement *replacements = 0;
1012
1013 fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);
1014
1015 while (replacements)
1016 {
1017 if (GET_CODE (replacements->new) == REG)
1018 {
1019 rtx insert_before;
1020
1021 /* OLD might be a (subreg (mem)). */
1022 if (GET_CODE (replacements->old) == SUBREG)
1023 replacements->old
1024 = fixup_memory_subreg (replacements->old, insn, 0);
1025 else
1026 replacements->old
1027 = fixup_stack_1 (replacements->old, insn);
1028
1029 /* We can not separate USE insns from the CALL_INSN
1030 that they belong to. If this is a CALL_INSN, insert
b335c2cc 1031 the move insn before the USE insns preceding it
6f086dfc
RS
1032 instead of immediately before the insn. */
1033 if (GET_CODE (insn) == CALL_INSN)
1034 {
1035 insert_before = insn;
1036 while (GET_CODE (PREV_INSN (insert_before)) == INSN
1037 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
1038 insert_before = PREV_INSN (insert_before);
1039 }
1040 else
1041 insert_before = insn;
1042
1043 emit_insn_before (gen_move_insn (replacements->new,
1044 replacements->old),
1045 insert_before);
1046 }
1047
1048 replacements = replacements->next;
1049 }
1050 }
1051
1052 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1053 But don't touch other insns referred to by reg-notes;
1054 we will get them elsewhere. */
1055 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1056 if (GET_CODE (note) != INSN_LIST)
1057 XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
1058 }
1059 insn = next;
1060 }
1061}
1062\f
/* VAR is a MEM that used to be a pseudo register.  See if the rtx expression
   at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, loc, insn, replacements)
     register rtx var;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  /* Dispatch on the kind of expression at *LOC; anything not handled
     specially falls through to the generic operand walk at the end.  */
  switch (code)
    {
    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register, we can leave things
	     the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* Leaf rtx's cannot contain VAR (VAR itself is a MEM now).  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    tem = fixup_memory_subreg (tem, insn, 1);
	  tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      /* NOTE(review): WIDTH is computed but not otherwise used
		 in this function.  */
	      int width = INTVAL (XEXP (x, 1));
	      int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  int offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		  offset = (GET_MODE_SIZE (is_mode)
			    - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx (MEM, wanted_mode,
				    plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = gen_rtx (CONST_INT, VOIDmode, pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR. */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	/* Strip off wrappers to find the rtx that VAR itself would
	   occupy; OUTERDEST keeps the original destination.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
	    fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
	    fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 1);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode
		  = insn_operand_mode[(int) CODE_FOR_insv][0];
		enum machine_mode is_mode = GET_MODE (tem);
		/* NOTE(review): WIDTH is computed but not otherwise used
		   in this function.  */
		int width = INTVAL (XEXP (outerdest, 1));
		int pos = INTVAL (XEXP (outerdest, 2));

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    int offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    /* If bytes and bits are counted differently, we
		       must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx (MEM, wanted_mode,
				      plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = gen_rtx (CONST_INT, VOIDmode, pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && recog_memoized (insn) >= 0)
	  {
	    replacement = find_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      {
		SET_SRC (x) = replacement->new;
		return;
	      }
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);
	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && recog_memoized (insn) >= 0)
	  {
	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest;
	    tem = SET_DEST (x);
	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (tem) == STRICT_LOW_PART)
	      tem = XEXP (tem, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (tem) == SUBREG)
	      fixeddest = fixup_memory_subreg (tem, insn, 0);
	    else
	      fixeddest = fixup_stack_1 (tem, insn);

	    temp = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_after (gen_move_insn (fixeddest, temp), insn);
	    SET_DEST (x) = temp;
	  }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
	}
    }
}
1478\f
1479/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1480 return an rtx (MEM:m1 newaddr) which is equivalent.
1481 If any insns must be emitted to compute NEWADDR, put them before INSN.
1482
1483 UNCRITICAL nonzero means accept paradoxical subregs.
1484 This is used for subregs found inside of ZERO_EXTRACTs. */
1485
1486static rtx
1487fixup_memory_subreg (x, insn, uncritical)
1488 rtx x;
1489 rtx insn;
1490 int uncritical;
1491{
1492 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1493 rtx addr = XEXP (SUBREG_REG (x), 0);
1494 enum machine_mode mode = GET_MODE (x);
1495 rtx saved, result;
1496
1497 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1498 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1499 && ! uncritical)
1500 abort ();
1501
1502#if BYTES_BIG_ENDIAN
1503 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1504 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1505#endif
1506 addr = plus_constant (addr, offset);
1507 if (!flag_force_addr && memory_address_p (mode, addr))
1508 /* Shortcut if no insns need be emitted. */
1509 return change_address (SUBREG_REG (x), mode, addr);
1510 start_sequence ();
1511 result = change_address (SUBREG_REG (x), mode, addr);
1512 emit_insn_before (gen_sequence (), insn);
1513 end_sequence ();
1514 return result;
1515}
1516
1517/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1518 Replace subexpressions of X in place.
1519 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1520 Otherwise return X, with its contents possibly altered.
1521
1522 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1523
1524static rtx
1525walk_fixup_memory_subreg (x, insn)
1526 register rtx x;
1527 rtx insn;
1528{
1529 register enum rtx_code code;
1530 register char *fmt;
1531 register int i;
1532
1533 if (x == 0)
1534 return 0;
1535
1536 code = GET_CODE (x);
1537
1538 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1539 return fixup_memory_subreg (x, insn, 0);
1540
1541 /* Nothing special about this RTX; fix its operands. */
1542
1543 fmt = GET_RTX_FORMAT (code);
1544 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1545 {
1546 if (fmt[i] == 'e')
1547 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1548 if (fmt[i] == 'E')
1549 {
1550 register int j;
1551 for (j = 0; j < XVECLEN (x, i); j++)
1552 XVECEXP (x, i, j)
1553 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1554 }
1555 }
1556 return x;
1557}
1558\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

/* NOTE(review): this function is compiled out by the surrounding #if 0;
   fixup_stack_1 below is invoked from fixup_var_refs_1 instead.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	    || GET_CODE (insn) == JUMP_INSN)
	  fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif
1580
1581/* For each memory ref within X, if it refers to a stack slot
1582 with an out of range displacement, put the address in a temp register
1583 (emitting new insns before INSN to load these registers)
1584 and alter the memory ref to use that register.
1585 Replace each such MEM rtx with a copy, to avoid clobberage. */
1586
1587static rtx
1588fixup_stack_1 (x, insn)
1589 rtx x;
1590 rtx insn;
1591{
1592 register int i;
1593 register RTX_CODE code = GET_CODE (x);
1594 register char *fmt;
1595
1596 if (code == MEM)
1597 {
1598 register rtx ad = XEXP (x, 0);
1599 /* If we have address of a stack slot but it's not valid
1600 (displacement is too large), compute the sum in a register. */
1601 if (GET_CODE (ad) == PLUS
1602 && GET_CODE (XEXP (ad, 0)) == REG
1603 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1604 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1605 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1606 {
1607 rtx temp, seq;
1608 if (memory_address_p (GET_MODE (x), ad))
1609 return x;
1610
1611 start_sequence ();
1612 temp = copy_to_reg (ad);
1613 seq = gen_sequence ();
1614 end_sequence ();
1615 emit_insn_before (seq, insn);
1616 return change_address (x, VOIDmode, temp);
1617 }
1618 return x;
1619 }
1620
1621 fmt = GET_RTX_FORMAT (code);
1622 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1623 {
1624 if (fmt[i] == 'e')
1625 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1626 if (fmt[i] == 'E')
1627 {
1628 register int j;
1629 for (j = 0; j < XVECLEN (x, i); j++)
1630 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1631 }
1632 }
1633 return x;
1634}
1635\f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;			/* Nonzero if the extract is the SET_DEST.  */
  rtx seq = 0;			/* Conversion insns to emit, if any.  */
  enum machine_mode mode;

  /* The bit-field reference may be either the destination (a store)
     or the source (an extract) of BODY.  */
  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset = INTVAL (XEXP (bitfield, 2));
	  /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
	  offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
	      offset -= (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			 - MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (memref))));
#endif
	    }

	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0)
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
1787\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

/* Set by instantiate_virtual_regs from FIRST_PARM_OFFSET (fndecl).  */
static int in_arg_offset;
/* Set by instantiate_virtual_regs from STARTING_FRAME_OFFSET.  */
static int var_offset;
/* Set by instantiate_virtual_regs from STACK_DYNAMIC_OFFSET (fndecl).  */
static int dynamic_offset;
/* Set by instantiate_virtual_regs from STACK_POINTER_OFFSET.  */
static int out_arg_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
1836
1837/* Pass through the INSNS of function FNDECL and convert virtual register
1838 references to hard register references. */
1839
1840void
1841instantiate_virtual_regs (fndecl, insns)
1842 tree fndecl;
1843 rtx insns;
1844{
1845 rtx insn;
1846
1847 /* Compute the offsets to use for this function. */
1848 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1849 var_offset = STARTING_FRAME_OFFSET;
1850 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1851 out_arg_offset = STACK_POINTER_OFFSET;
1852
1853 /* Scan all variables and parameters of this function. For each that is
1854 in memory, instantiate all virtual registers if the result is a valid
1855 address. If not, we do it later. That will handle most uses of virtual
1856 regs on many machines. */
1857 instantiate_decls (fndecl, 1);
1858
1859 /* Initialize recognition, indicating that volatile is OK. */
1860 init_recog ();
1861
1862 /* Scan through all the insns, instantiating every virtual register still
1863 present. */
1864 for (insn = insns; insn; insn = NEXT_INSN (insn))
1865 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1866 || GET_CODE (insn) == CALL_INSN)
1867 {
1868 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1869 instantiate_virtual_regs_1 (&REG_NOTES (insn), 0, 0);
1870 }
1871
1872 /* Now instantiate the remaining register equivalences for debugging info.
1873 These will not be valid addresses. */
1874 instantiate_decls (fndecl, 0);
1875
1876 /* Indicate that, from now on, assign_stack_local should use
1877 frame_pointer_rtx. */
1878 virtuals_instantiated = 1;
1879}
1880
1881/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1882 all virtual registers in their DECL_RTL's.
1883
1884 If VALID_ONLY, do this only if the resulting address is still valid.
1885 Otherwise, always do it. */
1886
1887static void
1888instantiate_decls (fndecl, valid_only)
1889 tree fndecl;
1890 int valid_only;
1891{
1892 tree decl;
1893
1894 if (TREE_INLINE (fndecl))
1895 /* When compiling an inline function, the obstack used for
1896 rtl allocation is the maybepermanent_obstack. Calling
1897 `resume_temporary_allocation' switches us back to that
1898 obstack while we process this function's parameters. */
1899 resume_temporary_allocation ();
1900
1901 /* Process all parameters of the function. */
1902 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1903 {
1904 if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM)
1905 instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
1906 valid_only ? DECL_RTL (decl) : 0, 0);
b335c2cc 1907#if 1 /* This is probably correct, but it seems to require fixes
86f8eff3 1908 elsewhere in order to work. Let's fix them in 2.1. */
6f086dfc
RS
1909 if (DECL_INCOMING_RTL (decl)
1910 && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM)
1911 instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
1912 valid_only ? DECL_INCOMING_RTL (decl) : 0,
1913 0);
86f8eff3 1914#endif
6f086dfc
RS
1915 }
1916
1917 /* Now process all variables defined in the function or its subblocks. */
1918 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1919
1920 if (TREE_INLINE (fndecl))
1921 {
1922 /* Save all rtl allocated for this function by raising the
1923 high-water mark on the maybepermanent_obstack. */
1924 preserve_data ();
1925 /* All further rtl allocation is now done in the current_obstack. */
1926 rtl_in_current_obstack ();
1927 }
1928}
1929
1930/* Subroutine of instantiate_decls: Process all decls in the given
1931 BLOCK node and all its subblocks. */
1932
1933static void
1934instantiate_decls_1 (let, valid_only)
1935 tree let;
1936 int valid_only;
1937{
1938 tree t;
1939
1940 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1941 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
1942 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
1943 valid_only ? DECL_RTL (t) : 0, 0);
1944
1945 /* Process all subblocks. */
1946 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1947 instantiate_decls_1 (t, valid_only);
1948}
1949\f
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if replacement
   is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;		/* May be 0; insns are then emitted nowhere
				   in particular and X is modified freely.  */
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;		/* Hard register that replaces a virtual reg.  */
  int offset;		/* Constant to add when NEW replaces the virtual reg.  */
  rtx temp;
  rtx seq;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* None of these can contain a virtual register.  */
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), 0);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), 0);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  /* We have already emitted the offset computation, so failing
	     here (or being called without EXTRA_INSNS) is fatal.  */
	  if (!validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  /* Tentatively rewrite (plus VIRT c) as (plus NEW c+offset),
	     remembering OLD so we can undo the change on failure.  */
	  old = XEXP (x, 0);
	  XEXP (x, 0) = new;
	  new = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with its
	     first operand.  */
	  if (new == const0_rtx
	      && validate_change (object, loc, XEXP (x, 0), 0))
	    return 1;

	  /* Next try to replace constant with new one.  */
	  if (!validate_change (object, &XEXP (x, 1), new, 0))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
		  XEXP (x, 0) = old;

		  start_sequence ();
		  temp = force_operand (new, 0);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case LSHIFT:   case ASHIFT:   case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ROTATERT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of regno_reg_rtx.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case USE:
    case CLOBBER:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      start_sequence ();
	      temp = force_operand (temp, 0);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
2302\f
2303/* Optimization: assuming this function does not receive nonlocal gotos,
2304 delete the handlers for such, as well as the insns to establish
2305 and disestablish them. */
2306
2307static void
2308delete_handlers ()
2309{
2310 rtx insn;
2311 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2312 {
2313 /* Delete the handler by turning off the flag that would
2314 prevent jump_optimize from deleting it.
2315 Also permit deletion of the nonlocal labels themselves
2316 if nothing local refers to them. */
2317 if (GET_CODE (insn) == CODE_LABEL)
2318 LABEL_PRESERVE_P (insn) = 0;
2319 if (GET_CODE (insn) == INSN
59257ff7
RK
2320 && ((nonlocal_goto_handler_slot != 0
2321 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2322 || (nonlocal_goto_stack_level != 0
2323 && reg_mentioned_p (nonlocal_goto_stack_level,
2324 PATTERN (insn)))))
6f086dfc
RS
2325 delete_insn (insn);
2326 }
2327}
2328
2329/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2330 of the current function. */
2331
2332rtx
2333nonlocal_label_rtx_list ()
2334{
2335 tree t;
2336 rtx x = 0;
2337
2338 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2339 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2340
2341 return x;
2342}
2343\f
2344/* Output a USE for any register use in RTL.
2345 This is used with -noreg to mark the extent of lifespan
2346 of any registers used in a user-visible variable's DECL_RTL. */
2347
2348void
2349use_variable (rtl)
2350 rtx rtl;
2351{
2352 if (GET_CODE (rtl) == REG)
2353 /* This is a register variable. */
2354 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2355 else if (GET_CODE (rtl) == MEM
2356 && GET_CODE (XEXP (rtl, 0)) == REG
2357 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2358 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2359 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2360 /* This is a variable-sized structure. */
2361 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2362}
2363
2364/* Like use_variable except that it outputs the USEs after INSN
2365 instead of at the end of the insn-chain. */
2366
2367void
2368use_variable_after (rtl, insn)
2369 rtx rtl, insn;
2370{
2371 if (GET_CODE (rtl) == REG)
2372 /* This is a register variable. */
2373 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2374 else if (GET_CODE (rtl) == MEM
2375 && GET_CODE (XEXP (rtl, 0)) == REG
2376 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2377 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2378 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2379 /* This is a variable-sized structure. */
2380 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2381}
2382\f
/* Accessor: return the current value of the file-static max_parm_reg
   (NOTE(review): presumably one past the highest register number used
   for a parameter -- confirm at its definition, which is outside this
   chunk).  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
2388
2389/* Return the first insn following those generated by `assign_parms'. */
2390
2391rtx
2392get_first_nonparm_insn ()
2393{
2394 if (last_parm_insn)
2395 return NEXT_INSN (last_parm_insn);
2396 return get_insns ();
2397}
2398
2399/* Return 1 if EXP returns an aggregate value, for which an address
2400 must be passed to the function or returned by the function. */
2401
2402int
2403aggregate_value_p (exp)
2404 tree exp;
2405{
2406 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2407 return 1;
2408 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2409 return 1;
2410 if (flag_pcc_struct_return
2411 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2412 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2413 return 1;
2414 return 0;
2415}
2416\f
2417/* Assign RTL expressions to the function's parameters.
2418 This may involve copying them into registers and using
2419 those registers as the RTL for them.
2420
2421 If SECOND_TIME is non-zero it means that this function is being
2422 called a second time. This is done by integrate.c when a function's
2423 compilation is deferred. We need to come back here in case the
2424 FUNCTION_ARG macro computes items needed for the rest of the compilation
2425 (such as changing which registers are fixed or caller-saved). But suppress
2426 writing any insns or setting DECL_RTL of anything in this case. */
2427
2428void
2429assign_parms (fndecl, second_time)
2430 tree fndecl;
2431 int second_time;
2432{
2433 register tree parm;
2434 register rtx entry_parm = 0;
2435 register rtx stack_parm = 0;
2436 CUMULATIVE_ARGS args_so_far;
2437 enum machine_mode passed_mode, nominal_mode;
2438 /* Total space needed so far for args on the stack,
2439 given as a constant and a tree-expression. */
2440 struct args_size stack_args_size;
2441 tree fntype = TREE_TYPE (fndecl);
2442 tree fnargs = DECL_ARGUMENTS (fndecl);
2443 /* This is used for the arg pointer when referring to stack args. */
2444 rtx internal_arg_pointer;
2445 /* This is a dummy PARM_DECL that we used for the function result if
2446 the function returns a structure. */
2447 tree function_result_decl = 0;
2448 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2449 int varargs_setup = 0;
2450
2451 /* Nonzero if the last arg is named `__builtin_va_alist',
2452 which is used on some machines for old-fashioned non-ANSI varargs.h;
2453 this should be stuck onto the stack as if it had arrived there. */
2454 int vararg
2455 = (fnargs
2456 && (parm = tree_last (fnargs)) != 0
2457 && DECL_NAME (parm)
2458 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2459 "__builtin_va_alist")));
2460
2461 /* Nonzero if function takes extra anonymous args.
2462 This means the last named arg must be on the stack
2463 right before the anonymous ones. */
2464 int stdarg
2465 = (TYPE_ARG_TYPES (fntype) != 0
2466 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2467 != void_type_node));
2468
2469 /* If the reg that the virtual arg pointer will be translated into is
2470 not a fixed reg or is the stack pointer, make a copy of the virtual
2471 arg pointer, and address parms via the copy. The frame pointer is
2472 considered fixed even though it is not marked as such.
2473
2474 The second time through, simply use ap to avoid generating rtx. */
2475
2476 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2477 || ! (fixed_regs[ARG_POINTER_REGNUM]
2478 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2479 && ! second_time)
2480 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2481 else
2482 internal_arg_pointer = virtual_incoming_args_rtx;
2483 current_function_internal_arg_pointer = internal_arg_pointer;
2484
2485 stack_args_size.constant = 0;
2486 stack_args_size.var = 0;
2487
2488 /* If struct value address is treated as the first argument, make it so. */
2489 if (aggregate_value_p (DECL_RESULT (fndecl))
2490 && ! current_function_returns_pcc_struct
2491 && struct_value_incoming_rtx == 0)
2492 {
2493 tree type = build_pointer_type (fntype);
2494
2495 function_result_decl = build_decl (PARM_DECL, 0, type);
2496
2497 DECL_ARG_TYPE (function_result_decl) = type;
2498 TREE_CHAIN (function_result_decl) = fnargs;
2499 fnargs = function_result_decl;
2500 }
2501
2502 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2503 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2504
2505#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2506 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, 0);
2507#else
2508 INIT_CUMULATIVE_ARGS (args_so_far, fntype, 0);
2509#endif
2510
2511 /* We haven't yet found an argument that we must push and pretend the
2512 caller did. */
2513 current_function_pretend_args_size = 0;
2514
2515 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2516 {
2517 int aggregate
2518 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2519 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2520 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2521 struct args_size stack_offset;
2522 struct args_size arg_size;
2523 int passed_pointer = 0;
2524 tree passed_type = DECL_ARG_TYPE (parm);
2525
2526 /* Set LAST_NAMED if this is last named arg before some
2527 anonymous args. We treat it as if it were anonymous too. */
2528 int last_named = ((TREE_CHAIN (parm) == 0
2529 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2530 && (vararg || stdarg));
2531
2532 if (TREE_TYPE (parm) == error_mark_node
2533 /* This can happen after weird syntax errors
2534 or if an enum type is defined among the parms. */
2535 || TREE_CODE (parm) != PARM_DECL
2536 || passed_type == NULL)
2537 {
2538 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2539 TREE_USED (parm) = 1;
2540 continue;
2541 }
2542
2543 /* For varargs.h function, save info about regs and stack space
2544 used by the individual args, not including the va_alist arg. */
2545 if (vararg && last_named)
2546 current_function_args_info = args_so_far;
2547
2548 /* Find mode of arg as it is passed, and mode of arg
2549 as it should be during execution of this function. */
2550 passed_mode = TYPE_MODE (passed_type);
2551 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2552
2553#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2554 /* See if this arg was passed by invisible reference. */
2555 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2556 passed_type, ! last_named))
2557 {
2558 passed_type = build_pointer_type (passed_type);
2559 passed_pointer = 1;
2560 passed_mode = nominal_mode = Pmode;
2561 }
2562#endif
2563
2564 /* Let machine desc say which reg (if any) the parm arrives in.
2565 0 means it arrives on the stack. */
2566#ifdef FUNCTION_INCOMING_ARG
2567 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2568 passed_type, ! last_named);
2569#else
2570 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2571 passed_type, ! last_named);
2572#endif
2573
2574#ifdef SETUP_INCOMING_VARARGS
2575 /* If this is the last named parameter, do any required setup for
2576 varargs or stdargs. We need to know about the case of this being an
2577 addressable type, in which case we skip the registers it
2578 would have arrived in.
2579
2580 For stdargs, LAST_NAMED will be set for two parameters, the one that
2581 is actually the last named, and the dummy parameter. We only
2582 want to do this action once.
2583
2584 Also, indicate when RTL generation is to be suppressed. */
2585 if (last_named && !varargs_setup)
2586 {
2587 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2588 current_function_pretend_args_size,
2589 second_time);
2590 varargs_setup = 1;
2591 }
2592#endif
2593
2594 /* Determine parm's home in the stack,
2595 in case it arrives in the stack or we should pretend it did.
2596
2597 Compute the stack position and rtx where the argument arrives
2598 and its size.
2599
2600 There is one complexity here: If this was a parameter that would
2601 have been passed in registers, but wasn't only because it is
2602 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2603 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2604 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2605 0 as it was the previous time. */
2606
2607 locate_and_pad_parm (passed_mode, passed_type,
2608#ifdef STACK_PARMS_IN_REG_PARM_AREA
2609 1,
2610#else
2611#ifdef FUNCTION_INCOMING_ARG
2612 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2613 passed_type,
2614 (! last_named
2615 || varargs_setup)) != 0,
2616#else
2617 FUNCTION_ARG (args_so_far, passed_mode,
2618 passed_type,
2619 ! last_named || varargs_setup) != 0,
2620#endif
2621#endif
2622 fndecl, &stack_args_size, &stack_offset, &arg_size);
2623
2624 if (! second_time)
2625 {
2626 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2627
2628 if (offset_rtx == const0_rtx)
2629 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2630 else
2631 stack_parm = gen_rtx (MEM, passed_mode,
2632 gen_rtx (PLUS, Pmode,
2633 internal_arg_pointer, offset_rtx));
2634
2635 /* If this is a memory ref that contains aggregate components,
2636 mark it as such for cse and loop optimize. */
2637 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2638 }
2639
2640 /* If this parameter was passed both in registers and in the stack,
2641 use the copy on the stack. */
2642 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2643 entry_parm = 0;
2644
2645 /* If this parm was passed part in regs and part in memory,
2646 pretend it arrived entirely in memory
2647 by pushing the register-part onto the stack.
2648
2649 In the special case of a DImode or DFmode that is split,
2650 we could put it together in a pseudoreg directly,
2651 but for now that's not worth bothering with. */
2652
2653 if (entry_parm)
2654 {
2655 int nregs = 0;
2656#ifdef FUNCTION_ARG_PARTIAL_NREGS
2657 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2658 passed_type, ! last_named);
2659#endif
2660
2661 if (nregs > 0)
2662 {
2663 current_function_pretend_args_size
2664 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2665 / (PARM_BOUNDARY / BITS_PER_UNIT)
2666 * (PARM_BOUNDARY / BITS_PER_UNIT));
2667
2668 if (! second_time)
2669 move_block_from_reg (REGNO (entry_parm),
2670 validize_mem (stack_parm), nregs);
2671 entry_parm = stack_parm;
2672 }
2673 }
2674
2675 /* If we didn't decide this parm came in a register,
2676 by default it came on the stack. */
2677 if (entry_parm == 0)
2678 entry_parm = stack_parm;
2679
2680 /* Record permanently how this parm was passed. */
2681 if (! second_time)
2682 DECL_INCOMING_RTL (parm) = entry_parm;
2683
2684 /* If there is actually space on the stack for this parm,
2685 count it in stack_args_size; otherwise set stack_parm to 0
2686 to indicate there is no preallocated stack slot for the parm. */
2687
2688 if (entry_parm == stack_parm
2689#ifdef REG_PARM_STACK_SPACE
2690 /* On some machines, even if a parm value arrives in a register
2691 there is still an (uninitialized) stack slot allocated for it. */
2692 || REG_PARM_STACK_SPACE (fndecl) > 0
2693#endif
2694 )
2695 {
2696 stack_args_size.constant += arg_size.constant;
2697 if (arg_size.var)
2698 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2699 }
2700 else
2701 /* No stack slot was pushed for this parm. */
2702 stack_parm = 0;
2703
2704 /* Update info on where next arg arrives in registers. */
2705
2706 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2707 passed_type, ! last_named);
2708
2709 /* If this is our second time through, we are done with this parm. */
2710 if (second_time)
2711 continue;
2712
e16c591a
RS
2713 /* If we can't trust the parm stack slot to be aligned enough
2714 for its ultimate type, don't use that slot after entry.
2715 We'll make another stack slot, if we need one. */
2716 {
2717#ifdef FUNCTION_ARG_BOUNDARY
2718 int thisparm_boundary
2719 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2720#else
2721 int thisparm_boundary = PARM_BOUNDARY;
2722#endif
2723
2724 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2725 stack_parm = 0;
2726 }
2727
6f086dfc
RS
2728 /* Now adjust STACK_PARM to the mode and precise location
2729 where this parameter should live during execution,
2730 if we discover that it must live in the stack during execution.
2731 To make debuggers happier on big-endian machines, we store
2732 the value in the last bytes of the space available. */
2733
2734 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2735 && stack_parm != 0)
2736 {
2737 rtx offset_rtx;
2738
2739#if BYTES_BIG_ENDIAN
2740 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2741 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2742 - GET_MODE_SIZE (nominal_mode));
2743#endif
2744
2745 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2746 if (offset_rtx == const0_rtx)
2747 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2748 else
2749 stack_parm = gen_rtx (MEM, nominal_mode,
2750 gen_rtx (PLUS, Pmode,
2751 internal_arg_pointer, offset_rtx));
2752
2753 /* If this is a memory ref that contains aggregate components,
2754 mark it as such for cse and loop optimize. */
2755 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2756 }
2757
2758 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2759 in the mode in which it arrives.
2760 STACK_PARM is an RTX for a stack slot where the parameter can live
2761 during the function (in case we want to put it there).
2762 STACK_PARM is 0 if no stack slot was pushed for it.
2763
2764 Now output code if necessary to convert ENTRY_PARM to
2765 the type in which this function declares it,
2766 and store that result in an appropriate place,
2767 which may be a pseudo reg, may be STACK_PARM,
2768 or may be a local stack slot if STACK_PARM is 0.
2769
2770 Set DECL_RTL to that place. */
2771
2772 if (nominal_mode == BLKmode)
2773 {
2774 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2775 if (GET_CODE (entry_parm) == REG)
2776 {
2777 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2778 UNITS_PER_WORD);
2779
2780 /* Note that we will be storing an integral number of words.
2781 So we have to be careful to ensure that we allocate an
2782 integral number of words. We do this below in the
2783 assign_stack_local if space was not allocated in the argument
2784 list. If it was, this will not work if PARM_BOUNDARY is not
2785 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2786 if it becomes a problem. */
2787
2788 if (stack_parm == 0)
2789 stack_parm
2790 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2791 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2792 abort ();
2793
2794 move_block_from_reg (REGNO (entry_parm),
2795 validize_mem (stack_parm),
2796 size_stored / UNITS_PER_WORD);
2797 }
2798 DECL_RTL (parm) = stack_parm;
2799 }
2800 else if (! (
2801#if 0 /* This change was turned off because it makes compilation bigger. */
2802 !optimize
2803#else /* It's not clear why the following was replaced. */
b335c2cc 2804 /* Obsoleted by preceding line. */
6f086dfc
RS
2805 (obey_regdecls && ! TREE_REGDECL (parm)
2806 && ! TREE_INLINE (fndecl))
2807#endif
2808 /* layout_decl may set this. */
2809 || TREE_ADDRESSABLE (parm)
2810 || TREE_SIDE_EFFECTS (parm)
2811 /* If -ffloat-store specified, don't put explicit
2812 float variables into registers. */
2813 || (flag_float_store
2814 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2815 /* Always assign pseudo to structure return or item passed
2816 by invisible reference. */
2817 || passed_pointer || parm == function_result_decl)
2818 {
2819 /* Store the parm in a pseudoregister during the function. */
2820 register rtx parmreg = gen_reg_rtx (nominal_mode);
2821
2822 REG_USERVAR_P (parmreg) = 1;
2823
2824 /* If this was an item that we received a pointer to, set DECL_RTL
2825 appropriately. */
2826 if (passed_pointer)
2827 {
2828 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2829 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2830 }
2831 else
2832 DECL_RTL (parm) = parmreg;
2833
2834 /* Copy the value into the register. */
2835 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
86f8eff3
RK
2836 {
2837 /* If ENTRY_PARM is a hard register, it might be in a register
2838 not valid for operating in its mode (e.g., an odd-numbered
2839 register for a DFmode). In that case, moves are the only
2840 thing valid, so we can't do a convert from there. This
2841 occurs when the calling sequence allow such misaligned
2842 usages. */
2843 if (GET_CODE (entry_parm) == REG
2844 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2845 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2846 GET_MODE (entry_parm)))
2847 convert_move (parmreg, copy_to_reg (entry_parm));
2848 else
2849 convert_move (parmreg, validize_mem (entry_parm), 0);
2850 }
6f086dfc
RS
2851 else
2852 emit_move_insn (parmreg, validize_mem (entry_parm));
2853
2854 /* In any case, record the parm's desired stack location
2855 in case we later discover it must live in the stack. */
2856 if (REGNO (parmreg) >= nparmregs)
2857 {
2858 rtx *new;
2859 nparmregs = REGNO (parmreg) + 5;
2860 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2861 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2862 parm_reg_stack_loc = new;
2863 }
2864 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2865
2866 /* Mark the register as eliminable if we did no conversion
2867 and it was copied from memory at a fixed offset,
2868 and the arg pointer was not copied to a pseudo-reg.
2869 If the arg pointer is a pseudo reg or the offset formed
2870 an invalid address, such memory-equivalences
2871 as we make here would screw up life analysis for it. */
2872 if (nominal_mode == passed_mode
2873 && GET_CODE (entry_parm) == MEM
e16c591a 2874 && entry_parm == stack_parm
6f086dfc
RS
2875 && stack_offset.var == 0
2876 && reg_mentioned_p (virtual_incoming_args_rtx,
2877 XEXP (entry_parm, 0)))
2878 REG_NOTES (get_last_insn ())
2879 = gen_rtx (EXPR_LIST, REG_EQUIV,
2880 entry_parm, REG_NOTES (get_last_insn ()));
2881
2882 /* For pointer data type, suggest pointer register. */
2883 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2884 mark_reg_pointer (parmreg);
2885 }
2886 else
2887 {
2888 /* Value must be stored in the stack slot STACK_PARM
2889 during function execution. */
2890
2891 if (passed_mode != nominal_mode)
86f8eff3
RK
2892 {
2893 /* Conversion is required. */
2894 if (GET_CODE (entry_parm) == REG
2895 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2896 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2897 entry_parm = copy_to_reg (entry_parm);
2898
2899 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2900 }
6f086dfc
RS
2901
2902 if (entry_parm != stack_parm)
2903 {
2904 if (stack_parm == 0)
2905 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2906 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2907 emit_move_insn (validize_mem (stack_parm),
2908 validize_mem (entry_parm));
2909 }
2910
2911 DECL_RTL (parm) = stack_parm;
2912 }
2913
2914 /* If this "parameter" was the place where we are receiving the
2915 function's incoming structure pointer, set up the result. */
2916 if (parm == function_result_decl)
2917 DECL_RTL (DECL_RESULT (fndecl))
2918 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
2919
2920 if (TREE_THIS_VOLATILE (parm))
2921 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
2922 if (TREE_READONLY (parm))
2923 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
2924 }
2925
2926 max_parm_reg = max_reg_num ();
2927 last_parm_insn = get_last_insn ();
2928
2929 current_function_args_size = stack_args_size.constant;
2930
2931 /* Adjust function incoming argument size for alignment and
2932 minimum length. */
2933
2934#ifdef REG_PARM_STACK_SPACE
6f90e075 2935#ifndef MAYBE_REG_PARM_STACK_SPACE
6f086dfc
RS
2936 current_function_args_size = MAX (current_function_args_size,
2937 REG_PARM_STACK_SPACE (fndecl));
2938#endif
6f90e075 2939#endif
6f086dfc
RS
2940
2941#ifdef STACK_BOUNDARY
2942#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2943
2944 current_function_args_size
2945 = ((current_function_args_size + STACK_BYTES - 1)
2946 / STACK_BYTES) * STACK_BYTES;
2947#endif
2948
2949#ifdef ARGS_GROW_DOWNWARD
2950 current_function_arg_offset_rtx
2951 = (stack_args_size.var == 0 ? gen_rtx (CONST_INT, VOIDmode,
2952 -stack_args_size.constant)
2953 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
2954 size_int (-stack_args_size.constant)),
2955 0, VOIDmode, 0));
2956#else
2957 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2958#endif
2959
2960 /* See how many bytes, if any, of its args a function should try to pop
2961 on return. */
2962
2963 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
2964 current_function_args_size);
2965
2966 /* For stdarg.h function, save info about regs and stack space
2967 used by the named args. */
2968
2969 if (stdarg)
2970 current_function_args_info = args_so_far;
2971
2972 /* Set the rtx used for the function return value. Put this in its
2973 own variable so any optimizers that need this information don't have
2974 to include tree.h. Do this here so it gets done when an inlined
2975 function gets output. */
2976
2977 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
2978}
2979\f
2980/* Compute the size and offset from the start of the stacked arguments for a
2981 parm passed in mode PASSED_MODE and with type TYPE.
2982
2983 INITIAL_OFFSET_PTR points to the current offset into the stacked
2984 arguments.
2985
2986 The starting offset and size for this parm are returned in *OFFSET_PTR
2987 and *ARG_SIZE_PTR, respectively.
2988
2989 IN_REGS is non-zero if the argument will be passed in registers. It will
2990 never be set if REG_PARM_STACK_SPACE is not defined.
2991
2992 FNDECL is the function in which the argument was defined.
2993
2994 There are two types of rounding that are done. The first, controlled by
2995 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
2996 list to be aligned to the specific boundary (in bits). This rounding
2997 affects the initial and starting offsets, but not the argument size.
2998
2999 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3000 optionally rounds the size of the parm to PARM_BOUNDARY. The
3001 initial offset is not affected by this rounding, while the size always
3002 is and the starting offset may be. */
3003
/* In the ARGS_GROW_DOWNWARD case, offset_ptr will be negative;
   initial_offset_ptr is positive because locate_and_pad_parm's
   callers pass in the total size of the args so far as
   initial_offset_ptr.  arg_size_ptr is always positive.  */
3008
3009static void pad_to_arg_alignment (), pad_below ();
3010
3011void
3012locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3013 initial_offset_ptr, offset_ptr, arg_size_ptr)
3014 enum machine_mode passed_mode;
3015 tree type;
3016 int in_regs;
3017 tree fndecl;
3018 struct args_size *initial_offset_ptr;
3019 struct args_size *offset_ptr;
3020 struct args_size *arg_size_ptr;
3021{
3022 tree sizetree
3023 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3024 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3025 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3026 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3027 int reg_parm_stack_space = 0;
3028
3029#ifdef REG_PARM_STACK_SPACE
3030 /* If we have found a stack parm before we reach the end of the
3031 area reserved for registers, skip that area. */
3032 if (! in_regs)
3033 {
3034 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3035 if (reg_parm_stack_space > 0)
3036 {
3037 if (initial_offset_ptr->var)
3038 {
3039 initial_offset_ptr->var
3040 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3041 size_int (reg_parm_stack_space));
3042 initial_offset_ptr->constant = 0;
3043 }
3044 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3045 initial_offset_ptr->constant = reg_parm_stack_space;
3046 }
3047 }
3048#endif /* REG_PARM_STACK_SPACE */
3049
3050 arg_size_ptr->var = 0;
3051 arg_size_ptr->constant = 0;
3052
3053#ifdef ARGS_GROW_DOWNWARD
3054 if (initial_offset_ptr->var)
3055 {
3056 offset_ptr->constant = 0;
3057 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3058 initial_offset_ptr->var);
3059 }
3060 else
3061 {
3062 offset_ptr->constant = - initial_offset_ptr->constant;
3063 offset_ptr->var = 0;
3064 }
3065 if (where_pad == upward
3066 && (TREE_CODE (sizetree) != INTEGER_CST
3067 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3068 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3069 SUB_PARM_SIZE (*offset_ptr, sizetree);
3070 pad_to_arg_alignment (offset_ptr, boundary);
3071 if (initial_offset_ptr->var)
3072 {
3073 arg_size_ptr->var = size_binop (MINUS_EXPR,
3074 size_binop (MINUS_EXPR,
3075 integer_zero_node,
3076 initial_offset_ptr->var),
3077 offset_ptr->var);
3078 }
3079 else
3080 {
3081 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3082 offset_ptr->constant);
3083 }
3084/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3085 if (where_pad == downward)
3086 pad_below (arg_size_ptr, passed_mode, sizetree);
3087#else /* !ARGS_GROW_DOWNWARD */
3088 pad_to_arg_alignment (initial_offset_ptr, boundary);
3089 *offset_ptr = *initial_offset_ptr;
3090 if (where_pad == downward)
3091 pad_below (offset_ptr, passed_mode, sizetree);
3092
3093#ifdef PUSH_ROUNDING
3094 if (passed_mode != BLKmode)
3095 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3096#endif
3097
3098 if (where_pad != none
3099 && (TREE_CODE (sizetree) != INTEGER_CST
3100 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3101 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3102
3103 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3104#endif /* ARGS_GROW_DOWNWARD */
3105}
3106
e16c591a
RS
3107/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3108 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3109
6f086dfc
RS
static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  /* A boundary of one storage unit (or less) needs no padding.  */
  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  /* Symbolic offset: do the rounding as a tree expression.
	     When args grow downward the offset is negative, so round
	     toward more-negative values (round_down) rather than up.  */
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	}
      else
	/* Constant offset: round the integer directly, in the
	   direction matching the stack-growth convention.  */
	offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
3140
3141static void
3142pad_below (offset_ptr, passed_mode, sizetree)
3143 struct args_size *offset_ptr;
3144 enum machine_mode passed_mode;
3145 tree sizetree;
3146{
3147 if (passed_mode != BLKmode)
3148 {
3149 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3150 offset_ptr->constant
3151 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3152 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3153 - GET_MODE_SIZE (passed_mode));
3154 }
3155 else
3156 {
3157 if (TREE_CODE (sizetree) != INTEGER_CST
3158 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3159 {
3160 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3161 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3162 /* Add it in. */
3163 ADD_PARM_SIZE (*offset_ptr, s2);
3164 SUB_PARM_SIZE (*offset_ptr, sizetree);
3165 }
3166 }
3167}
3168
3169static tree
3170round_down (value, divisor)
3171 tree value;
3172 int divisor;
3173{
3174 return size_binop (MULT_EXPR,
3175 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3176 size_int (divisor));
3177}
3178\f
3179/* Walk the tree of blocks describing the binding levels within a function
3180 and warn about uninitialized variables.
3181 This is done after calling flow_analysis and before global_alloc
3182 clobbers the pseudo-regs to hard regs. */
3183
3184void
3185uninitialized_vars_warning (block)
3186 tree block;
3187{
3188 register tree decl, sub;
3189 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3190 {
3191 if (TREE_CODE (decl) == VAR_DECL
3192 /* These warnings are unreliable for and aggregates
3193 because assigning the fields one by one can fail to convince
3194 flow.c that the entire aggregate was initialized.
3195 Unions are troublesome because members may be shorter. */
3196 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3197 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3198 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3199 && DECL_RTL (decl) != 0
3200 && GET_CODE (DECL_RTL (decl)) == REG
3201 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3202 warning_with_decl (decl,
3203 "`%s' may be used uninitialized in this function");
3204 if (TREE_CODE (decl) == VAR_DECL
3205 && DECL_RTL (decl) != 0
3206 && GET_CODE (DECL_RTL (decl)) == REG
3207 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3208 warning_with_decl (decl,
3209 "variable `%s' may be clobbered by `longjmp'");
3210 }
3211 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3212 uninitialized_vars_warning (sub);
3213}
3214
3215/* Do the appropriate part of uninitialized_vars_warning
3216 but for arguments instead of local variables. */
3217
3218void
3219setjmp_args_warning (block)
3220 tree block;
3221{
3222 register tree decl;
3223 for (decl = DECL_ARGUMENTS (current_function_decl);
3224 decl; decl = TREE_CHAIN (decl))
3225 if (DECL_RTL (decl) != 0
3226 && GET_CODE (DECL_RTL (decl)) == REG
3227 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3228 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3229}
3230
3231/* If this function call setjmp, put all vars into the stack
3232 unless they were declared `register'. */
3233
3234void
3235setjmp_protect (block)
3236 tree block;
3237{
3238 register tree decl, sub;
3239 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3240 if ((TREE_CODE (decl) == VAR_DECL
3241 || TREE_CODE (decl) == PARM_DECL)
3242 && DECL_RTL (decl) != 0
3243 && GET_CODE (DECL_RTL (decl)) == REG
b335c2cc
TW
3244 /* If this variable came from an inline function, it must be
3245 that it's life doesn't overlap the setjmp. If there was a
3246 setjmp in the function, it would already be in memory. We
3247 must exclude such variable because their DECL_RTL might be
3248 set to strange things such as virtual_stack_vars_rtx. */
3249 && ! DECL_FROM_INLINE (decl)
6f086dfc
RS
3250 && (
3251#ifdef NON_SAVING_SETJMP
3252 /* If longjmp doesn't restore the registers,
3253 don't put anything in them. */
3254 NON_SAVING_SETJMP
3255 ||
3256#endif
3257 ! TREE_REGDECL (decl)))
3258 put_var_into_stack (decl);
3259 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3260 setjmp_protect (sub);
3261}
3262\f
3263/* Like the previous function, but for args instead of local variables. */
3264
3265void
3266setjmp_protect_args ()
3267{
3268 register tree decl, sub;
3269 for (decl = DECL_ARGUMENTS (current_function_decl);
3270 decl; decl = TREE_CHAIN (decl))
3271 if ((TREE_CODE (decl) == VAR_DECL
3272 || TREE_CODE (decl) == PARM_DECL)
3273 && DECL_RTL (decl) != 0
3274 && GET_CODE (DECL_RTL (decl)) == REG
3275 && (
3276 /* If longjmp doesn't restore the registers,
3277 don't put anything in them. */
3278#ifdef NON_SAVING_SETJMP
3279 NON_SAVING_SETJMP
3280 ||
3281#endif
3282 ! TREE_REGDECL (decl)))
3283 put_var_into_stack (decl);
3284}
3285\f
3286/* Return the context-pointer register corresponding to DECL,
3287 or 0 if it does not need one. */
3288
3289rtx
3290lookup_static_chain (decl)
3291 tree decl;
3292{
3293 tree context = decl_function_context (decl);
3294 tree link;
3295
3296 if (context == 0)
3297 return 0;
3298
3299 /* We treat inline_function_decl as an alias for the current function
3300 because that is the inline function whose vars, types, etc.
3301 are being merged into the current function.
3302 See expand_inline_function. */
3303 if (context == current_function_decl || context == inline_function_decl)
3304 return virtual_stack_vars_rtx;
3305
3306 for (link = context_display; link; link = TREE_CHAIN (link))
3307 if (TREE_PURPOSE (link) == context)
3308 return RTL_EXPR_RTL (TREE_VALUE (link));
3309
3310 abort ();
3311}
3312\f
3313/* Convert a stack slot address ADDR for variable VAR
3314 (from a containing function)
3315 into an address valid in this function (using a static chain). */
3316
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the saved compilation state of the containing function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.
     Anything other than REG or (PLUS reg const_int) is unsupported.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      /* NOTE: this ADDR intentionally shadows the parameter; it holds
	 the lexically-fixed address of the outer function's ap slot.  */
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      /* The save slot itself lives in the outer frame, so its address
	 must be fixed recursively before we can load through it.  */
      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      /* Fold the arg-pointer/frame-pointer difference into the
	 displacement, then address relative to the static chain.  */
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
3398\f
3399/* Return the address of the trampoline for entering nested fn FUNCTION.
3400 If necessary, allocate a trampoline (in the stack frame)
3401 and emit rtl to initialize its contents (at entry to this function). */
3402
rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  /* First look in this function's own list ...  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
  /* ... then in the lists of all enclosing functions; an address found
     there must be fixed up to be valid in the current frame.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.
     FP stays 0 when that is the current function.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      /* Allocate the list node on the defining function's obstack so it
	 lives as long as that function's other per-function data.  */
      push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
3479
3480/* Given a trampoline address,
3481 round it to multiple of TRAMPOLINE_ALIGNMENT. */
3482
static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary:
     (tramp + align - 1) & -align.
     NOTE(review): this assumes TRAMPOLINE_ALIGNMENT is a power of 2 --
     confirm against target definitions.  */
  rtx temp = gen_reg_rtx (Pmode);
  /* TEMP is passed as the target of both expand_binop calls; each may
     use it or return a different rtx, so the result is reassigned.  */
  temp = expand_binop (Pmode, add_optab, tramp,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_ALIGNMENT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			gen_rtx (CONST_INT, VOIDmode, - TRAMPOLINE_ALIGNMENT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
3499\f
3500/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3501 and initialize static variables for generating RTL for the statements
3502 of the function. */
3503
void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  /* Receives the "size units" output of decl_printable_name; unused.  */
  char *junk;

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (0, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	current_function_returns_pcc_struct = 1;
      else
#endif
	current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}
3646
3647/* Indicate that the current function uses extra args
3648 not explicitly mentioned in the argument list in any fashion. */
3649
void
mark_varargs ()
{
  /* Flag read elsewhere in the compiler; set it and nothing more.  */
  current_function_varargs = 1;
}
3655
3656/* Expand a call to __main at the beginning of a possible main function. */
3657
void
expand_main_function ()
{
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
  /* Emit a call to __main; on targets with INIT_SECTION_ASM_OP (and
     without INVOKE__main) initialization is handled by the init
     section instead, so no call is emitted.  */
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
		     VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
}
3666\f
3667/* Start the RTL for a new function, and set variables used for
3668 emitting RTL.
3669 SUBR is the FUNCTION_DECL node.
3670 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3671 the function's parameters, which must be run at any return statement. */
3672
3673void
3674expand_function_start (subr, parms_have_cleanups)
3675 tree subr;
3676 int parms_have_cleanups;
3677{
3678 register int i;
3679 tree tem;
3680 rtx last_ptr;
3681
3682 /* Make sure volatile mem refs aren't considered
3683 valid operands of arithmetic insns. */
3684 init_recog_no_volatile ();
3685
3686 /* If function gets a static chain arg, store it in the stack frame.
3687 Do this first, so it gets the first stack slot offset. */
3688 if (current_function_needs_context)
3689 emit_move_insn (assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0),
3690 static_chain_incoming_rtx);
3691
3692 /* If the parameters of this function need cleaning up, get a label
3693 for the beginning of the code which executes those cleanups. This must
3694 be done before doing anything with return_label. */
3695 if (parms_have_cleanups)
3696 cleanup_label = gen_label_rtx ();
3697 else
3698 cleanup_label = 0;
3699
3700 /* Make the label for return statements to jump to, if this machine
3701 does not have a one-instruction return and uses an epilogue,
3702 or if it returns a structure, or if it has parm cleanups. */
3703#ifdef HAVE_return
3704 if (cleanup_label == 0 && HAVE_return
3705 && ! current_function_returns_pcc_struct
3706 && ! (current_function_returns_struct && ! optimize))
3707 return_label = 0;
3708 else
3709 return_label = gen_label_rtx ();
3710#else
3711 return_label = gen_label_rtx ();
3712#endif
3713
3714 /* Initialize rtx used to return the value. */
3715 /* Do this before assign_parms so that we copy the struct value address
3716 before any library calls that assign parms might generate. */
3717
3718 /* Decide whether to return the value in memory or in a register. */
3719 if (aggregate_value_p (DECL_RESULT (subr)))
3720 {
3721 /* Returning something that won't go in a register. */
3722 register rtx value_address;
3723
3724#ifdef PCC_STATIC_STRUCT_RETURN
3725 if (current_function_returns_pcc_struct)
3726 {
3727 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3728 value_address = assemble_static_space (size);
3729 }
3730 else
3731#endif
3732 {
3733 /* Expect to be passed the address of a place to store the value.
3734 If it is passed as an argument, assign_parms will take care of
3735 it. */
3736 if (struct_value_incoming_rtx)
3737 {
3738 value_address = gen_reg_rtx (Pmode);
3739 emit_move_insn (value_address, struct_value_incoming_rtx);
3740 }
3741 }
3742 if (value_address)
3743 DECL_RTL (DECL_RESULT (subr))
3744 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
3745 value_address);
3746 }
3747 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3748 /* If return mode is void, this decl rtl should not be used. */
3749 DECL_RTL (DECL_RESULT (subr)) = 0;
3750 else if (parms_have_cleanups)
3751 /* If function will end with cleanup code for parms,
3752 compute the return values into a pseudo reg,
3753 which we will copy into the true return register
3754 after the cleanups are done. */
3755 DECL_RTL (DECL_RESULT (subr))
3756 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
3757 else
3758 /* Scalar, returned in a register. */
3759 {
3760#ifdef FUNCTION_OUTGOING_VALUE
3761 DECL_RTL (DECL_RESULT (subr))
3762 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3763#else
3764 DECL_RTL (DECL_RESULT (subr))
3765 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3766#endif
3767
3768 /* Mark this reg as the function's return value. */
3769 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
3770 {
3771 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
3772 /* Needed because we may need to move this to memory
3773 in case it's a named return value whose address is taken. */
3774 TREE_REGDECL (DECL_RESULT (subr)) = 1;
3775 }
3776 }
3777
3778 /* Initialize rtx for parameters and local variables.
3779 In some cases this requires emitting insns. */
3780
3781 assign_parms (subr, 0);
3782
3783 /* The following was moved from init_function_start.
3784 The move is supposed to make sdb output more accurate. */
3785 /* Indicate the beginning of the function body,
3786 as opposed to parm setup. */
3787 emit_note (0, NOTE_INSN_FUNCTION_BEG);
3788
3789 /* If doing stupid allocation, mark parms as born here. */
3790
3791 if (GET_CODE (get_last_insn ()) != NOTE)
3792 emit_note (0, NOTE_INSN_DELETED);
3793 parm_birth_insn = get_last_insn ();
3794
3795 if (obey_regdecls)
3796 {
3797 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
3798 use_variable (regno_reg_rtx[i]);
3799
3800 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
3801 use_variable (current_function_internal_arg_pointer);
3802 }
3803
3804 /* Fetch static chain values for containing functions. */
3805 tem = decl_function_context (current_function_decl);
3806 if (tem)
3807 last_ptr = copy_to_reg (static_chain_incoming_rtx);
3808 context_display = 0;
3809 while (tem)
3810 {
3811 tree rtlexp = make_node (RTL_EXPR);
3812
3813 RTL_EXPR_RTL (rtlexp) = last_ptr;
3814 context_display = tree_cons (tem, rtlexp, context_display);
3815 tem = decl_function_context (tem);
3816 if (tem == 0)
3817 break;
3818 /* Chain thru stack frames, assuming pointer to next lexical frame
3819 is found at the place we always store it. */
3820#ifdef FRAME_GROWS_DOWNWARD
3821 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
3822#endif
3823 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
3824 memory_address (Pmode, last_ptr)));
3825 }
3826
3827 /* After the display initializations is where the tail-recursion label
3828 should go, if we end up needing one. Ensure we have a NOTE here
3829 since some things (like trampolines) get placed before this. */
3830 tail_recursion_reentry = emit_note (0, NOTE_INSN_DELETED);
3831
3832 /* Evaluate now the sizes of any types declared among the arguments. */
3833 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
3834 expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0);
3835
3836 /* Make sure there is a line number after the function entry setup code. */
3837 force_next_line_note ();
3838}
3839\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file,
   used to emit a final line note (SDB debug output depends on it).

   This emits, in order: the arg-pointer restore, trampoline
   initialization code, the FUNCTION_END note, the return label,
   stack save/restore around alloca, the copy of a pseudo-reg return
   value into the hard return register, the structure-value address
   setup, and (if the target has one) the `return' insn.  */

/* It is up to language-specific callers to do cleanups for parameters.  */

void
expand_function_end (filename, line)
     char *filename;
     int line;
{
  register int i;
  tree link;

  /* Template for trampoline contents; built lazily, once per
     compilation (hence `static'), in permanent storage.  */
  static rtx initial_trampoline;

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      setjmp_protect (DECL_INITIAL (current_function_decl));
      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.
     The save insn goes at function entry (before the tail-recursion
     reentry point) so the saved value is the incoming one.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.
     Each list entry pairs a nested function (TREE_PURPOSE) with the
     RTL_EXPR holding its trampoline's address (TREE_VALUE).  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
	 initializing trampolines.  The template must survive past
	 this function, so build it in permanent allocation.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}

      /* Generate insns to initialize the trampoline: copy the
	 template into the (suitably aligned) trampoline slot, then
	 let the target macro plug in the nested function's address
	 and static chain.  */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
			      round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_SIZE),
		       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
			     XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* Clear the trampoline_list for the next function.  */
  trampoline_list = 0;

#if 0 /* I think unused parms are legitimate enough.  */
  /* Warn about unused parms.  */
  if (warn_unused)
    {
      rtx decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
	  warning_with_decl (decl, "unused parameter `%s'");
    }
#endif

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence (0);

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here, so their pseudos stay live
     for the whole function body.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  /* Any pending stack adjustment must happen before the FUNCTION_END
     note; drop adjustments that are no longer needed, then emit the rest.  */
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (0, NOTE_INSN_FUNCTION_END);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.
     The save is inserted back at function entry (after parm birth);
     the restore is emitted here, at function exit.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, 0);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  The USE keeps the hard
     reg live up to the end of the insn chain.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  fixup_gotos (0, 0, 0, get_insns (), 0);
}
This page took 0.408332 seconds and 5 git commands to generate.