]> gcc.gnu.org Git - gcc.git/blame - gcc/function.c
* reorg.c: Finish deleting half-deleted comment.
[gcc.git] / gcc / function.c
CommitLineData
6f086dfc 1/* Expands front end tree to back end RTL for GNU C-Compiler
a5cad800 2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
6f086dfc
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
a35311b0
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
6f086dfc
RS
20
21
22/* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41#include "config.h"
670ee920 42#include "system.h"
6f086dfc
RS
43#include "rtl.h"
44#include "tree.h"
45#include "flags.h"
1ef08c63 46#include "except.h"
6f086dfc
RS
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
bdac5f58 56#include "basic-block.h"
c20bf1f3 57#include "obstack.h"
10f0ad3d 58#include "toplev.h"
6f086dfc 59
c795bca9
BS
60#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
61#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
62#endif
63
189cc377
RK
64#ifndef TRAMPOLINE_ALIGNMENT
65#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66#endif
67
293e3de4
RS
68/* Some systems use __main in a way incompatible with its use in gcc, in these
69 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
70 give the same symbol without quotes for an alternative entry point. You
0f41302f 71 must define both, or neither. */
293e3de4
RS
72#ifndef NAME__MAIN
73#define NAME__MAIN "__main"
74#define SYMBOL__MAIN __main
75#endif
76
6f086dfc
RS
77/* Round a value to the lowest integer less than it that is a multiple of
78 the required alignment. Avoid using division in case the value is
79 negative. Assume the alignment is a power of two. */
80#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
81
82/* Similar, but round to the next highest integer that meets the
83 alignment. */
84#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
85
86/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
87 during rtl generation. If they are different register numbers, this is
88 always true. It may also be true if
89 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
90 generation. See fix_lexical_addr for details. */
91
92#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
93#define NEED_SEPARATE_AP
94#endif
95
96/* Number of bytes of args popped by function being compiled on its return.
97 Zero if no bytes are to be popped.
98 May affect compilation of return insn or of function epilogue. */
99
100int current_function_pops_args;
101
102/* Nonzero if function being compiled needs to be given an address
103 where the value should be stored. */
104
105int current_function_returns_struct;
106
107/* Nonzero if function being compiled needs to
108 return the address of where it has put a structure value. */
109
110int current_function_returns_pcc_struct;
111
112/* Nonzero if function being compiled needs to be passed a static chain. */
113
114int current_function_needs_context;
115
116/* Nonzero if function being compiled can call setjmp. */
117
118int current_function_calls_setjmp;
119
120/* Nonzero if function being compiled can call longjmp. */
121
122int current_function_calls_longjmp;
123
124/* Nonzero if function being compiled receives nonlocal gotos
125 from nested functions. */
126
127int current_function_has_nonlocal_label;
128
8634413a
JW
129/* Nonzero if function being compiled has nonlocal gotos to parent
130 function. */
131
132int current_function_has_nonlocal_goto;
133
4d1d8045
BS
134/* Nonzero if this function has a computed goto.
135
136 It is computed during find_basic_blocks or during stupid life
137 analysis. */
138
139int current_function_has_computed_jump;
140
6f086dfc
RS
141/* Nonzero if function being compiled contains nested functions. */
142
143int current_function_contains_functions;
144
fdb8a883
JW
145/* Nonzero if function being compiled doesn't modify the stack pointer
146 (ignoring the prologue and epilogue). This is only valid after
147 life_analysis has run. */
148
149int current_function_sp_is_unchanging;
150
acd693d1 151/* Nonzero if the function being compiled issues a computed jump. */
ab87f8c8 152
acd693d1 153int current_function_has_computed_jump;
ab87f8c8 154
173cd503
JM
155/* Nonzero if the current function is a thunk (a lightweight function that
156 just adjusts one of its arguments and forwards to another function), so
157 we should try to cut corners where we can. */
158int current_function_is_thunk;
159
6f086dfc
RS
160/* Nonzero if function being compiled can call alloca,
161 either as a subroutine or builtin. */
162
163int current_function_calls_alloca;
164
165/* Nonzero if the current function returns a pointer type */
166
167int current_function_returns_pointer;
168
169/* If some insns can be deferred to the delay slots of the epilogue, the
170 delay list for them is recorded here. */
171
172rtx current_function_epilogue_delay_list;
173
174/* If function's args have a fixed size, this is that size, in bytes.
175 Otherwise, it is -1.
176 May affect compilation of return insn or of function epilogue. */
177
178int current_function_args_size;
179
180/* # bytes the prologue should push and pretend that the caller pushed them.
181 The prologue must do this, but only if parms can be passed in registers. */
182
183int current_function_pretend_args_size;
184
f7339633 185/* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
0f41302f 186 defined, the needed space is pushed by the prologue. */
6f086dfc
RS
187
188int current_function_outgoing_args_size;
189
190/* This is the offset from the arg pointer to the place where the first
191 anonymous arg can be found, if there is one. */
192
193rtx current_function_arg_offset_rtx;
194
195/* Nonzero if current function uses varargs.h or equivalent.
196 Zero for functions that use stdarg.h. */
197
198int current_function_varargs;
199
ebb904cb
RK
200/* Nonzero if current function uses stdarg.h or equivalent.
201 Zero for functions that use varargs.h. */
202
203int current_function_stdarg;
204
6f086dfc
RS
205/* Quantities of various kinds of registers
206 used for the current function's args. */
207
208CUMULATIVE_ARGS current_function_args_info;
209
210/* Name of function now being compiled. */
211
212char *current_function_name;
213
f345de42
JL
214/* If non-zero, an RTL expression for the location at which the current
215 function returns its result. If the current function returns its
216 result in a register, current_function_return_rtx will always be
217 the hard register containing the result. */
6f086dfc
RS
218
219rtx current_function_return_rtx;
220
221/* Nonzero if the current function uses the constant pool. */
222
223int current_function_uses_const_pool;
224
225/* Nonzero if the current function uses pic_offset_table_rtx. */
226int current_function_uses_pic_offset_table;
227
228/* The arg pointer hard register, or the pseudo into which it was copied. */
229rtx current_function_internal_arg_pointer;
230
aeb302bb
JM
231/* Language-specific reason why the current function cannot be made inline. */
232char *current_function_cannot_inline;
233
07417085
KR
234/* Nonzero if instrumentation calls for function entry and exit should be
235 generated. */
236int current_function_instrument_entry_exit;
237
7d384cc0
KR
238/* Nonzero if memory access checking be enabled in the current function. */
239int current_function_check_memory_usage;
240
6f086dfc
RS
241/* The FUNCTION_DECL for an inline function currently being expanded. */
242tree inline_function_decl;
243
244/* Number of function calls seen so far in current function. */
245
246int function_call_count;
247
248/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
249 (labels to which there can be nonlocal gotos from nested functions)
250 in this function. */
251
252tree nonlocal_labels;
253
ba716ac9
BS
254/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
255 for nonlocal gotos. There is one for every nonlocal label in the function;
256 this list matches the one in nonlocal_labels.
6f086dfc
RS
257 Zero when function does not have nonlocal labels. */
258
ba716ac9 259rtx nonlocal_goto_handler_slots;
6f086dfc
RS
260
261/* RTX for stack slot that holds the stack pointer value to restore
262 for a nonlocal goto.
263 Zero when function does not have nonlocal labels. */
264
265rtx nonlocal_goto_stack_level;
266
267/* Label that will go on parm cleanup code, if any.
268 Jumping to this label runs cleanup code for parameters, if
269 such code must be run. Following this code is the logical return label. */
270
271rtx cleanup_label;
272
273/* Label that will go on function epilogue.
274 Jumping to this label serves as a "return" instruction
275 on machines which require execution of the epilogue on all returns. */
276
277rtx return_label;
278
279/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
280 So we can mark them all live at the end of the function, if nonopt. */
281rtx save_expr_regs;
282
283/* List (chain of EXPR_LISTs) of all stack slots in this function.
284 Made for the sake of unshare_all_rtl. */
285rtx stack_slot_list;
286
287/* Chain of all RTL_EXPRs that have insns in them. */
288tree rtl_expr_chain;
289
290/* Label to jump back to for tail recursion, or 0 if we have
291 not yet needed one for this function. */
292rtx tail_recursion_label;
293
294/* Place after which to insert the tail_recursion_label if we need one. */
295rtx tail_recursion_reentry;
296
297/* Location at which to save the argument pointer if it will need to be
298 referenced. There are two cases where this is done: if nonlocal gotos
299 exist, or if vars stored at an offset from the argument pointer will be
300 needed by inner routines. */
301
302rtx arg_pointer_save_area;
303
304/* Offset to end of allocated area of stack frame.
305 If stack grows down, this is the address of the last stack slot allocated.
306 If stack grows up, this is the address for the next slot. */
8af5168b 307HOST_WIDE_INT frame_offset;
6f086dfc
RS
308
309/* List (chain of TREE_LISTs) of static chains for containing functions.
310 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
311 in an RTL_EXPR in the TREE_VALUE. */
312static tree context_display;
313
314/* List (chain of TREE_LISTs) of trampolines for nested functions.
315 The trampoline sets up the static chain and jumps to the function.
316 We supply the trampoline's address when the function's address is requested.
317
318 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
319 in an RTL_EXPR in the TREE_VALUE. */
320static tree trampoline_list;
321
322/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
323static rtx parm_birth_insn;
324
325#if 0
326/* Nonzero if a stack slot has been generated whose address is not
327 actually valid. It means that the generated rtl must all be scanned
328 to detect and correct the invalid addresses where they occur. */
329static int invalid_stack_slot;
330#endif
331
332/* Last insn of those whose job was to put parms into their nominal homes. */
333static rtx last_parm_insn;
334
e9a25f70
JL
335/* 1 + last pseudo register number possibly used for loading a copy
336 of a parameter of this function. */
337int max_parm_reg;
6f086dfc
RS
338
339/* Vector indexed by REGNO, containing location on stack in which
340 to put the parm which is nominally in pseudo register REGNO,
e9a25f70
JL
341 if we discover that that parm must go in the stack. The highest
342 element in this vector is one less than MAX_PARM_REG, above. */
343rtx *parm_reg_stack_loc;
6f086dfc 344
6f086dfc
RS
345/* Nonzero once virtual register instantiation has been done.
346 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
347static int virtuals_instantiated;
348
46766466
RS
349/* These variables hold pointers to functions to
350 save and restore machine-specific data,
351 in push_function_context and pop_function_context. */
9e014ded
RK
352void (*save_machine_status) PROTO((struct function *));
353void (*restore_machine_status) PROTO((struct function *));
46766466 354
6f086dfc
RS
355/* Nonzero if we need to distinguish between the return value of this function
356 and the return value of a function called by this function. This helps
357 integrate.c */
358
359extern int rtx_equal_function_value_matters;
e7a84011 360extern tree sequence_rtl_expr;
6f086dfc
RS
361\f
362/* In order to evaluate some expressions, such as function calls returning
363 structures in memory, we need to temporarily allocate stack locations.
364 We record each allocated temporary in the following structure.
365
366 Associated with each temporary slot is a nesting level. When we pop up
367 one level, all temporaries associated with the previous level are freed.
368 Normally, all temporaries are freed after the execution of the statement
369 in which they were created. However, if we are inside a ({...}) grouping,
370 the result may be in a temporary and hence must be preserved. If the
371 result could be in a temporary, we preserve it if we can determine which
372 one it is in. If we cannot determine which temporary may contain the
373 result, all temporaries are preserved. A temporary is preserved by
374 pretending it was allocated at the previous nesting level.
375
376 Automatic variables are also assigned temporary slots, at the nesting
377 level where they are defined. They are marked a "kept" so that
378 free_temp_slots will not free them. */
379
380struct temp_slot
381{
382 /* Points to next temporary slot. */
383 struct temp_slot *next;
0f41302f 384 /* The rtx to used to reference the slot. */
6f086dfc 385 rtx slot;
e5e76139
RK
386 /* The rtx used to represent the address if not the address of the
387 slot above. May be an EXPR_LIST if multiple addresses exist. */
388 rtx address;
6f086dfc 389 /* The size, in units, of the slot. */
e5e809f4 390 HOST_WIDE_INT size;
e7a84011
RK
391 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
392 tree rtl_expr;
6f086dfc
RS
393 /* Non-zero if this temporary is currently in use. */
394 char in_use;
a25d4ba2
RK
395 /* Non-zero if this temporary has its address taken. */
396 char addr_taken;
6f086dfc
RS
397 /* Nesting level at which this slot is being used. */
398 int level;
399 /* Non-zero if this should survive a call to free_temp_slots. */
400 int keep;
fc91b0d0
RK
401 /* The offset of the slot from the frame_pointer, including extra space
402 for alignment. This info is for combine_temp_slots. */
e5e809f4 403 HOST_WIDE_INT base_offset;
fc91b0d0
RK
404 /* The size of the slot, including extra space for alignment. This
405 info is for combine_temp_slots. */
e5e809f4 406 HOST_WIDE_INT full_size;
6f086dfc
RS
407};
408
409/* List of all temporaries allocated, both available and in use. */
410
411struct temp_slot *temp_slots;
412
413/* Current nesting level for temporaries. */
414
415int temp_slot_level;
e5e809f4
JL
416
417/* Current nesting level for variables in a block. */
418
419int var_temp_slot_level;
f5963e61
JL
420
421/* When temporaries are created by TARGET_EXPRs, they are created at
422 this level of temp_slot_level, so that they can remain allocated
423 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
424 of TARGET_EXPRs. */
425int target_temp_slot_level;
6f086dfc 426\f
e15679f8
RK
427/* This structure is used to record MEMs or pseudos used to replace VAR, any
428 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
429 maintain this list in case two operands of an insn were required to match;
430 in that case we must ensure we use the same replacement. */
431
432struct fixup_replacement
433{
434 rtx old;
435 rtx new;
436 struct fixup_replacement *next;
437};
438
439/* Forward declarations. */
440
1ac4f799
JL
441static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
442 int, struct function *));
e15679f8
RK
443static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
444static void put_reg_into_stack PROTO((struct function *, rtx, tree,
0006e95b 445 enum machine_mode, enum machine_mode,
e5e809f4 446 int, int, int));
e15679f8
RK
447static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
448static struct fixup_replacement
449 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
450static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
451 rtx, int));
452static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
453 struct fixup_replacement **));
454static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
455static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
456static rtx fixup_stack_1 PROTO((rtx, rtx));
457static void optimize_bit_field PROTO((rtx, rtx, rtx *));
458static void instantiate_decls PROTO((tree, int));
459static void instantiate_decls_1 PROTO((tree, int));
460static void instantiate_decl PROTO((rtx, int, int));
461static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
462static void delete_handlers PROTO((void));
463static void pad_to_arg_alignment PROTO((struct args_size *, int));
51723711 464#ifndef ARGS_GROW_DOWNWARD
e15679f8
RK
465static void pad_below PROTO((struct args_size *, enum machine_mode,
466 tree));
51723711 467#endif
487a6e06 468#ifdef ARGS_GROW_DOWNWARD
e15679f8 469static tree round_down PROTO((tree, int));
487a6e06 470#endif
e15679f8
RK
471static rtx round_trampoline_addr PROTO((rtx));
472static tree blocks_nreverse PROTO((tree));
473static int all_blocks PROTO((tree, tree *));
081f5e7e 474#if defined (HAVE_prologue) || defined (HAVE_epilogue)
487a6e06 475static int *record_insns PROTO((rtx));
e15679f8 476static int contains PROTO((rtx, int *));
081f5e7e 477#endif /* HAVE_prologue || HAVE_epilogue */
e9a25f70 478static void put_addressof_into_stack PROTO((rtx));
f7b6d104 479static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
c20bf1f3 480\f
6f086dfc
RS
481/* Pointer to chain of `struct function' for containing functions. */
482struct function *outer_function_chain;
483
484/* Given a function decl for a containing function,
485 return the `struct function' for it. */
486
487struct function *
488find_function_data (decl)
489 tree decl;
490{
491 struct function *p;
e5e809f4 492
6f086dfc
RS
493 for (p = outer_function_chain; p; p = p->next)
494 if (p->decl == decl)
495 return p;
e5e809f4 496
6f086dfc
RS
497 abort ();
498}
499
500/* Save the current context for compilation of a nested function.
501 This is called from language-specific code.
502 The caller is responsible for saving any language-specific status,
6dc42e49 503 since this function knows only about language-independent variables. */
6f086dfc
RS
504
505void
a0dabda5
JM
506push_function_context_to (context)
507 tree context;
6f086dfc
RS
508{
509 struct function *p = (struct function *) xmalloc (sizeof (struct function));
510
511 p->next = outer_function_chain;
512 outer_function_chain = p;
513
514 p->name = current_function_name;
515 p->decl = current_function_decl;
516 p->pops_args = current_function_pops_args;
517 p->returns_struct = current_function_returns_struct;
518 p->returns_pcc_struct = current_function_returns_pcc_struct;
1651bdfe 519 p->returns_pointer = current_function_returns_pointer;
6f086dfc
RS
520 p->needs_context = current_function_needs_context;
521 p->calls_setjmp = current_function_calls_setjmp;
522 p->calls_longjmp = current_function_calls_longjmp;
523 p->calls_alloca = current_function_calls_alloca;
524 p->has_nonlocal_label = current_function_has_nonlocal_label;
8634413a 525 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
a0dabda5 526 p->contains_functions = current_function_contains_functions;
acd693d1 527 p->has_computed_jump = current_function_has_computed_jump;
173cd503 528 p->is_thunk = current_function_is_thunk;
6f086dfc
RS
529 p->args_size = current_function_args_size;
530 p->pretend_args_size = current_function_pretend_args_size;
531 p->arg_offset_rtx = current_function_arg_offset_rtx;
3b69d50e 532 p->varargs = current_function_varargs;
ebb904cb 533 p->stdarg = current_function_stdarg;
6f086dfc
RS
534 p->uses_const_pool = current_function_uses_const_pool;
535 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
536 p->internal_arg_pointer = current_function_internal_arg_pointer;
aeb302bb 537 p->cannot_inline = current_function_cannot_inline;
6f086dfc
RS
538 p->max_parm_reg = max_parm_reg;
539 p->parm_reg_stack_loc = parm_reg_stack_loc;
540 p->outgoing_args_size = current_function_outgoing_args_size;
541 p->return_rtx = current_function_return_rtx;
ba716ac9 542 p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
6f086dfc
RS
543 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
544 p->nonlocal_labels = nonlocal_labels;
545 p->cleanup_label = cleanup_label;
546 p->return_label = return_label;
547 p->save_expr_regs = save_expr_regs;
548 p->stack_slot_list = stack_slot_list;
549 p->parm_birth_insn = parm_birth_insn;
550 p->frame_offset = frame_offset;
551 p->tail_recursion_label = tail_recursion_label;
552 p->tail_recursion_reentry = tail_recursion_reentry;
553 p->arg_pointer_save_area = arg_pointer_save_area;
554 p->rtl_expr_chain = rtl_expr_chain;
555 p->last_parm_insn = last_parm_insn;
556 p->context_display = context_display;
557 p->trampoline_list = trampoline_list;
558 p->function_call_count = function_call_count;
559 p->temp_slots = temp_slots;
560 p->temp_slot_level = temp_slot_level;
e5e809f4
JL
561 p->target_temp_slot_level = target_temp_slot_level;
562 p->var_temp_slot_level = var_temp_slot_level;
6f086dfc 563 p->fixup_var_refs_queue = 0;
f979c996 564 p->epilogue_delay_list = current_function_epilogue_delay_list;
01c1558a 565 p->args_info = current_function_args_info;
7d384cc0 566 p->check_memory_usage = current_function_check_memory_usage;
07417085 567 p->instrument_entry_exit = current_function_instrument_entry_exit;
6f086dfc 568
a0dabda5 569 save_tree_status (p, context);
6f086dfc
RS
570 save_storage_status (p);
571 save_emit_status (p);
6f086dfc
RS
572 save_expr_status (p);
573 save_stmt_status (p);
e9a25f70 574 save_varasm_status (p, context);
46766466
RS
575 if (save_machine_status)
576 (*save_machine_status) (p);
6f086dfc
RS
577}
578
e4a4639e
JM
579void
580push_function_context ()
581{
a0dabda5 582 push_function_context_to (current_function_decl);
e4a4639e
JM
583}
584
6f086dfc
RS
585/* Restore the last saved context, at the end of a nested function.
586 This function is called from language-specific code. */
587
588void
a0dabda5
JM
589pop_function_context_from (context)
590 tree context;
6f086dfc
RS
591{
592 struct function *p = outer_function_chain;
e5e809f4 593 struct var_refs_queue *queue;
6f086dfc
RS
594
595 outer_function_chain = p->next;
596
49468af2
RK
597 current_function_contains_functions
598 = p->contains_functions || p->inline_obstacks
599 || context == current_function_decl;
acd693d1 600 current_function_has_computed_jump = p->has_computed_jump;
6f086dfc
RS
601 current_function_name = p->name;
602 current_function_decl = p->decl;
603 current_function_pops_args = p->pops_args;
604 current_function_returns_struct = p->returns_struct;
605 current_function_returns_pcc_struct = p->returns_pcc_struct;
1651bdfe 606 current_function_returns_pointer = p->returns_pointer;
6f086dfc
RS
607 current_function_needs_context = p->needs_context;
608 current_function_calls_setjmp = p->calls_setjmp;
609 current_function_calls_longjmp = p->calls_longjmp;
610 current_function_calls_alloca = p->calls_alloca;
611 current_function_has_nonlocal_label = p->has_nonlocal_label;
8634413a 612 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
173cd503 613 current_function_is_thunk = p->is_thunk;
6f086dfc
RS
614 current_function_args_size = p->args_size;
615 current_function_pretend_args_size = p->pretend_args_size;
616 current_function_arg_offset_rtx = p->arg_offset_rtx;
3b69d50e 617 current_function_varargs = p->varargs;
ebb904cb 618 current_function_stdarg = p->stdarg;
6f086dfc
RS
619 current_function_uses_const_pool = p->uses_const_pool;
620 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
621 current_function_internal_arg_pointer = p->internal_arg_pointer;
aeb302bb 622 current_function_cannot_inline = p->cannot_inline;
6f086dfc
RS
623 max_parm_reg = p->max_parm_reg;
624 parm_reg_stack_loc = p->parm_reg_stack_loc;
625 current_function_outgoing_args_size = p->outgoing_args_size;
626 current_function_return_rtx = p->return_rtx;
ba716ac9 627 nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
6f086dfc
RS
628 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
629 nonlocal_labels = p->nonlocal_labels;
630 cleanup_label = p->cleanup_label;
631 return_label = p->return_label;
632 save_expr_regs = p->save_expr_regs;
633 stack_slot_list = p->stack_slot_list;
634 parm_birth_insn = p->parm_birth_insn;
635 frame_offset = p->frame_offset;
636 tail_recursion_label = p->tail_recursion_label;
637 tail_recursion_reentry = p->tail_recursion_reentry;
638 arg_pointer_save_area = p->arg_pointer_save_area;
639 rtl_expr_chain = p->rtl_expr_chain;
640 last_parm_insn = p->last_parm_insn;
641 context_display = p->context_display;
642 trampoline_list = p->trampoline_list;
643 function_call_count = p->function_call_count;
644 temp_slots = p->temp_slots;
645 temp_slot_level = p->temp_slot_level;
e5e809f4
JL
646 target_temp_slot_level = p->target_temp_slot_level;
647 var_temp_slot_level = p->var_temp_slot_level;
f979c996 648 current_function_epilogue_delay_list = p->epilogue_delay_list;
7cbc7b0c 649 reg_renumber = 0;
01c1558a 650 current_function_args_info = p->args_info;
7d384cc0 651 current_function_check_memory_usage = p->check_memory_usage;
07417085 652 current_function_instrument_entry_exit = p->instrument_entry_exit;
6f086dfc 653
d1485032 654 restore_tree_status (p, context);
6f086dfc
RS
655 restore_storage_status (p);
656 restore_expr_status (p);
657 restore_emit_status (p);
658 restore_stmt_status (p);
a506307a 659 restore_varasm_status (p);
6f086dfc 660
46766466
RS
661 if (restore_machine_status)
662 (*restore_machine_status) (p);
663
6f086dfc
RS
664 /* Finish doing put_var_into_stack for any of our variables
665 which became addressable during the nested function. */
e5e809f4
JL
666 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
667 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
6f086dfc
RS
668
669 free (p);
670
671 /* Reset variables that have known state during rtx generation. */
672 rtx_equal_function_value_matters = 1;
673 virtuals_instantiated = 0;
674}
e4a4639e
JM
675
676void pop_function_context ()
677{
a0dabda5 678 pop_function_context_from (current_function_decl);
e4a4639e 679}
6f086dfc
RS
680\f
681/* Allocate fixed slots in the stack frame of the current function. */
682
683/* Return size needed for stack frame based on slots so far allocated.
c795bca9 684 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
6f086dfc
RS
685 the caller may have to do that. */
686
8af5168b 687HOST_WIDE_INT
6f086dfc
RS
688get_frame_size ()
689{
690#ifdef FRAME_GROWS_DOWNWARD
691 return -frame_offset;
692#else
693 return frame_offset;
694#endif
695}
696
697/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
698 with machine mode MODE.
699
700 ALIGN controls the amount of alignment for the address of the slot:
701 0 means according to MODE,
702 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
703 positive specifies alignment boundary in bits.
704
705 We do not round to stack_boundary here. */
706
707rtx
708assign_stack_local (mode, size, align)
709 enum machine_mode mode;
e5e809f4 710 HOST_WIDE_INT size;
6f086dfc
RS
711 int align;
712{
713 register rtx x, addr;
714 int bigend_correction = 0;
715 int alignment;
716
717 if (align == 0)
718 {
719 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
720 if (mode == BLKmode)
721 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
722 }
723 else if (align == -1)
724 {
725 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
726 size = CEIL_ROUND (size, alignment);
727 }
728 else
729 alignment = align / BITS_PER_UNIT;
730
6f086dfc
RS
731 /* Round frame offset to that alignment.
732 We must be careful here, since FRAME_OFFSET might be negative and
733 division with a negative dividend isn't as well defined as we might
734 like. So we instead assume that ALIGNMENT is a power of two and
735 use logical operations which are unambiguous. */
736#ifdef FRAME_GROWS_DOWNWARD
737 frame_offset = FLOOR_ROUND (frame_offset, alignment);
738#else
739 frame_offset = CEIL_ROUND (frame_offset, alignment);
740#endif
741
742 /* On a big-endian machine, if we are allocating more space than we will use,
743 use the least significant bytes of those that are allocated. */
f76b9db2 744 if (BYTES_BIG_ENDIAN && mode != BLKmode)
6f086dfc 745 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc
RS
746
747#ifdef FRAME_GROWS_DOWNWARD
748 frame_offset -= size;
749#endif
750
751 /* If we have already instantiated virtual registers, return the actual
752 address relative to the frame pointer. */
753 if (virtuals_instantiated)
754 addr = plus_constant (frame_pointer_rtx,
755 (frame_offset + bigend_correction
756 + STARTING_FRAME_OFFSET));
757 else
758 addr = plus_constant (virtual_stack_vars_rtx,
759 frame_offset + bigend_correction);
760
761#ifndef FRAME_GROWS_DOWNWARD
762 frame_offset += size;
763#endif
764
38a448ca 765 x = gen_rtx_MEM (mode, addr);
6f086dfc 766
38a448ca 767 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
6f086dfc
RS
768
769 return x;
770}
771
772/* Assign a stack slot in a containing function.
773 First three arguments are same as in preceding function.
774 The last argument specifies the function to allocate in. */
775
1ac4f799 776static rtx
6f086dfc
RS
777assign_outer_stack_local (mode, size, align, function)
778 enum machine_mode mode;
e5e809f4 779 HOST_WIDE_INT size;
6f086dfc
RS
780 int align;
781 struct function *function;
782{
783 register rtx x, addr;
784 int bigend_correction = 0;
785 int alignment;
786
787 /* Allocate in the memory associated with the function in whose frame
788 we are assigning. */
789 push_obstacks (function->function_obstack,
790 function->function_maybepermanent_obstack);
791
792 if (align == 0)
793 {
794 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
795 if (mode == BLKmode)
796 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
797 }
798 else if (align == -1)
799 {
800 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
801 size = CEIL_ROUND (size, alignment);
802 }
803 else
804 alignment = align / BITS_PER_UNIT;
805
6f086dfc
RS
806 /* Round frame offset to that alignment. */
807#ifdef FRAME_GROWS_DOWNWARD
2af69b62 808 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
6f086dfc 809#else
2af69b62 810 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
6f086dfc
RS
811#endif
812
813 /* On a big-endian machine, if we are allocating more space than we will use,
814 use the least significant bytes of those that are allocated. */
f76b9db2 815 if (BYTES_BIG_ENDIAN && mode != BLKmode)
6f086dfc 816 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc
RS
817
818#ifdef FRAME_GROWS_DOWNWARD
819 function->frame_offset -= size;
820#endif
821 addr = plus_constant (virtual_stack_vars_rtx,
822 function->frame_offset + bigend_correction);
823#ifndef FRAME_GROWS_DOWNWARD
824 function->frame_offset += size;
825#endif
826
38a448ca 827 x = gen_rtx_MEM (mode, addr);
6f086dfc
RS
828
829 function->stack_slot_list
38a448ca 830 = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
6f086dfc
RS
831
832 pop_obstacks ();
833
834 return x;
835}
836\f
837/* Allocate a temporary stack slot and record it for possible later
838 reuse.
839
840 MODE is the machine mode to be given to the returned rtx.
841
842 SIZE is the size in units of the space required. We do no rounding here
843 since assign_stack_local will do any required rounding.
844
d93d4205
MS
845 KEEP is 1 if this slot is to be retained after a call to
846 free_temp_slots. Automatic variables for a block are allocated
e5e809f4
JL
847 with this flag. KEEP is 2 if we allocate a longer term temporary,
848 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
849 if we are to allocate something at an inner level to be treated as
850 a variable in the block (e.g., a SAVE_EXPR). */
6f086dfc
RS
851
852rtx
853assign_stack_temp (mode, size, keep)
854 enum machine_mode mode;
e5e809f4 855 HOST_WIDE_INT size;
6f086dfc
RS
856 int keep;
857{
858 struct temp_slot *p, *best_p = 0;
859
303ec2aa
RK
860 /* If SIZE is -1 it means that somebody tried to allocate a temporary
861 of a variable size. */
862 if (size == -1)
863 abort ();
864
6f086dfc
RS
865 /* First try to find an available, already-allocated temporary that is the
866 exact size we require. */
867 for (p = temp_slots; p; p = p->next)
868 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
869 break;
870
871 /* If we didn't find, one, try one that is larger than what we want. We
872 find the smallest such. */
873 if (p == 0)
874 for (p = temp_slots; p; p = p->next)
875 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
876 && (best_p == 0 || best_p->size > p->size))
877 best_p = p;
878
879 /* Make our best, if any, the one to use. */
880 if (best_p)
a45035b6
JW
881 {
882 /* If there are enough aligned bytes left over, make them into a new
883 temp_slot so that the extra bytes don't get wasted. Do this only
884 for BLKmode slots, so that we can be sure of the alignment. */
885 if (GET_MODE (best_p->slot) == BLKmode)
886 {
887 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
e5e809f4 888 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
889
890 if (best_p->size - rounded_size >= alignment)
891 {
892 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
a25d4ba2 893 p->in_use = p->addr_taken = 0;
a45035b6 894 p->size = best_p->size - rounded_size;
307d8cd6
RK
895 p->base_offset = best_p->base_offset + rounded_size;
896 p->full_size = best_p->full_size - rounded_size;
38a448ca
RH
897 p->slot = gen_rtx_MEM (BLKmode,
898 plus_constant (XEXP (best_p->slot, 0),
899 rounded_size));
e5e76139 900 p->address = 0;
84e24c03 901 p->rtl_expr = 0;
a45035b6
JW
902 p->next = temp_slots;
903 temp_slots = p;
904
38a448ca
RH
905 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
906 stack_slot_list);
a45035b6
JW
907
908 best_p->size = rounded_size;
291dde90 909 best_p->full_size = rounded_size;
a45035b6
JW
910 }
911 }
912
913 p = best_p;
914 }
915
6f086dfc
RS
916 /* If we still didn't find one, make a new temporary. */
917 if (p == 0)
918 {
e5e809f4
JL
919 HOST_WIDE_INT frame_offset_old = frame_offset;
920
6f086dfc 921 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
e5e809f4 922
6f086dfc
RS
923 /* If the temp slot mode doesn't indicate the alignment,
924 use the largest possible, so no one will be disappointed. */
e5e76139 925 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
e5e809f4 926
b2a80c0d
DE
927 /* The following slot size computation is necessary because we don't
928 know the actual size of the temporary slot until assign_stack_local
929 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
930 requested temporary. Note that extra space added for alignment
931 can be either above or below this stack slot depending on which
932 way the frame grows. We include the extra space if and only if it
933 is above this slot. */
b2a80c0d
DE
934#ifdef FRAME_GROWS_DOWNWARD
935 p->size = frame_offset_old - frame_offset;
936#else
fc91b0d0
RK
937 p->size = size;
938#endif
e5e809f4 939
fc91b0d0
RK
940 /* Now define the fields used by combine_temp_slots. */
941#ifdef FRAME_GROWS_DOWNWARD
942 p->base_offset = frame_offset;
943 p->full_size = frame_offset_old - frame_offset;
944#else
945 p->base_offset = frame_offset_old;
946 p->full_size = frame_offset - frame_offset_old;
b2a80c0d 947#endif
e5e76139 948 p->address = 0;
6f086dfc
RS
949 p->next = temp_slots;
950 temp_slots = p;
951 }
952
953 p->in_use = 1;
a25d4ba2 954 p->addr_taken = 0;
e7a84011 955 p->rtl_expr = sequence_rtl_expr;
a25d4ba2 956
d93d4205
MS
957 if (keep == 2)
958 {
959 p->level = target_temp_slot_level;
960 p->keep = 0;
961 }
e5e809f4
JL
962 else if (keep == 3)
963 {
964 p->level = var_temp_slot_level;
965 p->keep = 0;
966 }
d93d4205
MS
967 else
968 {
969 p->level = temp_slot_level;
970 p->keep = keep;
971 }
1995f267
RK
972
973 /* We may be reusing an old slot, so clear any MEM flags that may have been
974 set from before. */
975 RTX_UNCHANGING_P (p->slot) = 0;
976 MEM_IN_STRUCT_P (p->slot) = 0;
c6df88cb
MM
977 MEM_SCALAR_P (p->slot) = 0;
978 MEM_ALIAS_SET (p->slot) = 0;
6f086dfc
RS
979 return p->slot;
980}
638141a6 981\f
230f21b4
PB
982/* Assign a temporary of given TYPE.
983 KEEP is as for assign_stack_temp.
984 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
b55d9ff8
RK
985 it is 0 if a register is OK.
986 DONT_PROMOTE is 1 if we should not promote values in register
987 to wider modes. */
230f21b4
PB
988
989rtx
b55d9ff8 990assign_temp (type, keep, memory_required, dont_promote)
230f21b4
PB
991 tree type;
992 int keep;
993 int memory_required;
b55d9ff8 994 int dont_promote;
230f21b4
PB
995{
996 enum machine_mode mode = TYPE_MODE (type);
638141a6
RK
997 int unsignedp = TREE_UNSIGNED (type);
998
230f21b4
PB
999 if (mode == BLKmode || memory_required)
1000 {
e5e809f4 1001 HOST_WIDE_INT size = int_size_in_bytes (type);
230f21b4
PB
1002 rtx tmp;
1003
1004 /* Unfortunately, we don't yet know how to allocate variable-sized
1005 temporaries. However, sometimes we have a fixed upper limit on
1006 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
0f41302f 1007 instead. This is the case for Chill variable-sized strings. */
230f21b4
PB
1008 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
1009 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
1010 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
1011 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
1012
1013 tmp = assign_stack_temp (mode, size, keep);
c6df88cb 1014 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
230f21b4
PB
1015 return tmp;
1016 }
638141a6 1017
230f21b4 1018#ifndef PROMOTE_FOR_CALL_ONLY
b55d9ff8
RK
1019 if (! dont_promote)
1020 mode = promote_mode (type, mode, &unsignedp, 0);
230f21b4 1021#endif
638141a6 1022
230f21b4
PB
1023 return gen_reg_rtx (mode);
1024}
638141a6 1025\f
a45035b6
JW
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  /* Walk the list with trailing pointers (PREV_P/PREV_Q) so a node can be
     unlinked in place; when P itself is deleted, resume from PREV_P.  */
  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      /* Only free BLKmode slots are candidates for merging.  */
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
6f086dfc 1090\f
e5e76139
RK
1091/* Find the temp slot corresponding to the object at address X. */
1092
1093static struct temp_slot *
1094find_temp_slot_from_address (x)
1095 rtx x;
1096{
1097 struct temp_slot *p;
1098 rtx next;
1099
1100 for (p = temp_slots; p; p = p->next)
1101 {
1102 if (! p->in_use)
1103 continue;
e5e809f4 1104
e5e76139 1105 else if (XEXP (p->slot, 0) == x
abb52246
RK
1106 || p->address == x
1107 || (GET_CODE (x) == PLUS
1108 && XEXP (x, 0) == virtual_stack_vars_rtx
1109 && GET_CODE (XEXP (x, 1)) == CONST_INT
1110 && INTVAL (XEXP (x, 1)) >= p->base_offset
1111 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
e5e76139
RK
1112 return p;
1113
1114 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1115 for (next = p->address; next; next = XEXP (next, 1))
1116 if (XEXP (next, 0) == x)
1117 return p;
1118 }
1119
1120 return 0;
1121}
1122
9faa82d8 1123/* Indicate that NEW is an alternate way of referring to the temp slot
e5e809f4 1124 that previously was known by OLD. */
e5e76139
RK
1125
1126void
1127update_temp_slot_address (old, new)
1128 rtx old, new;
1129{
1130 struct temp_slot *p = find_temp_slot_from_address (old);
1131
1132 /* If none, return. Else add NEW as an alias. */
1133 if (p == 0)
1134 return;
1135 else if (p->address == 0)
1136 p->address = new;
1137 else
1138 {
1139 if (GET_CODE (p->address) != EXPR_LIST)
38a448ca 1140 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
e5e76139 1141
38a448ca 1142 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
e5e76139
RK
1143 }
1144}
1145
a25d4ba2 1146/* If X could be a reference to a temporary slot, mark the fact that its
9faa82d8 1147 address was taken. */
a25d4ba2
RK
1148
1149void
1150mark_temp_addr_taken (x)
1151 rtx x;
1152{
1153 struct temp_slot *p;
1154
1155 if (x == 0)
1156 return;
1157
1158 /* If X is not in memory or is at a constant address, it cannot be in
1159 a temporary slot. */
1160 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1161 return;
1162
1163 p = find_temp_slot_from_address (XEXP (x, 0));
1164 if (p != 0)
1165 p->addr_taken = 1;
1166}
1167
9cca6a99
MS
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
1240
422c8f63
RK
1241/* X is the result of an RTL_EXPR. If it is a temporary slot associated
1242 with that RTL_EXPR, promote it into a temporary slot at the present
1243 level so it will not be freed when we free slots made in the
1244 RTL_EXPR. */
1245
1246void
1247preserve_rtl_expr_result (x)
1248 rtx x;
1249{
1250 struct temp_slot *p;
1251
1252 /* If X is not in memory or is at a constant address, it cannot be in
1253 a temporary slot. */
1254 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1255 return;
1256
199b61d8
RK
1257 /* If we can find a match, move it to our level unless it is already at
1258 an upper level. */
1259 p = find_temp_slot_from_address (XEXP (x, 0));
1260 if (p != 0)
1261 {
1262 p->level = MIN (p->level, temp_slot_level);
1263 p->rtl_expr = 0;
1264 }
422c8f63
RK
1265
1266 return;
1267}
1268
6f086dfc 1269/* Free all temporaries used so far. This is normally called at the end
e7a84011
RK
1270 of generating code for a statement. Don't free any temporaries
1271 currently in use for an RTL_EXPR that hasn't yet been emitted.
1272 We could eventually do better than this since it can be reused while
1273 generating the same RTL_EXPR, but this is complex and probably not
1274 worthwhile. */
6f086dfc
RS
1275
1276void
1277free_temp_slots ()
1278{
1279 struct temp_slot *p;
1280
1281 for (p = temp_slots; p; p = p->next)
e7a84011
RK
1282 if (p->in_use && p->level == temp_slot_level && ! p->keep
1283 && p->rtl_expr == 0)
1284 p->in_use = 0;
1285
1286 combine_temp_slots ();
1287}
1288
1289/* Free all temporary slots used in T, an RTL_EXPR node. */
1290
1291void
1292free_temps_for_rtl_expr (t)
1293 tree t;
1294{
1295 struct temp_slot *p;
1296
1297 for (p = temp_slots; p; p = p->next)
1298 if (p->rtl_expr == t)
6f086dfc 1299 p->in_use = 0;
a45035b6
JW
1300
1301 combine_temp_slots ();
6f086dfc
RS
1302}
1303
956d6950 1304/* Mark all temporaries ever allocated in this function as not suitable
a94e4054
RK
1305 for reuse until the current level is exited. */
1306
1307void
1308mark_all_temps_used ()
1309{
1310 struct temp_slot *p;
1311
1312 for (p = temp_slots; p; p = p->next)
1313 {
85b119d1 1314 p->in_use = p->keep = 1;
27ce006b 1315 p->level = MIN (p->level, temp_slot_level);
a94e4054
RK
1316 }
1317}
1318
6f086dfc
RS
1319/* Push deeper into the nesting level for stack temporaries. */
1320
1321void
1322push_temp_slots ()
1323{
6f086dfc
RS
1324 temp_slot_level++;
1325}
1326
e5e809f4
JL
1327/* Likewise, but save the new level as the place to allocate variables
1328 for blocks. */
1329
1330void
1331push_temp_slots_for_block ()
1332{
1333 push_temp_slots ();
1334
1335 var_temp_slot_level = temp_slot_level;
1336}
1337
f5963e61
JL
1338/* Likewise, but save the new level as the place to allocate temporaries
1339 for TARGET_EXPRs. */
1340
1341void
1342push_temp_slots_for_target ()
1343{
1344 push_temp_slots ();
1345
1346 target_temp_slot_level = temp_slot_level;
1347}
1348
1349/* Set and get the value of target_temp_slot_level. The only
1350 permitted use of these functions is to save and restore this value. */
1351
1352int
1353get_target_temp_slot_level ()
1354{
1355 return target_temp_slot_level;
1356}
1357
1358void
1359set_target_temp_slot_level (level)
1360 int level;
1361{
1362 target_temp_slot_level = level;
1363}
1364
6f086dfc
RS
1365/* Pop a temporary nesting level. All slots in use in the current level
1366 are freed. */
1367
1368void
1369pop_temp_slots ()
1370{
1371 struct temp_slot *p;
1372
6f086dfc 1373 for (p = temp_slots; p; p = p->next)
e7a84011 1374 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
6f086dfc
RS
1375 p->in_use = 0;
1376
a45035b6
JW
1377 combine_temp_slots ();
1378
6f086dfc
RS
1379 temp_slot_level--;
1380}
bc0ebdf9
RK
1381
1382/* Initialize temporary slots. */
1383
1384void
1385init_temp_slots ()
1386{
1387 /* We have not allocated any temporaries yet. */
1388 temp_slots = 0;
1389 temp_slot_level = 0;
e5e809f4 1390 var_temp_slot_level = 0;
bc0ebdf9
RK
1391 target_temp_slot_level = 0;
1392}
6f086dfc
RS
1393\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      /* From here on, operate on the address pseudo, not the object.  */
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* We may defer the stack move via an ADDRESSOF rtx only for a local
     (non-inherited) variable, when optimizing, and when the declared
     and stored modes agree.  */
  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl)
			    || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  /* When -fcheck-memory-usage is in effect, tell the checker runtime
     that the new stack home is readable and writable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), ptr_mode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
1520
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   ORIGINAL_REGNO, if nonzero, overrides REGNO (REG) when looking up a
   parameter's preassigned stack home.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  /* Reuse a parameter's preassigned stack location if there is one;
     otherwise allocate a fresh slot in the appropriate frame.  */
  if (function)
    {
      if (regno < function->max_parm_reg)
	new = function->parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  /* Mutate REG in place into a MEM at the slot's address, so every rtx
     that shares REG now refers to the stack home.  */
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
1602\f
/* Fix up all references to VAR, which has just been moved from a register
   to memory.  PROMOTED_MODE and UNSIGNEDP describe how VAR was promoted
   while it lived in a register.  Scans the main insn chain, all pending
   sequences, all unemitted RTL_EXPR sequences, and the catch clauses.  */

static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0);
  end_sequence ();
}
1646\f
e15679f8 1647/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
6f086dfc 1648 some part of an insn. Return a struct fixup_replacement whose OLD
0f41302f 1649 value is equal to X. Allocate a new structure if no such entry exists. */
6f086dfc
RS
1650
1651static struct fixup_replacement *
2740a678 1652find_fixup_replacement (replacements, x)
6f086dfc
RS
1653 struct fixup_replacement **replacements;
1654 rtx x;
1655{
1656 struct fixup_replacement *p;
1657
1658 /* See if we have already replaced this. */
1659 for (p = *replacements; p && p->old != x; p = p->next)
1660 ;
1661
1662 if (p == 0)
1663 {
1664 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1665 p->old = x;
1666 p->new = 0;
1667 p->next = *replacements;
1668 *replacements = p;
1669 }
1670
1671 return p;
1672}
1673
1674/* Scan the insn-chain starting with INSN for refs to VAR
1675 and fix them up. TOPLEVEL is nonzero if this chain is the
1676 main chain of insns for the current function. */
1677
static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  /* Walk forward over the whole chain.  NEXT is captured up front
     because INSN may be turned into a NOTE, or have fixup insns
     emitted before/after it, in the body below.  */
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      /* Only real insns (INSN, JUMP_INSN, CALL_INSN) need fixing.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  /* Remember the return-value register of a CALL_INSN so the
		     test above can fire on the following insn.  */
		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      /* Copy VAR (or its fixed-up SUBREG) into any pseudo that
		 fixup_var_refs_1 allocated as a replacement.  */
	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}
      insn = next;
    }
}
1862\f
00d8a4c1
RK
1863/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1864 See if the rtx expression at *LOC in INSN needs to be changed.
6f086dfc
RS
1865
1866 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1867 contain a list of original rtx's and replacements. If we find that we need
1868 to modify this insn by replacing a memory reference with a pseudo or by
1869 making a new MEM to implement a SUBREG, we consult that list to see if
1870 we have already chosen a replacement. If none has already been allocated,
1871 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1872 or the SUBREG, as appropriate, to the pseudo. */
1873
static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  start_sequence ();

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = force_operand (sub, NULL_RTX);

	      if (! validate_change (insn, loc, y, 0))
		*loc = copy_to_reg (y);
	    }

	  /* Emit any address-computation insns we generated above.  */
	  emit_insn_before (gen_sequence (), insn);
	  end_sequence ();
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  /* Insn is invalid with the MEM in place; substitute a fresh
	     pseudo, which fixup_var_refs_insns will load from VAR.  */
	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* These have no sub-expressions that could reference VAR.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  /* Paradoxical SUBREG of a MEM: extract from a pseudo
		     copy of VAR instead.  */
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx_MEM (wanted_mode,
					plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_COPY_ATTRIBUTES (newmem, tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
#ifdef HAVE_insv
	rtx outerdest = dest;
#endif

	/* Strip wrappers to find the register or MEM actually written.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

        if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
		if (wanted_mode == VOIDmode)
		  wanted_mode = word_mode;

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    /* If bytes and bits are counted differently, the
		       offset must be adjusted (see the extract case).  */
		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx_MEM (wanted_mode,
					  plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_COPY_ATTRIBUTES (newmem, tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* Same strategy as the copy-from-VAR case above: emit the
	       move as a sequence and delete INSN, or replace its pattern.  */
	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		promoted_mode = GET_MODE (fixeddest);
	      }
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (promoted_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      /* Fall through: any remaining references to VAR inside the SET
	 are handled by the generic operand scan below.  */

    default:
      break;
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
2416\f
2417/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2418 return an rtx (MEM:m1 newaddr) which is equivalent.
2419 If any insns must be emitted to compute NEWADDR, put them before INSN.
2420
2421 UNCRITICAL nonzero means accept paradoxical subregs.
0f41302f 2422 This is used for subregs found inside REG_NOTES. */
6f086dfc
RS
2423
2424static rtx
2425fixup_memory_subreg (x, insn, uncritical)
2426 rtx x;
2427 rtx insn;
2428 int uncritical;
2429{
2430 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2431 rtx addr = XEXP (SUBREG_REG (x), 0);
2432 enum machine_mode mode = GET_MODE (x);
29a82058 2433 rtx result;
6f086dfc
RS
2434
2435 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2436 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2437 && ! uncritical)
2438 abort ();
2439
f76b9db2
ILT
2440 if (BYTES_BIG_ENDIAN)
2441 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2442 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
6f086dfc
RS
2443 addr = plus_constant (addr, offset);
2444 if (!flag_force_addr && memory_address_p (mode, addr))
2445 /* Shortcut if no insns need be emitted. */
2446 return change_address (SUBREG_REG (x), mode, addr);
2447 start_sequence ();
2448 result = change_address (SUBREG_REG (x), mode, addr);
2449 emit_insn_before (gen_sequence (), insn);
2450 end_sequence ();
2451 return result;
2452}
2453
2454/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2455 Replace subexpressions of X in place.
2456 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2457 Otherwise return X, with its contents possibly altered.
2458
ab6155b7
RK
2459 If any insns must be emitted to compute NEWADDR, put them before INSN.
2460
2461 UNCRITICAL is as in fixup_memory_subreg. */
6f086dfc
RS
2462
2463static rtx
ab6155b7 2464walk_fixup_memory_subreg (x, insn, uncritical)
6f086dfc
RS
2465 register rtx x;
2466 rtx insn;
ab6155b7 2467 int uncritical;
6f086dfc
RS
2468{
2469 register enum rtx_code code;
2470 register char *fmt;
2471 register int i;
2472
2473 if (x == 0)
2474 return 0;
2475
2476 code = GET_CODE (x);
2477
2478 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
ab6155b7 2479 return fixup_memory_subreg (x, insn, uncritical);
6f086dfc
RS
2480
2481 /* Nothing special about this RTX; fix its operands. */
2482
2483 fmt = GET_RTX_FORMAT (code);
2484 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2485 {
2486 if (fmt[i] == 'e')
ab6155b7 2487 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
6f086dfc
RS
2488 if (fmt[i] == 'E')
2489 {
2490 register int j;
2491 for (j = 0; j < XVECLEN (x, i); j++)
2492 XVECEXP (x, i, j)
ab6155b7 2493 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
6f086dfc
RS
2494 }
2495 }
2496 return x;
2497}
2498\f
6f086dfc
RS
2499/* For each memory ref within X, if it refers to a stack slot
2500 with an out of range displacement, put the address in a temp register
2501 (emitting new insns before INSN to load these registers)
2502 and alter the memory ref to use that register.
2503 Replace each such MEM rtx with a copy, to avoid clobberage. */
2504
2505static rtx
2506fixup_stack_1 (x, insn)
2507 rtx x;
2508 rtx insn;
2509{
2510 register int i;
2511 register RTX_CODE code = GET_CODE (x);
2512 register char *fmt;
2513
2514 if (code == MEM)
2515 {
2516 register rtx ad = XEXP (x, 0);
2517 /* If we have address of a stack slot but it's not valid
2518 (displacement is too large), compute the sum in a register. */
2519 if (GET_CODE (ad) == PLUS
2520 && GET_CODE (XEXP (ad, 0)) == REG
40d05551
RK
2521 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2522 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
e9a25f70
JL
2523 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2524#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2525 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2526#endif
2527 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
956d6950 2528 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
40d05551 2529 || XEXP (ad, 0) == current_function_internal_arg_pointer)
6f086dfc
RS
2530 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2531 {
2532 rtx temp, seq;
2533 if (memory_address_p (GET_MODE (x), ad))
2534 return x;
2535
2536 start_sequence ();
2537 temp = copy_to_reg (ad);
2538 seq = gen_sequence ();
2539 end_sequence ();
2540 emit_insn_before (seq, insn);
2541 return change_address (x, VOIDmode, temp);
2542 }
2543 return x;
2544 }
2545
2546 fmt = GET_RTX_FORMAT (code);
2547 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2548 {
2549 if (fmt[i] == 'e')
2550 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2551 if (fmt[i] == 'E')
2552 {
2553 register int j;
2554 for (j = 0; j < XVECLEN (x, i); j++)
2555 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2556 }
2557 }
2558 return x;
2559}
2560\f
2561/* Optimization: a bit-field instruction whose field
2562 happens to be a byte or halfword in memory
2563 can be changed to a move instruction.
2564
2565 We call here when INSN is an insn to examine or store into a bit-field.
2566 BODY is the SET-rtx to be altered.
2567
2568 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2569 (Currently this is called only from function.c, and EQUIV_MEM
2570 is always 0.) */
2571
2572static void
2573optimize_bit_field (body, insn, equiv_mem)
2574 rtx body;
2575 rtx insn;
2576 rtx *equiv_mem;
2577{
2578 register rtx bitfield;
2579 int destflag;
2580 rtx seq = 0;
2581 enum machine_mode mode;
2582
2583 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2584 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2585 bitfield = SET_DEST (body), destflag = 1;
2586 else
2587 bitfield = SET_SRC (body), destflag = 0;
2588
2589 /* First check that the field being stored has constant size and position
2590 and is in fact a byte or halfword suitably aligned. */
2591
2592 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2593 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2594 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2595 != BLKmode)
2596 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2597 {
2598 register rtx memref = 0;
2599
2600 /* Now check that the containing word is memory, not a register,
2601 and that it is safe to change the machine mode. */
2602
2603 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2604 memref = XEXP (bitfield, 0);
2605 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2606 && equiv_mem != 0)
2607 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2608 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2609 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2610 memref = SUBREG_REG (XEXP (bitfield, 0));
2611 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2612 && equiv_mem != 0
2613 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2614 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2615
2616 if (memref
2617 && ! mode_dependent_address_p (XEXP (memref, 0))
2618 && ! MEM_VOLATILE_P (memref))
2619 {
2620 /* Now adjust the address, first for any subreg'ing
2621 that we are now getting rid of,
2622 and then for which byte of the word is wanted. */
2623
e5e809f4 2624 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
b88a3142
RK
2625 rtx insns;
2626
6f086dfc 2627 /* Adjust OFFSET to count bits from low-address byte. */
f76b9db2
ILT
2628 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2629 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2630 - offset - INTVAL (XEXP (bitfield, 1)));
2631
6f086dfc
RS
2632 /* Adjust OFFSET to count bytes from low-address byte. */
2633 offset /= BITS_PER_UNIT;
2634 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2635 {
2636 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
f76b9db2
ILT
2637 if (BYTES_BIG_ENDIAN)
2638 offset -= (MIN (UNITS_PER_WORD,
2639 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2640 - MIN (UNITS_PER_WORD,
2641 GET_MODE_SIZE (GET_MODE (memref))));
6f086dfc
RS
2642 }
2643
b88a3142
RK
2644 start_sequence ();
2645 memref = change_address (memref, mode,
6f086dfc 2646 plus_constant (XEXP (memref, 0), offset));
b88a3142
RK
2647 insns = get_insns ();
2648 end_sequence ();
2649 emit_insns_before (insns, insn);
6f086dfc
RS
2650
2651 /* Store this memory reference where
2652 we found the bit field reference. */
2653
2654 if (destflag)
2655 {
2656 validate_change (insn, &SET_DEST (body), memref, 1);
2657 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2658 {
2659 rtx src = SET_SRC (body);
2660 while (GET_CODE (src) == SUBREG
2661 && SUBREG_WORD (src) == 0)
2662 src = SUBREG_REG (src);
2663 if (GET_MODE (src) != GET_MODE (memref))
2664 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2665 validate_change (insn, &SET_SRC (body), src, 1);
2666 }
2667 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2668 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2669 /* This shouldn't happen because anything that didn't have
2670 one of these modes should have got converted explicitly
2671 and then referenced through a subreg.
2672 This is so because the original bit-field was
2673 handled by agg_mode and so its tree structure had
2674 the same mode that memref now has. */
2675 abort ();
2676 }
2677 else
2678 {
2679 rtx dest = SET_DEST (body);
2680
2681 while (GET_CODE (dest) == SUBREG
4013a709
RK
2682 && SUBREG_WORD (dest) == 0
2683 && (GET_MODE_CLASS (GET_MODE (dest))
ab87f8c8
JL
2684 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2685 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2686 <= UNITS_PER_WORD))
6f086dfc
RS
2687 dest = SUBREG_REG (dest);
2688
2689 validate_change (insn, &SET_DEST (body), dest, 1);
2690
2691 if (GET_MODE (dest) == GET_MODE (memref))
2692 validate_change (insn, &SET_SRC (body), memref, 1);
2693 else
2694 {
2695 /* Convert the mem ref to the destination mode. */
2696 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2697
2698 start_sequence ();
2699 convert_move (newreg, memref,
2700 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2701 seq = get_insns ();
2702 end_sequence ();
2703
2704 validate_change (insn, &SET_SRC (body), newreg, 1);
2705 }
2706 }
2707
2708 /* See if we can convert this extraction or insertion into
2709 a simple move insn. We might not be able to do so if this
2710 was, for example, part of a PARALLEL.
2711
2712 If we succeed, write out any needed conversions. If we fail,
2713 it is hard to guess why we failed, so don't do anything
2714 special; just let the optimization be suppressed. */
2715
2716 if (apply_change_group () && seq)
2717 emit_insns_before (seq, insn);
2718 }
2719 }
2720}
2721\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;	/* virtual_incoming_args_rtx vs. arg pointer.  */
static int var_offset;		/* virtual_stack_vars_rtx vs. frame pointer.  */
static int dynamic_offset;	/* virtual_stack_dynamic_rtx vs. stack pointer.  */
static int out_arg_offset;	/* virtual_outgoing_args_rtx vs. stack pointer.  */
static int cfa_offset;		/* virtual_cfa_rtx vs. arg pointer.  */

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif

/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif
2778
e9a25f70
JL
/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
   its address taken.  DECL is the decl for the object stored in the
   register, for later use if we do need to force REG into the stack.
   REG is overwritten by the MEM like in put_reg_into_stack.

   Returns REG, which on exit is no longer a REG but the MEM just built;
   callers holding the old pointer see the MEM because the rtx is
   transmuted in place.  */

rtx
gen_mem_addressof (reg, decl)
     rtx reg;
     tree decl;
{
  tree type = TREE_TYPE (decl);
  /* The ADDRESSOF carries a fresh pseudo plus the original register
     number, so the register can still be reconstituted later.  */
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
  SET_ADDRESSOF_DECL (r, decl);
  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);

  /* Rewrite REG in place into (MEM:mode (ADDRESSOF ...)).  The operand
     must be installed before the code is changed to MEM.  */
  XEXP (reg, 0) = r;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, DECL_MODE (decl));
  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
  MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
  MEM_ALIAS_SET (reg) = get_alias_set (decl);

  /* Fix up any existing references to the register now that it is a MEM.  */
  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));

  return reg;
}
2808
2809/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2810
2811void
2812flush_addressof (decl)
2813 tree decl;
2814{
2815 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2816 && DECL_RTL (decl) != 0
2817 && GET_CODE (DECL_RTL (decl)) == MEM
2818 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2819 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2820 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2821}
2822
2823/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2824
2825static void
2826put_addressof_into_stack (r)
2827 rtx r;
2828{
2829 tree decl = ADDRESSOF_DECL (r);
2830 rtx reg = XEXP (r, 0);
2831
2832 if (GET_CODE (reg) != REG)
2833 abort ();
2834
2835 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2836 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
e5e809f4
JL
2837 ADDRESSOF_REGNO (r),
2838 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
e9a25f70
JL
2839}
2840
b5bd3b3c
AS
/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  An EXPR_LIST chain of (old-rtx, new-rtx) pairs,
   consulted when purge_addressof_1 processes REG_NOTES (insn == 0) so the
   notes receive the same replacement as the insn body did.  Reset to 0 at
   the end of purge_addressof.  */
static rtx purge_addressof_replacements;
2844
e9a25f70
JL
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  If STORE, *LOC is the destination of a SET, so a replacement
   may require a bit-field insertion rather than an extraction.

   INSN may be 0 when *LOC comes from REG_NOTES; in that case replacements
   recorded earlier in purge_addressof_replacements are replayed instead of
   recomputed.  */

static void
purge_addressof_1 (loc, insn, force, store)
     rtx *loc;
     rtx insn;
     int force, store;
{
  rtx x;
  RTX_CODE code;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return;

  code = GET_CODE (x);

  if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
    {
      /* (ADDRESSOF (MEM ...)): the register was already forced to the
	 stack, so the ADDRESSOF reduces to the MEM's address.  */
      rtx insns;
      /* We must create a copy of the rtx because it was created by
	 overwriting a REG rtx which is always shared.  */
      rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));

      if (validate_change (insn, loc, sub, 0)
	  || validate_replace_rtx (x, sub, insn))
	return;

      /* The raw address was not valid in INSN; force it into an operand
	 with extra insns emitted before INSN.  */
      start_sequence ();
      sub = force_operand (sub, NULL_RTX);
      if (! validate_change (insn, loc, sub, 0)
	  && ! validate_replace_rtx (x, sub, insn))
	abort ();

      insns = gen_sequence ();
      end_sequence ();
      emit_insn_before (insns, insn);
      return;
    }
  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
    {
      /* (MEM (ADDRESSOF sub)): try to refer to SUB directly.  */
      rtx sub = XEXP (XEXP (x, 0), 0);
      rtx sub2;

      if (GET_CODE (sub) == MEM)
	{
	  /* Already stack-resident; rebuild a MEM in X's mode with the
	     same attributes.  */
	  sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
	  MEM_COPY_ATTRIBUTES (sub2, sub);
	  RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
	  sub = sub2;
	}

      if (GET_CODE (sub) == REG
	  && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	{
	  /* Volatile or BLKmode access cannot live in a register;
	     give the decl a real stack slot.  */
	  put_addressof_into_stack (XEXP (x, 0));
	  return;
	}
      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
	{
	  int size_x, size_sub;

	  if (!insn)
	    {
	      /* When processing REG_NOTES look at the list of
		 replacements done on the insn to find the register that X
		 was replaced by.  */
	      rtx tem;

	      for (tem = purge_addressof_replacements; tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		{
		  rtx y = XEXP (tem, 0);
		  if (GET_CODE (y) == MEM
		      && rtx_equal_p (XEXP (x, 0), XEXP (y, 0)))
		    {
		      /* It can happen that the note may speak of things in
			 a wider (or just different) mode than the code did.
			 This is especially true of REG_RETVAL.  */

		      rtx z = XEXP (XEXP (tem, 1), 0);
		      if (GET_MODE (x) != GET_MODE (y))
			{
			  if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
			    z = SUBREG_REG (z);

			  /* ??? If we'd gotten into any of the really complex
			     cases below, I'm not sure we can do a proper
			     replacement.  Might we be able to delete the
			     note in some cases?  */
			  if (GET_MODE_SIZE (GET_MODE (x))
			      < GET_MODE_SIZE (GET_MODE (y)))
			    abort ();

			  if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
			      && (GET_MODE_SIZE (GET_MODE (x))
				  > GET_MODE_SIZE (GET_MODE (z))))
			    {
			      /* This can occur as a result of invalid
				 pointer casts, e.g. float f; ...
				 *(long long int *)&f.
				 ??? We could emit a warning here, but
				 without a line number that wouldn't be
				 very helpful.  */
			      z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
			    }
			  else
			    z = gen_lowpart (GET_MODE (x), z);
			}

		      *loc = z;
		      return;
		    }
		}

	      /* There should always be such a replacement.  */
	      abort ();
	    }

	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));

	  /* Don't even consider working with paradoxical subregs,
	     or the moral equivalent seen here.  */
	  if (size_x <= size_sub
	      && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
	    {
	      /* Do a bitfield insertion to mirror what would happen
		 in memory.  */

	      rtx val, seq;

	      if (store)
		{
		  rtx p;

		  /* X is a SET destination: have INSN set a fresh pseudo
		     VAL instead, then insert VAL's bits into SUB after
		     INSN.  */
		  start_sequence ();
		  val = gen_reg_rtx (GET_MODE (x));
		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }
		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);

		  start_sequence ();
		  store_bit_field (sub, size_x, 0, GET_MODE (x),
				   val, GET_MODE_SIZE (GET_MODE (sub)),
				   GET_MODE_SIZE (GET_MODE (sub)));

		  /* Make sure to unshare any shared rtl that store_bit_field
		     might have created.  */
		  for (p = get_insns(); p; p = NEXT_INSN (p))
		    {
		      reset_used_flags (PATTERN (p));
		      reset_used_flags (REG_NOTES (p));
		      reset_used_flags (LOG_LINKS (p));
		    }
		  unshare_all_rtl (get_insns ());

		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_after (seq, insn);
		}
	      else
		{
		  /* X is a source: extract the bits from SUB into a fresh
		     pseudo before INSN and use that pseudo in INSN.  */
		  start_sequence ();
		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
					   GET_MODE (x), GET_MODE (x),
					   GET_MODE_SIZE (GET_MODE (sub)),
					   GET_MODE_SIZE (GET_MODE (sub)));

		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }

		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		}

	      /* Remember the replacement so that the same one can be done
		 on the REG_NOTES.  */
	      purge_addressof_replacements
		= gen_rtx_EXPR_LIST (VOIDmode, x,
				     gen_rtx_EXPR_LIST (VOIDmode, val,
							purge_addressof_replacements));

	      /* We replaced with a reg -- all done.  */
	      return;
	    }
	}
      else if (validate_change (insn, loc, sub, 0))
	{
	  /* Remember the replacement so that the same one can be done
	     on the REG_NOTES.  */
	  purge_addressof_replacements
	    = gen_rtx_EXPR_LIST (VOIDmode, x,
				 gen_rtx_EXPR_LIST (VOIDmode, sub,
						    purge_addressof_replacements));
	  goto restart;
	}
    give_up:;
      /* else give up and put it into the stack */
    }
  else if (code == ADDRESSOF)
    {
      put_addressof_into_stack (x);
      return;
    }
  else if (code == SET)
    {
      /* Propagate STORE=1 into the destination only.  */
      purge_addressof_1 (&SET_DEST (x), insn, force, 1);
      purge_addressof_1 (&SET_SRC (x), insn, force, 0);
      return;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	purge_addressof_1 (&XEXP (x, i), insn, force, 0);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
    }
}
3088
3089/* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3090 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3091 stack. */
3092
3093void
3094purge_addressof (insns)
3095 rtx insns;
3096{
3097 rtx insn;
3098 for (insn = insns; insn; insn = NEXT_INSN (insn))
3099 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3100 || GET_CODE (insn) == CALL_INSN)
3101 {
3102 purge_addressof_1 (&PATTERN (insn), insn,
f7b6d104
RH
3103 asm_noperands (PATTERN (insn)) > 0, 0);
3104 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
e9a25f70 3105 }
da9b1f9c 3106 purge_addressof_replacements = 0;
e9a25f70
JL
3107}
3108\f
6f086dfc
RS
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.

   Sets up the file-scope offset variables (in_arg_offset, var_offset,
   dynamic_offset, out_arg_offset, cfa_offset) used by
   instantiate_virtual_regs_1, and sets virtuals_instantiated on exit.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;
  int i;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
      }

  /* Instantiate the stack slots for the parm registers, for later use in
     addressof elimination.  */
  for (i = 0; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
3160
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (DECL_SAVED_INSNS (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
	 larger than the declared type size.  We must use the larger of
	 the two sizes.  */
      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
3207
3208/* Subroutine of instantiate_decls: Process all decls in the given
3209 BLOCK node and all its subblocks. */
3210
3211static void
3212instantiate_decls_1 (let, valid_only)
3213 tree let;
3214 int valid_only;
3215{
3216 tree t;
3217
3218 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
5a73491b
RK
3219 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3220 valid_only);
6f086dfc
RS
3221
3222 /* Process all subblocks. */
3223 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3224 instantiate_decls_1 (t, valid_only);
3225}
5a73491b 3226
/* Subroutine of the preceding procedures:  Given RTL representing a
   decl and the size of the object, do any instantiation required.

   X is the MEM whose address may contain virtual registers; SIZE is the
   object size in bytes, used to bound the address-validity check.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     int size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only)
    {
      /* Now verify that the resulting address is valid for every integer or
	 floating-point mode up to and including SIZE bytes long.  We do this
	 since the object might be accessed in any mode and frame addresses
	 are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;		/* Keep the original address.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;		/* Keep the original address.  */
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
6f086dfc
RS
3291\f
3292/* Given a pointer to a piece of rtx and an optional pointer to the
3293 containing object, instantiate any virtual registers present in it.
3294
3295 If EXTRA_INSNS, we always do the replacement and generate
3296 any extra insns before OBJECT. If it zero, we do nothing if replacement
3297 is not valid.
3298
3299 Return 1 if we either had nothing to do or if we were able to do the
3300 needed replacement. Return 0 otherwise; we only return zero if
3301 EXTRA_INSNS is zero.
3302
3303 We first try some simple transformations to avoid the creation of extra
3304 pseudos. */
3305
3306static int
3307instantiate_virtual_regs_1 (loc, object, extra_insns)
3308 rtx *loc;
3309 rtx object;
3310 int extra_insns;
3311{
3312 rtx x;
3313 RTX_CODE code;
3314 rtx new = 0;
e5e809f4 3315 HOST_WIDE_INT offset;
6f086dfc
RS
3316 rtx temp;
3317 rtx seq;
3318 int i, j;
3319 char *fmt;
3320
3321 /* Re-start here to avoid recursion in common cases. */
3322 restart:
3323
3324 x = *loc;
3325 if (x == 0)
3326 return 1;
3327
3328 code = GET_CODE (x);
3329
3330 /* Check for some special cases. */
3331 switch (code)
3332 {
3333 case CONST_INT:
3334 case CONST_DOUBLE:
3335 case CONST:
3336 case SYMBOL_REF:
3337 case CODE_LABEL:
3338 case PC:
3339 case CC0:
3340 case ASM_INPUT:
3341 case ADDR_VEC:
3342 case ADDR_DIFF_VEC:
3343 case RETURN:
3344 return 1;
3345
3346 case SET:
3347 /* We are allowed to set the virtual registers. This means that
38e01259 3348 the actual register should receive the source minus the
6f086dfc
RS
3349 appropriate offset. This is used, for example, in the handling
3350 of non-local gotos. */
3351 if (SET_DEST (x) == virtual_incoming_args_rtx)
3352 new = arg_pointer_rtx, offset = - in_arg_offset;
3353 else if (SET_DEST (x) == virtual_stack_vars_rtx)
dfd3dae6 3354 new = frame_pointer_rtx, offset = - var_offset;
6f086dfc
RS
3355 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3356 new = stack_pointer_rtx, offset = - dynamic_offset;
3357 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3358 new = stack_pointer_rtx, offset = - out_arg_offset;
71038426
RH
3359 else if (SET_DEST (x) == virtual_cfa_rtx)
3360 new = arg_pointer_rtx, offset = - cfa_offset;
6f086dfc
RS
3361
3362 if (new)
3363 {
3364 /* The only valid sources here are PLUS or REG. Just do
3365 the simplest possible thing to handle them. */
3366 if (GET_CODE (SET_SRC (x)) != REG
3367 && GET_CODE (SET_SRC (x)) != PLUS)
3368 abort ();
3369
3370 start_sequence ();
3371 if (GET_CODE (SET_SRC (x)) != REG)
5f4f0e22 3372 temp = force_operand (SET_SRC (x), NULL_RTX);
6f086dfc
RS
3373 else
3374 temp = SET_SRC (x);
5f4f0e22 3375 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
6f086dfc
RS
3376 seq = get_insns ();
3377 end_sequence ();
3378
3379 emit_insns_before (seq, object);
3380 SET_DEST (x) = new;
3381
e9a25f70 3382 if (! validate_change (object, &SET_SRC (x), temp, 0)
6f086dfc
RS
3383 || ! extra_insns)
3384 abort ();
3385
3386 return 1;
3387 }
3388
3389 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3390 loc = &SET_SRC (x);
3391 goto restart;
3392
3393 case PLUS:
3394 /* Handle special case of virtual register plus constant. */
3395 if (CONSTANT_P (XEXP (x, 1)))
3396 {
b1f82ccf 3397 rtx old, new_offset;
6f086dfc
RS
3398
3399 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3400 if (GET_CODE (XEXP (x, 0)) == PLUS)
3401 {
3402 rtx inner = XEXP (XEXP (x, 0), 0);
3403
3404 if (inner == virtual_incoming_args_rtx)
3405 new = arg_pointer_rtx, offset = in_arg_offset;
3406 else if (inner == virtual_stack_vars_rtx)
3407 new = frame_pointer_rtx, offset = var_offset;
3408 else if (inner == virtual_stack_dynamic_rtx)
3409 new = stack_pointer_rtx, offset = dynamic_offset;
3410 else if (inner == virtual_outgoing_args_rtx)
3411 new = stack_pointer_rtx, offset = out_arg_offset;
71038426
RH
3412 else if (inner == virtual_cfa_rtx)
3413 new = arg_pointer_rtx, offset = cfa_offset;
6f086dfc
RS
3414 else
3415 {
3416 loc = &XEXP (x, 0);
3417 goto restart;
3418 }
3419
3420 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3421 extra_insns);
38a448ca 3422 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
6f086dfc
RS
3423 }
3424
3425 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3426 new = arg_pointer_rtx, offset = in_arg_offset;
3427 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3428 new = frame_pointer_rtx, offset = var_offset;
3429 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3430 new = stack_pointer_rtx, offset = dynamic_offset;
3431 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3432 new = stack_pointer_rtx, offset = out_arg_offset;
71038426
RH
3433 else if (XEXP (x, 0) == virtual_cfa_rtx)
3434 new = arg_pointer_rtx, offset = cfa_offset;
6f086dfc
RS
3435 else
3436 {
3437 /* We know the second operand is a constant. Unless the
3438 first operand is a REG (which has been already checked),
3439 it needs to be checked. */
3440 if (GET_CODE (XEXP (x, 0)) != REG)
3441 {
3442 loc = &XEXP (x, 0);
3443 goto restart;
3444 }
3445 return 1;
3446 }
3447
b1f82ccf 3448 new_offset = plus_constant (XEXP (x, 1), offset);
6f086dfc 3449
b1f82ccf
DE
3450 /* If the new constant is zero, try to replace the sum with just
3451 the register. */
3452 if (new_offset == const0_rtx
3453 && validate_change (object, loc, new, 0))
6f086dfc
RS
3454 return 1;
3455
b1f82ccf
DE
3456 /* Next try to replace the register and new offset.
3457 There are two changes to validate here and we can't assume that
3458 in the case of old offset equals new just changing the register
3459 will yield a valid insn. In the interests of a little efficiency,
3460 however, we only call validate change once (we don't queue up the
0f41302f 3461 changes and then call apply_change_group). */
b1f82ccf
DE
3462
3463 old = XEXP (x, 0);
3464 if (offset == 0
3465 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3466 : (XEXP (x, 0) = new,
3467 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
6f086dfc
RS
3468 {
3469 if (! extra_insns)
3470 {
3471 XEXP (x, 0) = old;
3472 return 0;
3473 }
3474
3475 /* Otherwise copy the new constant into a register and replace
3476 constant with that register. */
3477 temp = gen_reg_rtx (Pmode);
b1f82ccf 3478 XEXP (x, 0) = new;
6f086dfc 3479 if (validate_change (object, &XEXP (x, 1), temp, 0))
b1f82ccf 3480 emit_insn_before (gen_move_insn (temp, new_offset), object);
6f086dfc
RS
3481 else
3482 {
3483 /* If that didn't work, replace this expression with a
3484 register containing the sum. */
3485
6f086dfc 3486 XEXP (x, 0) = old;
38a448ca 3487 new = gen_rtx_PLUS (Pmode, new, new_offset);
6f086dfc
RS
3488
3489 start_sequence ();
5f4f0e22 3490 temp = force_operand (new, NULL_RTX);
6f086dfc
RS
3491 seq = get_insns ();
3492 end_sequence ();
3493
3494 emit_insns_before (seq, object);
3495 if (! validate_change (object, loc, temp, 0)
3496 && ! validate_replace_rtx (x, temp, object))
3497 abort ();
3498 }
3499 }
3500
3501 return 1;
3502 }
3503
3504 /* Fall through to generic two-operand expression case. */
3505 case EXPR_LIST:
3506 case CALL:
3507 case COMPARE:
3508 case MINUS:
3509 case MULT:
3510 case DIV: case UDIV:
3511 case MOD: case UMOD:
3512 case AND: case IOR: case XOR:
45620ed4
RK
3513 case ROTATERT: case ROTATE:
3514 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
6f086dfc
RS
3515 case NE: case EQ:
3516 case GE: case GT: case GEU: case GTU:
3517 case LE: case LT: case LEU: case LTU:
3518 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3519 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3520 loc = &XEXP (x, 0);
3521 goto restart;
3522
3523 case MEM:
3524 /* Most cases of MEM that convert to valid addresses have already been
4fd796bb 3525 handled by our scan of decls. The only special handling we
6f086dfc 3526 need here is to make a copy of the rtx to ensure it isn't being
b335c2cc 3527 shared if we have to change it to a pseudo.
6f086dfc
RS
3528
3529 If the rtx is a simple reference to an address via a virtual register,
3530 it can potentially be shared. In such cases, first try to make it
3531 a valid address, which can also be shared. Otherwise, copy it and
3532 proceed normally.
3533
3534 First check for common cases that need no processing. These are
3535 usually due to instantiation already being done on a previous instance
3536 of a shared rtx. */
3537
3538 temp = XEXP (x, 0);
3539 if (CONSTANT_ADDRESS_P (temp)
3540#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3541 || temp == arg_pointer_rtx
b37f453b
DE
3542#endif
3543#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3544 || temp == hard_frame_pointer_rtx
6f086dfc
RS
3545#endif
3546 || temp == frame_pointer_rtx)
3547 return 1;
3548
3549 if (GET_CODE (temp) == PLUS
3550 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3551 && (XEXP (temp, 0) == frame_pointer_rtx
b37f453b
DE
3552#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3553 || XEXP (temp, 0) == hard_frame_pointer_rtx
3554#endif
6f086dfc
RS
3555#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3556 || XEXP (temp, 0) == arg_pointer_rtx
3557#endif
3558 ))
3559 return 1;
3560
3561 if (temp == virtual_stack_vars_rtx
3562 || temp == virtual_incoming_args_rtx
3563 || (GET_CODE (temp) == PLUS
3564 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3565 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3566 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3567 {
3568 /* This MEM may be shared. If the substitution can be done without
3569 the need to generate new pseudos, we want to do it in place
3570 so all copies of the shared rtx benefit. The call below will
3571 only make substitutions if the resulting address is still
3572 valid.
3573
3574 Note that we cannot pass X as the object in the recursive call
3575 since the insn being processed may not allow all valid
6461be14
RS
3576 addresses. However, if we were not passed on object, we can
3577 only modify X without copying it if X will have a valid
3578 address.
6f086dfc 3579
6461be14
RS
3580 ??? Also note that this can still lose if OBJECT is an insn that
3581 has less restrictions on an address that some other insn.
3582 In that case, we will modify the shared address. This case
4fd796bb
RK
3583 doesn't seem very likely, though. One case where this could
3584 happen is in the case of a USE or CLOBBER reference, but we
3585 take care of that below. */
6461be14
RS
3586
3587 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3588 object ? object : x, 0))
6f086dfc
RS
3589 return 1;
3590
3591 /* Otherwise make a copy and process that copy. We copy the entire
3592 RTL expression since it might be a PLUS which could also be
3593 shared. */
3594 *loc = x = copy_rtx (x);
3595 }
3596
3597 /* Fall through to generic unary operation case. */
6f086dfc
RS
3598 case SUBREG:
3599 case STRICT_LOW_PART:
3600 case NEG: case NOT:
3601 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3602 case SIGN_EXTEND: case ZERO_EXTEND:
3603 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3604 case FLOAT: case FIX:
3605 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3606 case ABS:
3607 case SQRT:
3608 case FFS:
3609 /* These case either have just one operand or we know that we need not
3610 check the rest of the operands. */
3611 loc = &XEXP (x, 0);
3612 goto restart;
3613
4fd796bb
RK
3614 case USE:
3615 case CLOBBER:
3616 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3617 go ahead and make the invalid one, but do it to a copy. For a REG,
3618 just make the recursive call, since there's no chance of a problem. */
3619
3620 if ((GET_CODE (XEXP (x, 0)) == MEM
3621 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3622 0))
3623 || (GET_CODE (XEXP (x, 0)) == REG
7694ce35 3624 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4fd796bb
RK
3625 return 1;
3626
3627 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3628 loc = &XEXP (x, 0);
3629 goto restart;
3630
6f086dfc
RS
3631 case REG:
3632 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3633 in front of this insn and substitute the temporary. */
3634 if (x == virtual_incoming_args_rtx)
3635 new = arg_pointer_rtx, offset = in_arg_offset;
3636 else if (x == virtual_stack_vars_rtx)
3637 new = frame_pointer_rtx, offset = var_offset;
3638 else if (x == virtual_stack_dynamic_rtx)
3639 new = stack_pointer_rtx, offset = dynamic_offset;
3640 else if (x == virtual_outgoing_args_rtx)
3641 new = stack_pointer_rtx, offset = out_arg_offset;
71038426
RH
3642 else if (x == virtual_cfa_rtx)
3643 new = arg_pointer_rtx, offset = cfa_offset;
6f086dfc
RS
3644
3645 if (new)
3646 {
3647 temp = plus_constant (new, offset);
3648 if (!validate_change (object, loc, temp, 0))
3649 {
3650 if (! extra_insns)
3651 return 0;
3652
3653 start_sequence ();
5f4f0e22 3654 temp = force_operand (temp, NULL_RTX);
6f086dfc
RS
3655 seq = get_insns ();
3656 end_sequence ();
3657
3658 emit_insns_before (seq, object);
3659 if (! validate_change (object, loc, temp, 0)
3660 && ! validate_replace_rtx (x, temp, object))
3661 abort ();
3662 }
3663 }
3664
3665 return 1;
e9a25f70
JL
3666
3667 case ADDRESSOF:
3668 if (GET_CODE (XEXP (x, 0)) == REG)
3669 return 1;
3670
3671 else if (GET_CODE (XEXP (x, 0)) == MEM)
3672 {
3673 /* If we have a (addressof (mem ..)), do any instantiation inside
3674 since we know we'll be making the inside valid when we finally
3675 remove the ADDRESSOF. */
3676 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3677 return 1;
3678 }
3679 break;
3680
3681 default:
3682 break;
6f086dfc
RS
3683 }
3684
3685 /* Scan all subexpressions. */
3686 fmt = GET_RTX_FORMAT (code);
3687 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3688 if (*fmt == 'e')
3689 {
3690 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3691 return 0;
3692 }
3693 else if (*fmt == 'E')
3694 for (j = 0; j < XVECLEN (x, i); j++)
3695 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3696 extra_insns))
3697 return 0;
3698
3699 return 1;
3700}
3701\f
3702/* Optimization: assuming this function does not receive nonlocal gotos,
3703 delete the handlers for such, as well as the insns to establish
3704 and disestablish them. */
3705
3706static void
3707delete_handlers ()
3708{
3709 rtx insn;
3710 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3711 {
3712 /* Delete the handler by turning off the flag that would
3713 prevent jump_optimize from deleting it.
3714 Also permit deletion of the nonlocal labels themselves
3715 if nothing local refers to them. */
3716 if (GET_CODE (insn) == CODE_LABEL)
71cd4a8d
JW
3717 {
3718 tree t, last_t;
3719
3720 LABEL_PRESERVE_P (insn) = 0;
3721
3722 /* Remove it from the nonlocal_label list, to avoid confusing
3723 flow. */
3724 for (t = nonlocal_labels, last_t = 0; t;
3725 last_t = t, t = TREE_CHAIN (t))
3726 if (DECL_RTL (TREE_VALUE (t)) == insn)
3727 break;
3728 if (t)
3729 {
3730 if (! last_t)
3731 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3732 else
3733 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3734 }
3735 }
ba716ac9
BS
3736 if (GET_CODE (insn) == INSN)
3737 {
3738 int can_delete = 0;
3739 rtx t;
3740 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3741 if (reg_mentioned_p (t, PATTERN (insn)))
3742 {
3743 can_delete = 1;
3744 break;
3745 }
3746 if (can_delete
59257ff7
RK
3747 || (nonlocal_goto_stack_level != 0
3748 && reg_mentioned_p (nonlocal_goto_stack_level,
ba716ac9
BS
3749 PATTERN (insn))))
3750 delete_insn (insn);
3751 }
6f086dfc
RS
3752 }
3753}
3754
3755/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3756 of the current function. */
3757
3758rtx
3759nonlocal_label_rtx_list ()
3760{
3761 tree t;
3762 rtx x = 0;
3763
3764 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
38a448ca 3765 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
6f086dfc
RS
3766
3767 return x;
3768}
3769\f
3770/* Output a USE for any register use in RTL.
3771 This is used with -noreg to mark the extent of lifespan
3772 of any registers used in a user-visible variable's DECL_RTL. */
3773
3774void
3775use_variable (rtl)
3776 rtx rtl;
3777{
3778 if (GET_CODE (rtl) == REG)
3779 /* This is a register variable. */
38a448ca 3780 emit_insn (gen_rtx_USE (VOIDmode, rtl));
6f086dfc
RS
3781 else if (GET_CODE (rtl) == MEM
3782 && GET_CODE (XEXP (rtl, 0)) == REG
3783 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3784 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3785 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3786 /* This is a variable-sized structure. */
38a448ca 3787 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
6f086dfc
RS
3788}
3789
3790/* Like use_variable except that it outputs the USEs after INSN
3791 instead of at the end of the insn-chain. */
3792
3793void
3794use_variable_after (rtl, insn)
3795 rtx rtl, insn;
3796{
3797 if (GET_CODE (rtl) == REG)
3798 /* This is a register variable. */
38a448ca 3799 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
6f086dfc
RS
3800 else if (GET_CODE (rtl) == MEM
3801 && GET_CODE (XEXP (rtl, 0)) == REG
3802 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3803 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3804 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3805 /* This is a variable-sized structure. */
38a448ca 3806 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
6f086dfc
RS
3807}
3808\f
3809int
3810max_parm_reg_num ()
3811{
3812 return max_parm_reg;
3813}
3814
3815/* Return the first insn following those generated by `assign_parms'. */
3816
3817rtx
3818get_first_nonparm_insn ()
3819{
3820 if (last_parm_insn)
3821 return NEXT_INSN (last_parm_insn);
3822 return get_insns ();
3823}
3824
5378192b
RS
3825/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3826 Crash if there is none. */
3827
3828rtx
3829get_first_block_beg ()
3830{
3831 register rtx searcher;
3832 register rtx insn = get_first_nonparm_insn ();
3833
3834 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3835 if (GET_CODE (searcher) == NOTE
3836 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3837 return searcher;
3838
3839 abort (); /* Invalid call to this function. (See comments above.) */
3840 return NULL_RTX;
3841}
3842
d181c154
RS
3843/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3844 This means a type for which function calls must pass an address to the
3845 function or get an address back from the function.
3846 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
3847
3848int
3849aggregate_value_p (exp)
3850 tree exp;
3851{
9d790a4f
RS
3852 int i, regno, nregs;
3853 rtx reg;
d181c154
RS
3854 tree type;
3855 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3856 type = exp;
3857 else
3858 type = TREE_TYPE (exp);
3859
3860 if (RETURN_IN_MEMORY (type))
6f086dfc 3861 return 1;
956d6950 3862 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
3863 and thus can't be returned in registers. */
3864 if (TREE_ADDRESSABLE (type))
3865 return 1;
05e3bdb9 3866 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 3867 return 1;
9d790a4f
RS
3868 /* Make sure we have suitable call-clobbered regs to return
3869 the value in; if not, we must return it in memory. */
d181c154 3870 reg = hard_function_value (type, 0);
e71f7aa5
JW
3871
3872 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3873 it is OK. */
3874 if (GET_CODE (reg) != REG)
3875 return 0;
3876
9d790a4f 3877 regno = REGNO (reg);
d181c154 3878 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
9d790a4f
RS
3879 for (i = 0; i < nregs; i++)
3880 if (! call_used_regs[regno + i])
3881 return 1;
6f086dfc
RS
3882 return 0;
3883}
3884\f
3885/* Assign RTL expressions to the function's parameters.
3886 This may involve copying them into registers and using
3887 those registers as the RTL for them.
3888
3889 If SECOND_TIME is non-zero it means that this function is being
3890 called a second time. This is done by integrate.c when a function's
3891 compilation is deferred. We need to come back here in case the
3892 FUNCTION_ARG macro computes items needed for the rest of the compilation
3893 (such as changing which registers are fixed or caller-saved). But suppress
3894 writing any insns or setting DECL_RTL of anything in this case. */
3895
3896void
3897assign_parms (fndecl, second_time)
3898 tree fndecl;
3899 int second_time;
3900{
3901 register tree parm;
3902 register rtx entry_parm = 0;
3903 register rtx stack_parm = 0;
3904 CUMULATIVE_ARGS args_so_far;
621061f4
RK
3905 enum machine_mode promoted_mode, passed_mode;
3906 enum machine_mode nominal_mode, promoted_nominal_mode;
00d8a4c1 3907 int unsignedp;
6f086dfc
RS
3908 /* Total space needed so far for args on the stack,
3909 given as a constant and a tree-expression. */
3910 struct args_size stack_args_size;
3911 tree fntype = TREE_TYPE (fndecl);
3912 tree fnargs = DECL_ARGUMENTS (fndecl);
3913 /* This is used for the arg pointer when referring to stack args. */
3914 rtx internal_arg_pointer;
3915 /* This is a dummy PARM_DECL that we used for the function result if
3916 the function returns a structure. */
3917 tree function_result_decl = 0;
6f086dfc 3918 int varargs_setup = 0;
3412b298 3919 rtx conversion_insns = 0;
6f086dfc
RS
3920
3921 /* Nonzero if the last arg is named `__builtin_va_alist',
3922 which is used on some machines for old-fashioned non-ANSI varargs.h;
3923 this should be stuck onto the stack as if it had arrived there. */
3b69d50e
RK
3924 int hide_last_arg
3925 = (current_function_varargs
3926 && fnargs
6f086dfc
RS
3927 && (parm = tree_last (fnargs)) != 0
3928 && DECL_NAME (parm)
3929 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3930 "__builtin_va_alist")));
3931
3932 /* Nonzero if function takes extra anonymous args.
3933 This means the last named arg must be on the stack
0f41302f 3934 right before the anonymous ones. */
6f086dfc
RS
3935 int stdarg
3936 = (TYPE_ARG_TYPES (fntype) != 0
3937 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3938 != void_type_node));
3939
ebb904cb
RK
3940 current_function_stdarg = stdarg;
3941
6f086dfc
RS
3942 /* If the reg that the virtual arg pointer will be translated into is
3943 not a fixed reg or is the stack pointer, make a copy of the virtual
3944 arg pointer, and address parms via the copy. The frame pointer is
3945 considered fixed even though it is not marked as such.
3946
3947 The second time through, simply use ap to avoid generating rtx. */
3948
3949 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3950 || ! (fixed_regs[ARG_POINTER_REGNUM]
3951 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3952 && ! second_time)
3953 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3954 else
3955 internal_arg_pointer = virtual_incoming_args_rtx;
3956 current_function_internal_arg_pointer = internal_arg_pointer;
3957
3958 stack_args_size.constant = 0;
3959 stack_args_size.var = 0;
3960
3961 /* If struct value address is treated as the first argument, make it so. */
3962 if (aggregate_value_p (DECL_RESULT (fndecl))
3963 && ! current_function_returns_pcc_struct
3964 && struct_value_incoming_rtx == 0)
3965 {
f9f29478 3966 tree type = build_pointer_type (TREE_TYPE (fntype));
6f086dfc 3967
5f4f0e22 3968 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
6f086dfc
RS
3969
3970 DECL_ARG_TYPE (function_result_decl) = type;
3971 TREE_CHAIN (function_result_decl) = fnargs;
3972 fnargs = function_result_decl;
3973 }
3974
e9a25f70
JL
3975 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3976 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3977 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
6f086dfc
RS
3978
3979#ifdef INIT_CUMULATIVE_INCOMING_ARGS
ea0d4c4b 3980 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
6f086dfc 3981#else
2c7ee1a6 3982 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
6f086dfc
RS
3983#endif
3984
3985 /* We haven't yet found an argument that we must push and pretend the
3986 caller did. */
3987 current_function_pretend_args_size = 0;
3988
3989 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3990 {
05e3bdb9 3991 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
6f086dfc
RS
3992 struct args_size stack_offset;
3993 struct args_size arg_size;
3994 int passed_pointer = 0;
621061f4 3995 int did_conversion = 0;
6f086dfc 3996 tree passed_type = DECL_ARG_TYPE (parm);
621061f4 3997 tree nominal_type = TREE_TYPE (parm);
6f086dfc
RS
3998
3999 /* Set LAST_NAMED if this is last named arg before some
bf9c83fe 4000 anonymous args. */
6f086dfc
RS
4001 int last_named = ((TREE_CHAIN (parm) == 0
4002 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3b69d50e 4003 && (stdarg || current_function_varargs));
bf9c83fe
JW
4004 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4005 most machines, if this is a varargs/stdarg function, then we treat
4006 the last named arg as if it were anonymous too. */
e5e809f4 4007 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
6f086dfc
RS
4008
4009 if (TREE_TYPE (parm) == error_mark_node
4010 /* This can happen after weird syntax errors
4011 or if an enum type is defined among the parms. */
4012 || TREE_CODE (parm) != PARM_DECL
4013 || passed_type == NULL)
4014 {
38a448ca
RH
4015 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4016 = gen_rtx_MEM (BLKmode, const0_rtx);
6f086dfc
RS
4017 TREE_USED (parm) = 1;
4018 continue;
4019 }
4020
4021 /* For varargs.h function, save info about regs and stack space
4022 used by the individual args, not including the va_alist arg. */
3b69d50e 4023 if (hide_last_arg && last_named)
6f086dfc
RS
4024 current_function_args_info = args_so_far;
4025
4026 /* Find mode of arg as it is passed, and mode of arg
4027 as it should be during execution of this function. */
4028 passed_mode = TYPE_MODE (passed_type);
621061f4 4029 nominal_mode = TYPE_MODE (nominal_type);
6f086dfc 4030
16bae307
RS
4031 /* If the parm's mode is VOID, its value doesn't matter,
4032 and avoid the usual things like emit_move_insn that could crash. */
4033 if (nominal_mode == VOIDmode)
4034 {
4035 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4036 continue;
4037 }
4038
3f46679a
RK
4039 /* If the parm is to be passed as a transparent union, use the
4040 type of the first field for the tests below. We have already
4041 verified that the modes are the same. */
4042 if (DECL_TRANSPARENT_UNION (parm)
4043 || TYPE_TRANSPARENT_UNION (passed_type))
4044 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4045
a14ae508
RK
4046 /* See if this arg was passed by invisible reference. It is if
4047 it is an object whose size depends on the contents of the
4048 object itself or if the machine requires these objects be passed
4049 that way. */
4050
4051 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4052 && contains_placeholder_p (TYPE_SIZE (passed_type)))
657bb6dc 4053 || TREE_ADDRESSABLE (passed_type)
6f086dfc 4054#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
a14ae508 4055 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
bf9c83fe 4056 passed_type, named_arg)
a14ae508
RK
4057#endif
4058 )
6f086dfc 4059 {
621061f4 4060 passed_type = nominal_type = build_pointer_type (passed_type);
6f086dfc
RS
4061 passed_pointer = 1;
4062 passed_mode = nominal_mode = Pmode;
4063 }
6f086dfc 4064
a53e14c0
RK
4065 promoted_mode = passed_mode;
4066
4067#ifdef PROMOTE_FUNCTION_ARGS
4068 /* Compute the mode in which the arg is actually extended to. */
7940255d 4069 unsignedp = TREE_UNSIGNED (passed_type);
a5a52dbc 4070 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
a53e14c0
RK
4071#endif
4072
6f086dfc
RS
4073 /* Let machine desc say which reg (if any) the parm arrives in.
4074 0 means it arrives on the stack. */
4075#ifdef FUNCTION_INCOMING_ARG
a53e14c0 4076 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
bf9c83fe 4077 passed_type, named_arg);
6f086dfc 4078#else
a53e14c0 4079 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
bf9c83fe 4080 passed_type, named_arg);
6f086dfc
RS
4081#endif
4082
621061f4
RK
4083 if (entry_parm == 0)
4084 promoted_mode = passed_mode;
a53e14c0 4085
6f086dfc
RS
4086#ifdef SETUP_INCOMING_VARARGS
4087 /* If this is the last named parameter, do any required setup for
4088 varargs or stdargs. We need to know about the case of this being an
4089 addressable type, in which case we skip the registers it
4090 would have arrived in.
4091
4092 For stdargs, LAST_NAMED will be set for two parameters, the one that
4093 is actually the last named, and the dummy parameter. We only
4094 want to do this action once.
4095
4096 Also, indicate when RTL generation is to be suppressed. */
4097 if (last_named && !varargs_setup)
4098 {
621061f4 4099 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
6f086dfc
RS
4100 current_function_pretend_args_size,
4101 second_time);
4102 varargs_setup = 1;
4103 }
4104#endif
4105
4106 /* Determine parm's home in the stack,
4107 in case it arrives in the stack or we should pretend it did.
4108
4109 Compute the stack position and rtx where the argument arrives
4110 and its size.
4111
4112 There is one complexity here: If this was a parameter that would
4113 have been passed in registers, but wasn't only because it is
4114 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4115 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4116 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4117 0 as it was the previous time. */
4118
ab87f8c8 4119 locate_and_pad_parm (nominal_mode, passed_type,
6f086dfc
RS
4120#ifdef STACK_PARMS_IN_REG_PARM_AREA
4121 1,
4122#else
4123#ifdef FUNCTION_INCOMING_ARG
621061f4 4124 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
6f086dfc 4125 passed_type,
bf9c83fe 4126 (named_arg
6f086dfc
RS
4127 || varargs_setup)) != 0,
4128#else
621061f4 4129 FUNCTION_ARG (args_so_far, promoted_mode,
6f086dfc 4130 passed_type,
bf9c83fe 4131 named_arg || varargs_setup) != 0,
6f086dfc
RS
4132#endif
4133#endif
4134 fndecl, &stack_args_size, &stack_offset, &arg_size);
4135
4136 if (! second_time)
4137 {
4138 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4139
4140 if (offset_rtx == const0_rtx)
ab87f8c8 4141 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
6f086dfc 4142 else
ab87f8c8 4143 stack_parm = gen_rtx_MEM (nominal_mode,
38a448ca
RH
4144 gen_rtx_PLUS (Pmode,
4145 internal_arg_pointer,
4146 offset_rtx));
6f086dfc
RS
4147
4148 /* If this is a memory ref that contains aggregate components,
a00285d0
RK
4149 mark it as such for cse and loop optimize. Likewise if it
4150 is readonly. */
c6df88cb 4151 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
a00285d0 4152 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
41472af8 4153 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
6f086dfc
RS
4154 }
4155
4156 /* If this parameter was passed both in registers and in the stack,
4157 use the copy on the stack. */
621061f4 4158 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
6f086dfc
RS
4159 entry_parm = 0;
4160
461beb10 4161#ifdef FUNCTION_ARG_PARTIAL_NREGS
6f086dfc
RS
4162 /* If this parm was passed part in regs and part in memory,
4163 pretend it arrived entirely in memory
4164 by pushing the register-part onto the stack.
4165
4166 In the special case of a DImode or DFmode that is split,
4167 we could put it together in a pseudoreg directly,
4168 but for now that's not worth bothering with. */
4169
4170 if (entry_parm)
4171 {
621061f4 4172 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
bf9c83fe 4173 passed_type, named_arg);
6f086dfc
RS
4174
4175 if (nregs > 0)
4176 {
4177 current_function_pretend_args_size
4178 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4179 / (PARM_BOUNDARY / BITS_PER_UNIT)
4180 * (PARM_BOUNDARY / BITS_PER_UNIT));
4181
4182 if (! second_time)
5c4cdc9f
JW
4183 {
4184 /* Handle calls that pass values in multiple non-contiguous
4185 locations. The Irix 6 ABI has examples of this. */
4186 if (GET_CODE (entry_parm) == PARALLEL)
aac5cc16
RH
4187 emit_group_store (validize_mem (stack_parm), entry_parm,
4188 int_size_in_bytes (TREE_TYPE (parm)),
4189 (TYPE_ALIGN (TREE_TYPE (parm))
4190 / BITS_PER_UNIT));
5c4cdc9f
JW
4191 else
4192 move_block_from_reg (REGNO (entry_parm),
4193 validize_mem (stack_parm), nregs,
4194 int_size_in_bytes (TREE_TYPE (parm)));
4195 }
6f086dfc
RS
4196 entry_parm = stack_parm;
4197 }
4198 }
461beb10 4199#endif
6f086dfc
RS
4200
4201 /* If we didn't decide this parm came in a register,
4202 by default it came on the stack. */
4203 if (entry_parm == 0)
4204 entry_parm = stack_parm;
4205
4206 /* Record permanently how this parm was passed. */
4207 if (! second_time)
4208 DECL_INCOMING_RTL (parm) = entry_parm;
4209
4210 /* If there is actually space on the stack for this parm,
4211 count it in stack_args_size; otherwise set stack_parm to 0
4212 to indicate there is no preallocated stack slot for the parm. */
4213
4214 if (entry_parm == stack_parm
ab87f8c8
JL
4215 || (GET_CODE (entry_parm) == PARALLEL
4216 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
d9ca49d5 4217#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 4218 /* On some machines, even if a parm value arrives in a register
d9ca49d5
JW
4219 there is still an (uninitialized) stack slot allocated for it.
4220
4221 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4222 whether this parameter already has a stack slot allocated,
4223 because an arg block exists only if current_function_args_size
abc95ed3 4224 is larger than some threshold, and we haven't calculated that
d9ca49d5
JW
4225 yet. So, for now, we just assume that stack slots never exist
4226 in this case. */
6f086dfc
RS
4227 || REG_PARM_STACK_SPACE (fndecl) > 0
4228#endif
4229 )
4230 {
4231 stack_args_size.constant += arg_size.constant;
4232 if (arg_size.var)
4233 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4234 }
4235 else
4236 /* No stack slot was pushed for this parm. */
4237 stack_parm = 0;
4238
4239 /* Update info on where next arg arrives in registers. */
4240
621061f4 4241 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
bf9c83fe 4242 passed_type, named_arg);
6f086dfc 4243
0f41302f 4244 /* If this is our second time through, we are done with this parm. */
6f086dfc
RS
4245 if (second_time)
4246 continue;
4247
e16c591a
RS
4248 /* If we can't trust the parm stack slot to be aligned enough
4249 for its ultimate type, don't use that slot after entry.
4250 We'll make another stack slot, if we need one. */
4251 {
e16c591a 4252 int thisparm_boundary
621061f4 4253 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
e16c591a
RS
4254
4255 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4256 stack_parm = 0;
4257 }
4258
cb61f66f
RS
4259 /* If parm was passed in memory, and we need to convert it on entry,
4260 don't store it back in that same slot. */
4261 if (entry_parm != 0
4262 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4263 stack_parm = 0;
4264
4265#if 0
6f086dfc
RS
4266 /* Now adjust STACK_PARM to the mode and precise location
4267 where this parameter should live during execution,
4268 if we discover that it must live in the stack during execution.
4269 To make debuggers happier on big-endian machines, we store
4270 the value in the last bytes of the space available. */
4271
4272 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4273 && stack_parm != 0)
4274 {
4275 rtx offset_rtx;
4276
f76b9db2
ILT
4277 if (BYTES_BIG_ENDIAN
4278 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
6f086dfc
RS
4279 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4280 - GET_MODE_SIZE (nominal_mode));
6f086dfc
RS
4281
4282 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4283 if (offset_rtx == const0_rtx)
38a448ca 4284 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
6f086dfc 4285 else
38a448ca
RH
4286 stack_parm = gen_rtx_MEM (nominal_mode,
4287 gen_rtx_PLUS (Pmode,
4288 internal_arg_pointer,
4289 offset_rtx));
6f086dfc
RS
4290
4291 /* If this is a memory ref that contains aggregate components,
4292 mark it as such for cse and loop optimize. */
c6df88cb 4293 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
6f086dfc 4294 }
cb61f66f 4295#endif /* 0 */
6f086dfc 4296
9dc0f531
RK
4297#ifdef STACK_REGS
4298 /* We need this "use" info, because the gcc-register->stack-register
4299 converter in reg-stack.c needs to know which registers are active
4300 at the start of the function call. The actual parameter loading
4301 instructions are not always available then anymore, since they might
4302 have been optimised away. */
4303
4304 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
38a448ca 4305 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
9dc0f531
RK
4306#endif
4307
6f086dfc
RS
4308 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4309 in the mode in which it arrives.
4310 STACK_PARM is an RTX for a stack slot where the parameter can live
4311 during the function (in case we want to put it there).
4312 STACK_PARM is 0 if no stack slot was pushed for it.
4313
4314 Now output code if necessary to convert ENTRY_PARM to
4315 the type in which this function declares it,
4316 and store that result in an appropriate place,
4317 which may be a pseudo reg, may be STACK_PARM,
4318 or may be a local stack slot if STACK_PARM is 0.
4319
4320 Set DECL_RTL to that place. */
4321
5c4cdc9f 4322 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4323 {
5c4cdc9f
JW
4324 /* If a BLKmode arrives in registers, copy it to a stack slot.
4325 Handle calls that pass values in multiple non-contiguous
4326 locations. The Irix 6 ABI has examples of this. */
4327 if (GET_CODE (entry_parm) == REG
4328 || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4329 {
621061f4
RK
4330 int size_stored
4331 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4332 UNITS_PER_WORD);
6f086dfc
RS
4333
4334 /* Note that we will be storing an integral number of words.
4335 So we have to be careful to ensure that we allocate an
4336 integral number of words. We do this below in the
4337 assign_stack_local if space was not allocated in the argument
4338 list. If it was, this will not work if PARM_BOUNDARY is not
4339 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4340 if it becomes a problem. */
4341
4342 if (stack_parm == 0)
7e41ffa2
RS
4343 {
4344 stack_parm
621061f4
RK
4345 = assign_stack_local (GET_MODE (entry_parm),
4346 size_stored, 0);
4347
4348 /* If this is a memory ref that contains aggregate
4349 components, mark it as such for cse and loop optimize. */
c6df88cb 4350 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
7e41ffa2
RS
4351 }
4352
6f086dfc
RS
4353 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4354 abort ();
4355
7a30f0c4
JW
4356 if (TREE_READONLY (parm))
4357 RTX_UNCHANGING_P (stack_parm) = 1;
4358
5c4cdc9f
JW
4359 /* Handle calls that pass values in multiple non-contiguous
4360 locations. The Irix 6 ABI has examples of this. */
4361 if (GET_CODE (entry_parm) == PARALLEL)
aac5cc16
RH
4362 emit_group_store (validize_mem (stack_parm), entry_parm,
4363 int_size_in_bytes (TREE_TYPE (parm)),
4364 (TYPE_ALIGN (TREE_TYPE (parm))
4365 / BITS_PER_UNIT));
5c4cdc9f
JW
4366 else
4367 move_block_from_reg (REGNO (entry_parm),
4368 validize_mem (stack_parm),
4369 size_stored / UNITS_PER_WORD,
4370 int_size_in_bytes (TREE_TYPE (parm)));
6f086dfc
RS
4371 }
4372 DECL_RTL (parm) = stack_parm;
4373 }
74bd77a8 4374 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 4375 && ! DECL_INLINE (fndecl))
6f086dfc
RS
4376 /* layout_decl may set this. */
4377 || TREE_ADDRESSABLE (parm)
4378 || TREE_SIDE_EFFECTS (parm)
4379 /* If -ffloat-store specified, don't put explicit
4380 float variables into registers. */
4381 || (flag_float_store
4382 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4383 /* Always assign pseudo to structure return or item passed
4384 by invisible reference. */
4385 || passed_pointer || parm == function_result_decl)
4386 {
00d8a4c1
RK
4387 /* Store the parm in a pseudoregister during the function, but we
4388 may need to do it in a wider mode. */
4389
4390 register rtx parmreg;
4e86caed 4391 int regno, regnoi = 0, regnor = 0;
00d8a4c1
RK
4392
4393 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
cd5b3469 4394
621061f4
RK
4395 promoted_nominal_mode
4396 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
6f086dfc 4397
621061f4 4398 parmreg = gen_reg_rtx (promoted_nominal_mode);
ddb7361a 4399 mark_user_reg (parmreg);
6f086dfc
RS
4400
4401 /* If this was an item that we received a pointer to, set DECL_RTL
4402 appropriately. */
4403 if (passed_pointer)
4404 {
621061f4 4405 DECL_RTL (parm)
38a448ca 4406 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
c6df88cb 4407 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
6f086dfc
RS
4408 }
4409 else
4410 DECL_RTL (parm) = parmreg;
4411
4412 /* Copy the value into the register. */
621061f4
RK
4413 if (nominal_mode != passed_mode
4414 || promoted_nominal_mode != promoted_mode)
86f8eff3 4415 {
efd8cba0 4416 int save_tree_used;
621061f4
RK
4417 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4418 mode, by the caller. We now have to convert it to
4419 NOMINAL_MODE, if different. However, PARMREG may be in
956d6950 4420 a different mode than NOMINAL_MODE if it is being stored
621061f4
RK
4421 promoted.
4422
4423 If ENTRY_PARM is a hard register, it might be in a register
86f8eff3
RK
4424 not valid for operating in its mode (e.g., an odd-numbered
4425 register for a DFmode). In that case, moves are the only
4426 thing valid, so we can't do a convert from there. This
4427 occurs when the calling sequence allow such misaligned
3412b298
JW
4428 usages.
4429
4430 In addition, the conversion may involve a call, which could
4431 clobber parameters which haven't been copied to pseudo
4432 registers yet. Therefore, we must first copy the parm to
4433 a pseudo reg here, and save the conversion until after all
4434 parameters have been moved. */
4435
4436 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4437
4438 emit_move_insn (tempreg, validize_mem (entry_parm));
4439
4440 push_to_sequence (conversion_insns);
ad241351
RK
4441 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4442
efd8cba0
DB
4443 /* TREE_USED gets set erroneously during expand_assignment. */
4444 save_tree_used = TREE_USED (parm);
621061f4
RK
4445 expand_assignment (parm,
4446 make_tree (nominal_type, tempreg), 0, 0);
efd8cba0 4447 TREE_USED (parm) = save_tree_used;
3412b298 4448 conversion_insns = get_insns ();
621061f4 4449 did_conversion = 1;
3412b298 4450 end_sequence ();
86f8eff3 4451 }
6f086dfc
RS
4452 else
4453 emit_move_insn (parmreg, validize_mem (entry_parm));
4454
74bd77a8
RS
4455 /* If we were passed a pointer but the actual value
4456 can safely live in a register, put it in one. */
16bae307 4457 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
74bd77a8
RS
4458 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4459 && ! DECL_INLINE (fndecl))
4460 /* layout_decl may set this. */
4461 || TREE_ADDRESSABLE (parm)
4462 || TREE_SIDE_EFFECTS (parm)
4463 /* If -ffloat-store specified, don't put explicit
4464 float variables into registers. */
4465 || (flag_float_store
4466 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4467 {
2654605a
JW
4468 /* We can't use nominal_mode, because it will have been set to
4469 Pmode above. We must use the actual mode of the parm. */
4470 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
ddb7361a 4471 mark_user_reg (parmreg);
74bd77a8
RS
4472 emit_move_insn (parmreg, DECL_RTL (parm));
4473 DECL_RTL (parm) = parmreg;
c110c53d
RS
4474 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4475 now the parm. */
4476 stack_parm = 0;
74bd77a8 4477 }
137a2a7b
DE
4478#ifdef FUNCTION_ARG_CALLEE_COPIES
4479 /* If we are passed an arg by reference and it is our responsibility
4480 to make a copy, do it now.
4481 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4482 original argument, so we must recreate them in the call to
4483 FUNCTION_ARG_CALLEE_COPIES. */
4484 /* ??? Later add code to handle the case that if the argument isn't
4485 modified, don't do the copy. */
4486
4487 else if (passed_pointer
4488 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4489 TYPE_MODE (DECL_ARG_TYPE (parm)),
4490 DECL_ARG_TYPE (parm),
bf9c83fe 4491 named_arg)
926b1b99 4492 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
137a2a7b
DE
4493 {
4494 rtx copy;
4495 tree type = DECL_ARG_TYPE (parm);
4496
4497 /* This sequence may involve a library call perhaps clobbering
4498 registers that haven't been copied to pseudos yet. */
4499
4500 push_to_sequence (conversion_insns);
4501
4502 if (TYPE_SIZE (type) == 0
4503 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1fd3ef7f 4504 /* This is a variable sized object. */
38a448ca
RH
4505 copy = gen_rtx_MEM (BLKmode,
4506 allocate_dynamic_stack_space
4507 (expr_size (parm), NULL_RTX,
4508 TYPE_ALIGN (type)));
137a2a7b 4509 else
1fd3ef7f
RK
4510 copy = assign_stack_temp (TYPE_MODE (type),
4511 int_size_in_bytes (type), 1);
c6df88cb 4512 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
e9a25f70 4513 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
137a2a7b
DE
4514
4515 store_expr (parm, copy, 0);
4516 emit_move_insn (parmreg, XEXP (copy, 0));
7d384cc0 4517 if (current_function_check_memory_usage)
86fa911a
RK
4518 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4519 XEXP (copy, 0), ptr_mode,
4520 GEN_INT (int_size_in_bytes (type)),
4521 TYPE_MODE (sizetype),
956d6950
JL
4522 GEN_INT (MEMORY_USE_RW),
4523 TYPE_MODE (integer_type_node));
137a2a7b 4524 conversion_insns = get_insns ();
621061f4 4525 did_conversion = 1;
137a2a7b
DE
4526 end_sequence ();
4527 }
4528#endif /* FUNCTION_ARG_CALLEE_COPIES */
74bd77a8 4529
6f086dfc 4530 /* In any case, record the parm's desired stack location
14aceb29
RS
4531 in case we later discover it must live in the stack.
4532
4533 If it is a COMPLEX value, store the stack location for both
4534 halves. */
4535
4536 if (GET_CODE (parmreg) == CONCAT)
4537 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4538 else
4539 regno = REGNO (parmreg);
4540
e9a25f70 4541 if (regno >= max_parm_reg)
6f086dfc
RS
4542 {
4543 rtx *new;
e9a25f70 4544 int old_max_parm_reg = max_parm_reg;
14aceb29 4545
e9a25f70
JL
4546 /* It's slow to expand this one register at a time,
4547 but it's also rare and we need max_parm_reg to be
4548 precisely correct. */
4549 max_parm_reg = regno + 1;
4550 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4c9a05bc 4551 bcopy ((char *) parm_reg_stack_loc, (char *) new,
e9a25f70
JL
4552 old_max_parm_reg * sizeof (rtx));
4553 bzero ((char *) (new + old_max_parm_reg),
4554 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
6f086dfc
RS
4555 parm_reg_stack_loc = new;
4556 }
14aceb29
RS
4557
4558 if (GET_CODE (parmreg) == CONCAT)
4559 {
4560 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4561
a03caf76
RK
4562 regnor = REGNO (gen_realpart (submode, parmreg));
4563 regnoi = REGNO (gen_imagpart (submode, parmreg));
4564
7b1a0c14
RS
4565 if (stack_parm != 0)
4566 {
a03caf76 4567 parm_reg_stack_loc[regnor]
3d329b07 4568 = gen_realpart (submode, stack_parm);
a03caf76 4569 parm_reg_stack_loc[regnoi]
3d329b07 4570 = gen_imagpart (submode, stack_parm);
7b1a0c14
RS
4571 }
4572 else
4573 {
a03caf76
RK
4574 parm_reg_stack_loc[regnor] = 0;
4575 parm_reg_stack_loc[regnoi] = 0;
7b1a0c14 4576 }
14aceb29
RS
4577 }
4578 else
4579 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
6f086dfc
RS
4580
4581 /* Mark the register as eliminable if we did no conversion
4582 and it was copied from memory at a fixed offset,
4583 and the arg pointer was not copied to a pseudo-reg.
4584 If the arg pointer is a pseudo reg or the offset formed
4585 an invalid address, such memory-equivalences
4586 as we make here would screw up life analysis for it. */
4587 if (nominal_mode == passed_mode
621061f4 4588 && ! did_conversion
38b610ed
ILT
4589 && stack_parm != 0
4590 && GET_CODE (stack_parm) == MEM
6f086dfc
RS
4591 && stack_offset.var == 0
4592 && reg_mentioned_p (virtual_incoming_args_rtx,
38b610ed 4593 XEXP (stack_parm, 0)))
a03caf76
RK
4594 {
4595 rtx linsn = get_last_insn ();
69685820 4596 rtx sinsn, set;
a03caf76
RK
4597
4598 /* Mark complex types separately. */
4599 if (GET_CODE (parmreg) == CONCAT)
69685820
RK
4600 /* Scan backwards for the set of the real and
4601 imaginary parts. */
4602 for (sinsn = linsn; sinsn != 0;
4603 sinsn = prev_nonnote_insn (sinsn))
4604 {
4605 set = single_set (sinsn);
4606 if (set != 0
4607 && SET_DEST (set) == regno_reg_rtx [regnoi])
4608 REG_NOTES (sinsn)
38a448ca
RH
4609 = gen_rtx_EXPR_LIST (REG_EQUIV,
4610 parm_reg_stack_loc[regnoi],
4611 REG_NOTES (sinsn));
69685820
RK
4612 else if (set != 0
4613 && SET_DEST (set) == regno_reg_rtx [regnor])
4614 REG_NOTES (sinsn)
38a448ca
RH
4615 = gen_rtx_EXPR_LIST (REG_EQUIV,
4616 parm_reg_stack_loc[regnor],
4617 REG_NOTES (sinsn));
69685820
RK
4618 }
4619 else if ((set = single_set (linsn)) != 0
4620 && SET_DEST (set) == parmreg)
a03caf76 4621 REG_NOTES (linsn)
38a448ca
RH
4622 = gen_rtx_EXPR_LIST (REG_EQUIV,
4623 stack_parm, REG_NOTES (linsn));
a03caf76 4624 }
6f086dfc
RS
4625
4626 /* For pointer data type, suggest pointer register. */
e5e809f4 4627 if (POINTER_TYPE_P (TREE_TYPE (parm)))
6c6166bd
RK
4628 mark_reg_pointer (parmreg,
4629 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4630 / BITS_PER_UNIT));
6f086dfc
RS
4631 }
4632 else
4633 {
4634 /* Value must be stored in the stack slot STACK_PARM
4635 during function execution. */
4636
621061f4 4637 if (promoted_mode != nominal_mode)
86f8eff3
RK
4638 {
4639 /* Conversion is required. */
3412b298
JW
4640 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4641
4642 emit_move_insn (tempreg, validize_mem (entry_parm));
86f8eff3 4643
3412b298
JW
4644 push_to_sequence (conversion_insns);
4645 entry_parm = convert_to_mode (nominal_mode, tempreg,
a53e14c0 4646 TREE_UNSIGNED (TREE_TYPE (parm)));
de957303
DE
4647 if (stack_parm)
4648 {
4649 /* ??? This may need a big-endian conversion on sparc64. */
4650 stack_parm = change_address (stack_parm, nominal_mode,
4651 NULL_RTX);
4652 }
3412b298 4653 conversion_insns = get_insns ();
621061f4 4654 did_conversion = 1;
3412b298 4655 end_sequence ();
86f8eff3 4656 }
6f086dfc
RS
4657
4658 if (entry_parm != stack_parm)
4659 {
4660 if (stack_parm == 0)
7e41ffa2
RS
4661 {
4662 stack_parm
4663 = assign_stack_local (GET_MODE (entry_parm),
4664 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4665 /* If this is a memory ref that contains aggregate components,
4666 mark it as such for cse and loop optimize. */
c6df88cb 4667 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
7e41ffa2
RS
4668 }
4669
621061f4 4670 if (promoted_mode != nominal_mode)
3412b298
JW
4671 {
4672 push_to_sequence (conversion_insns);
4673 emit_move_insn (validize_mem (stack_parm),
4674 validize_mem (entry_parm));
4675 conversion_insns = get_insns ();
4676 end_sequence ();
4677 }
4678 else
4679 emit_move_insn (validize_mem (stack_parm),
4680 validize_mem (entry_parm));
6f086dfc 4681 }
7d384cc0 4682 if (current_function_check_memory_usage)
86fa911a
RK
4683 {
4684 push_to_sequence (conversion_insns);
4685 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4686 XEXP (stack_parm, 0), ptr_mode,
4687 GEN_INT (GET_MODE_SIZE (GET_MODE
4688 (entry_parm))),
4689 TYPE_MODE (sizetype),
956d6950
JL
4690 GEN_INT (MEMORY_USE_RW),
4691 TYPE_MODE (integer_type_node));
6f086dfc 4692
86fa911a
RK
4693 conversion_insns = get_insns ();
4694 end_sequence ();
4695 }
6f086dfc
RS
4696 DECL_RTL (parm) = stack_parm;
4697 }
4698
4699 /* If this "parameter" was the place where we are receiving the
4700 function's incoming structure pointer, set up the result. */
4701 if (parm == function_result_decl)
ccdecf58
RK
4702 {
4703 tree result = DECL_RESULT (fndecl);
4704 tree restype = TREE_TYPE (result);
4705
4706 DECL_RTL (result)
38a448ca 4707 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
ccdecf58 4708
c6df88cb
MM
4709 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4710 AGGREGATE_TYPE_P (restype));
ccdecf58 4711 }
6f086dfc
RS
4712
4713 if (TREE_THIS_VOLATILE (parm))
4714 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4715 if (TREE_READONLY (parm))
4716 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4717 }
4718
3412b298
JW
4719 /* Output all parameter conversion instructions (possibly including calls)
4720 now that all parameters have been copied out of hard registers. */
4721 emit_insns (conversion_insns);
4722
6f086dfc
RS
4723 last_parm_insn = get_last_insn ();
4724
4725 current_function_args_size = stack_args_size.constant;
4726
4727 /* Adjust function incoming argument size for alignment and
4728 minimum length. */
4729
4730#ifdef REG_PARM_STACK_SPACE
6f90e075 4731#ifndef MAYBE_REG_PARM_STACK_SPACE
6f086dfc
RS
4732 current_function_args_size = MAX (current_function_args_size,
4733 REG_PARM_STACK_SPACE (fndecl));
4734#endif
6f90e075 4735#endif
6f086dfc 4736
c795bca9
BS
4737#ifdef PREFERRED_STACK_BOUNDARY
4738#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
6f086dfc
RS
4739
4740 current_function_args_size
4741 = ((current_function_args_size + STACK_BYTES - 1)
4742 / STACK_BYTES) * STACK_BYTES;
4743#endif
4744
4745#ifdef ARGS_GROW_DOWNWARD
4746 current_function_arg_offset_rtx
5f4f0e22 4747 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
6f086dfc
RS
4748 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4749 size_int (-stack_args_size.constant)),
86fa911a 4750 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
6f086dfc
RS
4751#else
4752 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4753#endif
4754
4755 /* See how many bytes, if any, of its args a function should try to pop
4756 on return. */
4757
64e6d9cc 4758 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
4759 current_function_args_size);
4760
3b69d50e
RK
4761 /* For stdarg.h function, save info about
4762 regs and stack space used by the named args. */
6f086dfc 4763
3b69d50e 4764 if (!hide_last_arg)
6f086dfc
RS
4765 current_function_args_info = args_so_far;
4766
4767 /* Set the rtx used for the function return value. Put this in its
4768 own variable so any optimizers that need this information don't have
4769 to include tree.h. Do this here so it gets done when an inlined
4770 function gets output. */
4771
4772 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4773}
4774\f
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  /* Scan the formal arguments of the current function for one that
     arrives in hard register REGNO and whose declared and passed
     modes agree.  */
  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    {
      if (GET_CODE (DECL_INCOMING_RTL (arg)) != REG
	  || REGNO (DECL_INCOMING_RTL (arg)) != regno
	  || TYPE_MODE (DECL_ARG_TYPE (arg)) != TYPE_MODE (TREE_TYPE (arg)))
	continue;

      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

	/* See what mode the argument would be promoted to.  */
	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);

	/* A promotion took place only if the incoming register carries
	   the promoted mode and that differs from the declared mode.  */
	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
	    && mode != DECL_MODE (arg))
	  {
	    *pmode = DECL_MODE (arg);
	    *punsignedp = unsignedp;
	    return DECL_INCOMING_RTL (arg);
	  }
      }
    }

  return 0;
}

#endif
4814\f
6f086dfc
RS
4815/* Compute the size and offset from the start of the stacked arguments for a
4816 parm passed in mode PASSED_MODE and with type TYPE.
4817
4818 INITIAL_OFFSET_PTR points to the current offset into the stacked
4819 arguments.
4820
4821 The starting offset and size for this parm are returned in *OFFSET_PTR
4822 and *ARG_SIZE_PTR, respectively.
4823
4824 IN_REGS is non-zero if the argument will be passed in registers. It will
4825 never be set if REG_PARM_STACK_SPACE is not defined.
4826
4827 FNDECL is the function in which the argument was defined.
4828
4829 There are two types of rounding that are done. The first, controlled by
4830 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4831 list to be aligned to the specific boundary (in bits). This rounding
4832 affects the initial and starting offsets, but not the argument size.
4833
4834 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4835 optionally rounds the size of the parm to PARM_BOUNDARY. The
4836 initial offset is not affected by this rounding, while the size always
4837 is and the starting offset may be. */
4838
4839/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4840 initial_offset_ptr is positive because locate_and_pad_parm's
4841 callers pass in the total size of args so far as
4842 initial_offset_ptr. arg_size_ptr is always positive.*/
4843
6f086dfc
RS
4844void
4845locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4846 initial_offset_ptr, offset_ptr, arg_size_ptr)
4847 enum machine_mode passed_mode;
4848 tree type;
4849 int in_regs;
4850 tree fndecl;
4851 struct args_size *initial_offset_ptr;
4852 struct args_size *offset_ptr;
4853 struct args_size *arg_size_ptr;
4854{
4855 tree sizetree
4856 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4857 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4858 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
6f086dfc
RS
4859
4860#ifdef REG_PARM_STACK_SPACE
4861 /* If we have found a stack parm before we reach the end of the
4862 area reserved for registers, skip that area. */
4863 if (! in_regs)
4864 {
29a82058
JL
4865 int reg_parm_stack_space = 0;
4866
29008b51
JW
4867#ifdef MAYBE_REG_PARM_STACK_SPACE
4868 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4869#else
6f086dfc 4870 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
29008b51 4871#endif
6f086dfc
RS
4872 if (reg_parm_stack_space > 0)
4873 {
4874 if (initial_offset_ptr->var)
4875 {
4876 initial_offset_ptr->var
4877 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4878 size_int (reg_parm_stack_space));
4879 initial_offset_ptr->constant = 0;
4880 }
4881 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4882 initial_offset_ptr->constant = reg_parm_stack_space;
4883 }
4884 }
4885#endif /* REG_PARM_STACK_SPACE */
4886
4887 arg_size_ptr->var = 0;
4888 arg_size_ptr->constant = 0;
4889
4890#ifdef ARGS_GROW_DOWNWARD
4891 if (initial_offset_ptr->var)
4892 {
4893 offset_ptr->constant = 0;
4894 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4895 initial_offset_ptr->var);
4896 }
4897 else
4898 {
4899 offset_ptr->constant = - initial_offset_ptr->constant;
4900 offset_ptr->var = 0;
4901 }
0b21dcf5 4902 if (where_pad != none
6f086dfc
RS
4903 && (TREE_CODE (sizetree) != INTEGER_CST
4904 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4905 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4906 SUB_PARM_SIZE (*offset_ptr, sizetree);
66bcbe19
TG
4907 if (where_pad != downward)
4908 pad_to_arg_alignment (offset_ptr, boundary);
6f086dfc
RS
4909 if (initial_offset_ptr->var)
4910 {
4911 arg_size_ptr->var = size_binop (MINUS_EXPR,
4912 size_binop (MINUS_EXPR,
4913 integer_zero_node,
4914 initial_offset_ptr->var),
4915 offset_ptr->var);
4916 }
4917 else
4918 {
db3cf6fb
MS
4919 arg_size_ptr->constant = (- initial_offset_ptr->constant
4920 - offset_ptr->constant);
6f086dfc 4921 }
6f086dfc
RS
4922#else /* !ARGS_GROW_DOWNWARD */
4923 pad_to_arg_alignment (initial_offset_ptr, boundary);
4924 *offset_ptr = *initial_offset_ptr;
6f086dfc
RS
4925
4926#ifdef PUSH_ROUNDING
4927 if (passed_mode != BLKmode)
4928 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4929#endif
4930
d4b0a7a0
DE
4931 /* Pad_below needs the pre-rounded size to know how much to pad below
4932 so this must be done before rounding up. */
ea5917da
DE
4933 if (where_pad == downward
4934 /* However, BLKmode args passed in regs have their padding done elsewhere.
4935 The stack slot must be able to hold the entire register. */
4936 && !(in_regs && passed_mode == BLKmode))
d4b0a7a0
DE
4937 pad_below (offset_ptr, passed_mode, sizetree);
4938
6f086dfc
RS
4939 if (where_pad != none
4940 && (TREE_CODE (sizetree) != INTEGER_CST
4941 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4942 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4943
4944 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4945#endif /* ARGS_GROW_DOWNWARD */
4946}
4947
e16c591a
RS
4948/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4949 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4950
6f086dfc
RS
4951static void
4952pad_to_arg_alignment (offset_ptr, boundary)
4953 struct args_size *offset_ptr;
4954 int boundary;
4955{
4956 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4957
4958 if (boundary > BITS_PER_UNIT)
4959 {
4960 if (offset_ptr->var)
4961 {
4962 offset_ptr->var =
4963#ifdef ARGS_GROW_DOWNWARD
4964 round_down
4965#else
4966 round_up
4967#endif
4968 (ARGS_SIZE_TREE (*offset_ptr),
4969 boundary / BITS_PER_UNIT);
4970 offset_ptr->constant = 0; /*?*/
4971 }
4972 else
4973 offset_ptr->constant =
4974#ifdef ARGS_GROW_DOWNWARD
4975 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4976#else
4977 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4978#endif
4979 }
4980}
4981
51723711 4982#ifndef ARGS_GROW_DOWNWARD
6f086dfc
RS
4983static void
4984pad_below (offset_ptr, passed_mode, sizetree)
4985 struct args_size *offset_ptr;
4986 enum machine_mode passed_mode;
4987 tree sizetree;
4988{
4989 if (passed_mode != BLKmode)
4990 {
4991 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4992 offset_ptr->constant
4993 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4994 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4995 - GET_MODE_SIZE (passed_mode));
4996 }
4997 else
4998 {
4999 if (TREE_CODE (sizetree) != INTEGER_CST
5000 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5001 {
5002 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5003 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5004 /* Add it in. */
5005 ADD_PARM_SIZE (*offset_ptr, s2);
5006 SUB_PARM_SIZE (*offset_ptr, sizetree);
5007 }
5008 }
5009}
51723711 5010#endif
6f086dfc 5011
#ifdef ARGS_GROW_DOWNWARD
/* Return VALUE rounded down to the nearest multiple of DIVISOR,
   computed as a size tree.  */

static tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  /* floor (value / divisor) * divisor.  */
  tree quotient = size_binop (FLOOR_DIV_EXPR, value, size_int (divisor));

  return size_binop (MULT_EXPR, quotient, size_int (divisor));
}
#endif
6f086dfc
RS
5023\f
5024/* Walk the tree of blocks describing the binding levels within a function
5025 and warn about uninitialized variables.
5026 This is done after calling flow_analysis and before global_alloc
5027 clobbers the pseudo-regs to hard regs. */
5028
5029void
5030uninitialized_vars_warning (block)
5031 tree block;
5032{
5033 register tree decl, sub;
5034 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5035 {
5036 if (TREE_CODE (decl) == VAR_DECL
5037 /* These warnings are unreliable for and aggregates
5038 because assigning the fields one by one can fail to convince
5039 flow.c that the entire aggregate was initialized.
5040 Unions are troublesome because members may be shorter. */
05e3bdb9 5041 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
6f086dfc
RS
5042 && DECL_RTL (decl) != 0
5043 && GET_CODE (DECL_RTL (decl)) == REG
6acdd0fd
JL
5044 /* Global optimizations can make it difficult to determine if a
5045 particular variable has been initialized. However, a VAR_DECL
5046 with a nonzero DECL_INITIAL had an initializer, so do not
5047 claim it is potentially uninitialized.
5048
5049 We do not care about the actual value in DECL_INITIAL, so we do
5050 not worry that it may be a dangling pointer. */
5051 && DECL_INITIAL (decl) == NULL_TREE
6f086dfc
RS
5052 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5053 warning_with_decl (decl,
3c8cd8bd 5054 "`%s' might be used uninitialized in this function");
6f086dfc
RS
5055 if (TREE_CODE (decl) == VAR_DECL
5056 && DECL_RTL (decl) != 0
5057 && GET_CODE (DECL_RTL (decl)) == REG
5058 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5059 warning_with_decl (decl,
3c8cd8bd 5060 "variable `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
5061 }
5062 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5063 uninitialized_vars_warning (sub);
5064}
5065
5066/* Do the appropriate part of uninitialized_vars_warning
5067 but for arguments instead of local variables. */
5068
5069void
0cd6ef35 5070setjmp_args_warning ()
6f086dfc
RS
5071{
5072 register tree decl;
5073 for (decl = DECL_ARGUMENTS (current_function_decl);
5074 decl; decl = TREE_CHAIN (decl))
5075 if (DECL_RTL (decl) != 0
5076 && GET_CODE (DECL_RTL (decl)) == REG
5077 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3c8cd8bd 5078 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
5079}
5080
5081/* If this function call setjmp, put all vars into the stack
5082 unless they were declared `register'. */
5083
5084void
5085setjmp_protect (block)
5086 tree block;
5087{
5088 register tree decl, sub;
5089 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5090 if ((TREE_CODE (decl) == VAR_DECL
5091 || TREE_CODE (decl) == PARM_DECL)
5092 && DECL_RTL (decl) != 0
e9a25f70
JL
5093 && (GET_CODE (DECL_RTL (decl)) == REG
5094 || (GET_CODE (DECL_RTL (decl)) == MEM
5095 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
b335c2cc 5096 /* If this variable came from an inline function, it must be
9ec36da5 5097 that its life doesn't overlap the setjmp. If there was a
b335c2cc
TW
5098 setjmp in the function, it would already be in memory. We
5099 must exclude such variable because their DECL_RTL might be
5100 set to strange things such as virtual_stack_vars_rtx. */
5101 && ! DECL_FROM_INLINE (decl)
6f086dfc
RS
5102 && (
5103#ifdef NON_SAVING_SETJMP
5104 /* If longjmp doesn't restore the registers,
5105 don't put anything in them. */
5106 NON_SAVING_SETJMP
5107 ||
5108#endif
a82ad570 5109 ! DECL_REGISTER (decl)))
6f086dfc
RS
5110 put_var_into_stack (decl);
5111 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5112 setjmp_protect (sub);
5113}
5114\f
5115/* Like the previous function, but for args instead of local variables. */
5116
5117void
5118setjmp_protect_args ()
5119{
29a82058 5120 register tree decl;
6f086dfc
RS
5121 for (decl = DECL_ARGUMENTS (current_function_decl);
5122 decl; decl = TREE_CHAIN (decl))
5123 if ((TREE_CODE (decl) == VAR_DECL
5124 || TREE_CODE (decl) == PARM_DECL)
5125 && DECL_RTL (decl) != 0
e9a25f70
JL
5126 && (GET_CODE (DECL_RTL (decl)) == REG
5127 || (GET_CODE (DECL_RTL (decl)) == MEM
5128 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
6f086dfc
RS
5129 && (
5130 /* If longjmp doesn't restore the registers,
5131 don't put anything in them. */
5132#ifdef NON_SAVING_SETJMP
5133 NON_SAVING_SETJMP
5134 ||
5135#endif
a82ad570 5136 ! DECL_REGISTER (decl)))
6f086dfc
RS
5137 put_var_into_stack (decl);
5138}
5139\f
5140/* Return the context-pointer register corresponding to DECL,
5141 or 0 if it does not need one. */
5142
5143rtx
5144lookup_static_chain (decl)
5145 tree decl;
5146{
b001a02f
PB
5147 tree context = decl_function_context (decl);
5148 tree link;
7ad8c4bf 5149
38ee6ed9
JM
5150 if (context == 0
5151 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
7ad8c4bf 5152 return 0;
38ee6ed9 5153
6f086dfc
RS
5154 /* We treat inline_function_decl as an alias for the current function
5155 because that is the inline function whose vars, types, etc.
5156 are being merged into the current function.
5157 See expand_inline_function. */
5158 if (context == current_function_decl || context == inline_function_decl)
5159 return virtual_stack_vars_rtx;
5160
5161 for (link = context_display; link; link = TREE_CHAIN (link))
5162 if (TREE_PURPOSE (link) == context)
5163 return RTL_EXPR_RTL (TREE_VALUE (link));
5164
5165 abort ();
5166}
5167\f
5168/* Convert a stack slot address ADDR for variable VAR
5169 (from a containing function)
5170 into an address valid in this function (using a static chain). */
5171
5172rtx
5173fix_lexical_addr (addr, var)
5174 rtx addr;
5175 tree var;
5176{
5177 rtx basereg;
e5e809f4 5178 HOST_WIDE_INT displacement;
6f086dfc
RS
5179 tree context = decl_function_context (var);
5180 struct function *fp;
5181 rtx base = 0;
5182
5183 /* If this is the present function, we need not do anything. */
5184 if (context == current_function_decl || context == inline_function_decl)
5185 return addr;
5186
5187 for (fp = outer_function_chain; fp; fp = fp->next)
5188 if (fp->decl == context)
5189 break;
5190
5191 if (fp == 0)
5192 abort ();
5193
e9a25f70
JL
5194 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5195 addr = XEXP (XEXP (addr, 0), 0);
5196
6f086dfc
RS
5197 /* Decode given address as base reg plus displacement. */
5198 if (GET_CODE (addr) == REG)
5199 basereg = addr, displacement = 0;
5200 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5201 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5202 else
5203 abort ();
5204
5205 /* We accept vars reached via the containing function's
5206 incoming arg pointer and via its stack variables pointer. */
5207 if (basereg == fp->internal_arg_pointer)
5208 {
5209 /* If reached via arg pointer, get the arg pointer value
5210 out of that function's stack frame.
5211
5212 There are two cases: If a separate ap is needed, allocate a
5213 slot in the outer function for it and dereference it that way.
5214 This is correct even if the real ap is actually a pseudo.
5215 Otherwise, just adjust the offset from the frame pointer to
5216 compensate. */
5217
5218#ifdef NEED_SEPARATE_AP
5219 rtx addr;
5220
5221 if (fp->arg_pointer_save_area == 0)
5222 fp->arg_pointer_save_area
5223 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5224
5225 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5226 addr = memory_address (Pmode, addr);
5227
38a448ca 5228 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
6f086dfc
RS
5229#else
5230 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
86f8eff3 5231 base = lookup_static_chain (var);
6f086dfc
RS
5232#endif
5233 }
5234
5235 else if (basereg == virtual_stack_vars_rtx)
5236 {
5237 /* This is the same code as lookup_static_chain, duplicated here to
5238 avoid an extra call to decl_function_context. */
5239 tree link;
5240
5241 for (link = context_display; link; link = TREE_CHAIN (link))
5242 if (TREE_PURPOSE (link) == context)
5243 {
5244 base = RTL_EXPR_RTL (TREE_VALUE (link));
5245 break;
5246 }
5247 }
5248
5249 if (base == 0)
5250 abort ();
5251
5252 /* Use same offset, relative to appropriate static chain or argument
5253 pointer. */
5254 return plus_constant (base, displacement);
5255}
5256\f
5257/* Return the address of the trampoline for entering nested fn FUNCTION.
5258 If necessary, allocate a trampoline (in the stack frame)
5259 and emit rtl to initialize its contents (at entry to this function). */
5260
5261rtx
5262trampoline_address (function)
5263 tree function;
5264{
5265 tree link;
5266 tree rtlexp;
5267 rtx tramp;
5268 struct function *fp;
5269 tree fn_context;
5270
5271 /* Find an existing trampoline and return it. */
5272 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5273 if (TREE_PURPOSE (link) == function)
e87ee2a9
RK
5274 return
5275 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5276
6f086dfc
RS
5277 for (fp = outer_function_chain; fp; fp = fp->next)
5278 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5279 if (TREE_PURPOSE (link) == function)
5280 {
5281 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5282 function);
5283 return round_trampoline_addr (tramp);
5284 }
5285
5286 /* None exists; we must make one. */
5287
5288 /* Find the `struct function' for the function containing FUNCTION. */
5289 fp = 0;
5290 fn_context = decl_function_context (function);
4ac74fb8
RK
5291 if (fn_context != current_function_decl
5292 && fn_context != inline_function_decl)
6f086dfc
RS
5293 for (fp = outer_function_chain; fp; fp = fp->next)
5294 if (fp->decl == fn_context)
5295 break;
5296
5297 /* Allocate run-time space for this trampoline
5298 (usually in the defining function's stack frame). */
5299#ifdef ALLOCATE_TRAMPOLINE
5300 tramp = ALLOCATE_TRAMPOLINE (fp);
5301#else
5302 /* If rounding needed, allocate extra space
5303 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5304#ifdef TRAMPOLINE_ALIGNMENT
b02ab63a
RK
5305#define TRAMPOLINE_REAL_SIZE \
5306 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
6f086dfc
RS
5307#else
5308#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5309#endif
5310 if (fp != 0)
5311 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5312 else
5313 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5314#endif
5315
5316 /* Record the trampoline for reuse and note it for later initialization
5317 by expand_function_end. */
5318 if (fp != 0)
5319 {
28498644
RK
5320 push_obstacks (fp->function_maybepermanent_obstack,
5321 fp->function_maybepermanent_obstack);
6f086dfc
RS
5322 rtlexp = make_node (RTL_EXPR);
5323 RTL_EXPR_RTL (rtlexp) = tramp;
5324 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5325 pop_obstacks ();
5326 }
5327 else
5328 {
5329 /* Make the RTL_EXPR node temporary, not momentary, so that the
5330 trampoline_list doesn't become garbage. */
5331 int momentary = suspend_momentary ();
5332 rtlexp = make_node (RTL_EXPR);
5333 resume_momentary (momentary);
5334
5335 RTL_EXPR_RTL (rtlexp) = tramp;
5336 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5337 }
5338
5339 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5340 return round_trampoline_addr (tramp);
5341}
5342
5343/* Given a trampoline address,
5344 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5345
5346static rtx
5347round_trampoline_addr (tramp)
5348 rtx tramp;
5349{
5350#ifdef TRAMPOLINE_ALIGNMENT
5351 /* Round address up to desired boundary. */
5352 rtx temp = gen_reg_rtx (Pmode);
5353 temp = expand_binop (Pmode, add_optab, tramp,
b02ab63a 5354 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
6f086dfc
RS
5355 temp, 0, OPTAB_LIB_WIDEN);
5356 tramp = expand_binop (Pmode, and_optab, temp,
b02ab63a 5357 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
6f086dfc
RS
5358 temp, 0, OPTAB_LIB_WIDEN);
5359#endif
5360 return tramp;
5361}
5362\f
467456d0
RS
/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */

/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

tree *
identify_blocks (block, insns)
     tree block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  /* Note numbering starts at 1; vector slot 0 holds the top-level BLOCK
     itself, which no NOTE refers to.  */
  int next_block_number = 1;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return 0;

  /* First pass counts the blocks; second pass fills the vector.  */
  n_blocks = all_blocks (block, 0);
  /* Heap-allocated; presumably owned (and freed) by the caller after
     reorder_blocks -- TODO confirm against callers.  */
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (block, block_vector);

  /* Walk the insns, pushing/popping block numbers at BLOCK_BEG/BLOCK_END
     notes so nested blocks are numbered in depth-first order.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_stack[depth++] = current_block_number;
	    current_block_number = next_block_number;
	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
	    current_block_number = block_stack[--depth];
	  }
      }

  /* Every block must have been reached by exactly one BLOCK_BEG note.  */
  if (n_blocks != next_block_number)
    abort ();

  return block_vector;
}
5418
/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below TOP_BLOCK.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, block, insns)
     tree *block_vector;
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  /* With no vector there is nothing to reorder; return BLOCK untouched.  */
  if (block_vector == 0)
    return block;

  /* Prune the old trees away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  /* Re-grow the tree by walking the (possibly reordered/duplicated) insn
     stream: BLOCK_BEG notes descend into the numbered block, BLOCK_END
     notes pop back out.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.
	       (TREE_ASM_WRITTEN serves as the "already placed" mark;
	       all_blocks cleared it on every block beforehand.)  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    /* Prepend; the list is reversed into source order at BLOCK_END.  */
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    /* Zeroing the source file marks this note as processed.  */
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  /* Reverse the top level's accumulated children as well.  */
  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
  return current_block;
}
5471
5472/* Reverse the order of elements in the chain T of blocks,
5473 and return the new head of the chain (old last element). */
5474
5475static tree
5476blocks_nreverse (t)
5477 tree t;
5478{
5479 register tree prev = 0, decl, next;
5480 for (decl = t; decl; decl = next)
5481 {
5482 next = BLOCK_CHAIN (decl);
5483 BLOCK_CHAIN (decl) = prev;
5484 prev = decl;
5485 }
5486 return prev;
5487}
5488
b2a59b15
MS
5489/* Count the subblocks of the list starting with BLOCK, and list them
5490 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5491 blocks. */
467456d0
RS
5492
5493static int
5494all_blocks (block, vector)
5495 tree block;
5496 tree *vector;
5497{
b2a59b15
MS
5498 int n_blocks = 0;
5499
5500 while (block)
5501 {
5502 TREE_ASM_WRITTEN (block) = 0;
5503
5504 /* Record this block. */
5505 if (vector)
5506 vector[n_blocks] = block;
5507
5508 ++n_blocks;
5509
5510 /* Record the subblocks, and their subblocks... */
5511 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5512 vector ? vector + n_blocks : 0);
5513 block = BLOCK_CHAIN (block);
5514 }
467456d0
RS
5515
5516 return n_blocks;
5517}
5518\f
6f086dfc
RS
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  FILENAME/LINE give the source position of the function
   for the initial line note (LINE <= 0 suppresses it).  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  /* Clear all the remaining per-function flags; they are set lazily as
     the corresponding constructs are encountered during expansion.  */
  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;
  current_function_sp_is_unchanging = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file. */
  if (line > 0)
    emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}
5667
/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.
   Just sets the per-function flag cleared by init_function_start.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
5676
/* Expand a call to __main at the beginning of a possible main function.  */

/* When the target has an init section and does not explicitly request an
   INVOKE__main call, constructors run from the init section instead, so
   the library call below is skipped.  */
#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
5692\f
c20bf1f3
JB
5693extern struct obstack permanent_obstack;
5694
6f086dfc
RS
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
			       AGGREGATE_TYPE_P (TREE_TYPE
						 (DECL_RESULT
						  (subr))));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      /* Build the context_display list: one (context decl, RTL_EXPR holding
	 the frame pointer value) pair per enclosing function.  */
      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode, last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
5951\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  /* Shared across calls: the template is assembled once per compilation.  */
  static rtx initial_trampoline;
#endif

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
bdac5f58
TW
6271\f
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.
   Each vector is terminated by a zero entry (see record_insns), and is
   null when the corresponding sequence was not generated.  */

static int *prologue;
static int *epilogue;
6276
6277/* Create an array that records the INSN_UIDs of INSNS (either a sequence
6278 or a single insn). */
6279
487a6e06 6280#if defined (HAVE_prologue) || defined (HAVE_epilogue)
bdac5f58
TW
6281static int *
6282record_insns (insns)
6283 rtx insns;
6284{
6285 int *vec;
6286
6287 if (GET_CODE (insns) == SEQUENCE)
6288 {
6289 int len = XVECLEN (insns, 0);
6290 vec = (int *) oballoc ((len + 1) * sizeof (int));
6291 vec[len] = 0;
6292 while (--len >= 0)
6293 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6294 }
6295 else
6296 {
6297 vec = (int *) oballoc (2 * sizeof (int));
6298 vec[0] = INSN_UID (insns);
6299 vec[1] = 0;
6300 }
6301 return vec;
6302}
6303
10914065 6304/* Determine how many INSN_UIDs in VEC are part of INSN. */
bdac5f58 6305
10914065 6306static int
bdac5f58
TW
6307contains (insn, vec)
6308 rtx insn;
6309 int *vec;
6310{
6311 register int i, j;
6312
6313 if (GET_CODE (insn) == INSN
6314 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6315 {
10914065 6316 int count = 0;
bdac5f58
TW
6317 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6318 for (j = 0; vec[j]; j++)
6319 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
10914065
TW
6320 count++;
6321 return count;
bdac5f58
TW
6322 }
6323 else
6324 {
6325 for (j = 0; vec[j]; j++)
6326 if (INSN_UID (insn) == vec[j])
10914065 6327 return 1;
bdac5f58
TW
6328 }
6329 return 0;
6330}
081f5e7e 6331#endif /* HAVE_prologue || HAVE_epilogue */
bdac5f58 6332
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Side effects: sets the file-scope `prologue' and `epilogue' UID vectors
   (zero when the corresponding sequence was not emitted).  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
	 if they form a basic block unto themselves.  */
      if (x_basic_block_head && n_basic_blocks
	  && GET_CODE (BLOCK_HEAD (0)) != CODE_LABEL)
	BLOCK_HEAD (0) = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  rtx tail, seq, tem;
	  rtx first_use = 0;
	  rtx last_use = 0;

	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	     epilogue insns, the USE insns at the end of a function,
	     the jump insn that returns, and then a BARRIER.  */

	  /* Move the USE insns at the end of a function onto a list.
	     Each USE is unlinked from the insn chain by hand (NEXT_INSN /
	     PREV_INSN surgery) and pushed onto a local list headed by
	     first_use; last_use remembers the original final USE so the
	     list can be re-spliced in original order below.  */
	  while (prev
		 && GET_CODE (prev) == INSN
		 && GET_CODE (PATTERN (prev)) == USE)
	    {
	      tem = prev;
	      prev = prev_nonnote_insn (prev);

	      /* Unlink TEM from the chain.  */
	      NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
	      PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
	      /* Push TEM on the front of the saved list.  */
	      if (first_use)
		{
		  NEXT_INSN (tem) = first_use;
		  PREV_INSN (first_use) = tem;
		}
	      first_use = tem;
	      if (!last_use)
		last_use = tem;
	    }

	  emit_barrier_after (insn);

	  seq = gen_epilogue ();
	  tail = emit_jump_insn_after (seq, insn);

	  /* Insert the USE insns immediately before the return insn, which
	     must be the first instruction before the final barrier.  */
	  if (first_use)
	    {
	      tem = prev_nonnote_insn (get_last_insn ());
	      NEXT_INSN (PREV_INSN (tem)) = first_use;
	      PREV_INSN (first_use) = PREV_INSN (tem);
	      PREV_INSN (tem) = last_use;
	      NEXT_INSN (last_use) = tem;
	    }

	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

	  /* Include the new epilogue insns in the last block.  Ignore
	     them if they form a basic block unto themselves.  */
	  if (x_basic_block_end && n_basic_blocks
	      && GET_CODE (BLOCK_END (n_basic_blocks - 1)) != JUMP_INSN)
	    BLOCK_END (n_basic_blocks - 1) = tail;

	  /* Retain a map of the epilogue insns.  */
	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  return;
	}
    }
#endif
  epilogue = 0;
}
6434
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling, so that each note again sits
   adjacent to the prologue/epilogue insns recorded earlier in the
   `prologue' and `epilogue' UID vectors.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  /* LEN starts as the number of recorded prologue UIDs; it is
	     decremented by contains() until every prologue insn has been
	     seen.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  /* Mirror of the prologue case above, but scanning backwards
	     from the last insn and moving the note to just BEFORE the
	     first epilogue insn.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
This page took 1.574452 seconds and 5 git commands to generate.