]> gcc.gnu.org Git - gcc.git/blame - gcc/function.c
new
[gcc.git] / gcc / function.c
CommitLineData
6f086dfc 1/* Expands front end tree to back end RTL for GNU C-Compiler
c85f7c16 2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
6f086dfc
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
a35311b0
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
6f086dfc
RS
20
21
22/* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41#include "config.h"
670ee920 42#include "system.h"
6f086dfc
RS
43#include "rtl.h"
44#include "tree.h"
45#include "flags.h"
1ef08c63 46#include "except.h"
6f086dfc
RS
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
bdac5f58 56#include "basic-block.h"
c20bf1f3 57#include "obstack.h"
10f0ad3d 58#include "toplev.h"
6f086dfc 59
189cc377
RK
60#ifndef TRAMPOLINE_ALIGNMENT
61#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
62#endif
63
293e3de4
RS
64/* Some systems use __main in a way incompatible with its use in gcc, in these
65 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
66 give the same symbol without quotes for an alternative entry point. You
0f41302f 67 must define both, or neither. */
293e3de4
RS
68#ifndef NAME__MAIN
69#define NAME__MAIN "__main"
70#define SYMBOL__MAIN __main
71#endif
72
6f086dfc
RS
73/* Round a value to the lowest integer less than it that is a multiple of
74 the required alignment. Avoid using division in case the value is
75 negative. Assume the alignment is a power of two. */
76#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
77
78/* Similar, but round to the next highest integer that meets the
79 alignment. */
80#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
81
82/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
83 during rtl generation. If they are different register numbers, this is
84 always true. It may also be true if
85 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
86 generation. See fix_lexical_addr for details. */
87
88#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
89#define NEED_SEPARATE_AP
90#endif
91
92/* Number of bytes of args popped by function being compiled on its return.
93 Zero if no bytes are to be popped.
94 May affect compilation of return insn or of function epilogue. */
95
96int current_function_pops_args;
97
98/* Nonzero if function being compiled needs to be given an address
99 where the value should be stored. */
100
101int current_function_returns_struct;
102
103/* Nonzero if function being compiled needs to
104 return the address of where it has put a structure value. */
105
106int current_function_returns_pcc_struct;
107
108/* Nonzero if function being compiled needs to be passed a static chain. */
109
110int current_function_needs_context;
111
112/* Nonzero if function being compiled can call setjmp. */
113
114int current_function_calls_setjmp;
115
116/* Nonzero if function being compiled can call longjmp. */
117
118int current_function_calls_longjmp;
119
120/* Nonzero if function being compiled receives nonlocal gotos
121 from nested functions. */
122
123int current_function_has_nonlocal_label;
124
8634413a
JW
125/* Nonzero if function being compiled has nonlocal gotos to parent
126 function. */
127
128int current_function_has_nonlocal_goto;
129
4d1d8045
BS
130/* Nonzero if this function has a computed goto.
131
132 It is computed during find_basic_blocks or during stupid life
133 analysis. */
134
135int current_function_has_computed_jump;
136
6f086dfc
RS
137/* Nonzero if function being compiled contains nested functions. */
138
139int current_function_contains_functions;
140
fdb8a883
JW
141/* Nonzero if function being compiled doesn't modify the stack pointer
142 (ignoring the prologue and epilogue). This is only valid after
143 life_analysis has run. */
144
145int current_function_sp_is_unchanging;
146
173cd503
JM
147/* Nonzero if the current function is a thunk (a lightweight function that
148 just adjusts one of its arguments and forwards to another function), so
149 we should try to cut corners where we can. */
150int current_function_is_thunk;
151
6f086dfc
RS
152/* Nonzero if function being compiled can call alloca,
153 either as a subroutine or builtin. */
154
155int current_function_calls_alloca;
156
157/* Nonzero if the current function returns a pointer type */
158
159int current_function_returns_pointer;
160
161/* If some insns can be deferred to the delay slots of the epilogue, the
162 delay list for them is recorded here. */
163
164rtx current_function_epilogue_delay_list;
165
166/* If function's args have a fixed size, this is that size, in bytes.
167 Otherwise, it is -1.
168 May affect compilation of return insn or of function epilogue. */
169
170int current_function_args_size;
171
172/* # bytes the prologue should push and pretend that the caller pushed them.
173 The prologue must do this, but only if parms can be passed in registers. */
174
175int current_function_pretend_args_size;
176
f7339633 177/* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
0f41302f 178 defined, the needed space is pushed by the prologue. */
6f086dfc
RS
179
180int current_function_outgoing_args_size;
181
182/* This is the offset from the arg pointer to the place where the first
183 anonymous arg can be found, if there is one. */
184
185rtx current_function_arg_offset_rtx;
186
187/* Nonzero if current function uses varargs.h or equivalent.
188 Zero for functions that use stdarg.h. */
189
190int current_function_varargs;
191
ebb904cb
RK
192/* Nonzero if current function uses stdarg.h or equivalent.
193 Zero for functions that use varargs.h. */
194
195int current_function_stdarg;
196
6f086dfc
RS
197/* Quantities of various kinds of registers
198 used for the current function's args. */
199
200CUMULATIVE_ARGS current_function_args_info;
201
202/* Name of function now being compiled. */
203
204char *current_function_name;
205
f345de42
JL
206/* If non-zero, an RTL expression for the location at which the current
207 function returns its result. If the current function returns its
208 result in a register, current_function_return_rtx will always be
209 the hard register containing the result. */
6f086dfc
RS
210
211rtx current_function_return_rtx;
212
213/* Nonzero if the current function uses the constant pool. */
214
215int current_function_uses_const_pool;
216
217/* Nonzero if the current function uses pic_offset_table_rtx. */
218int current_function_uses_pic_offset_table;
219
220/* The arg pointer hard register, or the pseudo into which it was copied. */
221rtx current_function_internal_arg_pointer;
222
aeb302bb
JM
223/* Language-specific reason why the current function cannot be made inline. */
224char *current_function_cannot_inline;
225
07417085
KR
226/* Nonzero if instrumentation calls for function entry and exit should be
227 generated. */
228int current_function_instrument_entry_exit;
229
7d384cc0
KR
230/* Nonzero if memory access checking be enabled in the current function. */
231int current_function_check_memory_usage;
232
6f086dfc
RS
233/* The FUNCTION_DECL for an inline function currently being expanded. */
234tree inline_function_decl;
235
236/* Number of function calls seen so far in current function. */
237
238int function_call_count;
239
240/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
241 (labels to which there can be nonlocal gotos from nested functions)
242 in this function. */
243
244tree nonlocal_labels;
245
246/* RTX for stack slot that holds the current handler for nonlocal gotos.
247 Zero when function does not have nonlocal labels. */
248
249rtx nonlocal_goto_handler_slot;
250
251/* RTX for stack slot that holds the stack pointer value to restore
252 for a nonlocal goto.
253 Zero when function does not have nonlocal labels. */
254
255rtx nonlocal_goto_stack_level;
256
257/* Label that will go on parm cleanup code, if any.
258 Jumping to this label runs cleanup code for parameters, if
259 such code must be run. Following this code is the logical return label. */
260
261rtx cleanup_label;
262
263/* Label that will go on function epilogue.
264 Jumping to this label serves as a "return" instruction
265 on machines which require execution of the epilogue on all returns. */
266
267rtx return_label;
268
269/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
270 So we can mark them all live at the end of the function, if nonopt. */
271rtx save_expr_regs;
272
273/* List (chain of EXPR_LISTs) of all stack slots in this function.
274 Made for the sake of unshare_all_rtl. */
275rtx stack_slot_list;
276
277/* Chain of all RTL_EXPRs that have insns in them. */
278tree rtl_expr_chain;
279
280/* Label to jump back to for tail recursion, or 0 if we have
281 not yet needed one for this function. */
282rtx tail_recursion_label;
283
284/* Place after which to insert the tail_recursion_label if we need one. */
285rtx tail_recursion_reentry;
286
287/* Location at which to save the argument pointer if it will need to be
288 referenced. There are two cases where this is done: if nonlocal gotos
289 exist, or if vars stored at an offset from the argument pointer will be
290 needed by inner routines. */
291
292rtx arg_pointer_save_area;
293
294/* Offset to end of allocated area of stack frame.
295 If stack grows down, this is the address of the last stack slot allocated.
296 If stack grows up, this is the address for the next slot. */
8af5168b 297HOST_WIDE_INT frame_offset;
6f086dfc
RS
298
299/* List (chain of TREE_LISTs) of static chains for containing functions.
300 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
301 in an RTL_EXPR in the TREE_VALUE. */
302static tree context_display;
303
304/* List (chain of TREE_LISTs) of trampolines for nested functions.
305 The trampoline sets up the static chain and jumps to the function.
306 We supply the trampoline's address when the function's address is requested.
307
308 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
309 in an RTL_EXPR in the TREE_VALUE. */
310static tree trampoline_list;
311
312/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
313static rtx parm_birth_insn;
314
315#if 0
316/* Nonzero if a stack slot has been generated whose address is not
317 actually valid. It means that the generated rtl must all be scanned
318 to detect and correct the invalid addresses where they occur. */
319static int invalid_stack_slot;
320#endif
321
322/* Last insn of those whose job was to put parms into their nominal homes. */
323static rtx last_parm_insn;
324
e9a25f70
JL
325/* 1 + last pseudo register number possibly used for loading a copy
326 of a parameter of this function. */
327int max_parm_reg;
6f086dfc
RS
328
329/* Vector indexed by REGNO, containing location on stack in which
330 to put the parm which is nominally in pseudo register REGNO,
e9a25f70
JL
331 if we discover that that parm must go in the stack. The highest
332 element in this vector is one less than MAX_PARM_REG, above. */
333rtx *parm_reg_stack_loc;
6f086dfc 334
6f086dfc
RS
335/* Nonzero once virtual register instantiation has been done.
336 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
337static int virtuals_instantiated;
338
46766466
RS
339/* These variables hold pointers to functions to
340 save and restore machine-specific data,
341 in push_function_context and pop_function_context. */
9e014ded
RK
342void (*save_machine_status) PROTO((struct function *));
343void (*restore_machine_status) PROTO((struct function *));
46766466 344
6f086dfc
RS
345/* Nonzero if we need to distinguish between the return value of this function
346 and the return value of a function called by this function. This helps
347 integrate.c */
348
349extern int rtx_equal_function_value_matters;
e7a84011 350extern tree sequence_rtl_expr;
6f086dfc
RS
351\f
352/* In order to evaluate some expressions, such as function calls returning
353 structures in memory, we need to temporarily allocate stack locations.
354 We record each allocated temporary in the following structure.
355
356 Associated with each temporary slot is a nesting level. When we pop up
357 one level, all temporaries associated with the previous level are freed.
358 Normally, all temporaries are freed after the execution of the statement
359 in which they were created. However, if we are inside a ({...}) grouping,
360 the result may be in a temporary and hence must be preserved. If the
361 result could be in a temporary, we preserve it if we can determine which
362 one it is in. If we cannot determine which temporary may contain the
363 result, all temporaries are preserved. A temporary is preserved by
364 pretending it was allocated at the previous nesting level.
365
366 Automatic variables are also assigned temporary slots, at the nesting
367 level where they are defined. They are marked a "kept" so that
368 free_temp_slots will not free them. */
369
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
398
399/* List of all temporaries allocated, both available and in use. */
400
401struct temp_slot *temp_slots;
402
403/* Current nesting level for temporaries. */
404
405int temp_slot_level;
e5e809f4
JL
406
407/* Current nesting level for variables in a block. */
408
409int var_temp_slot_level;
f5963e61
JL
410
411/* When temporaries are created by TARGET_EXPRs, they are created at
412 this level of temp_slot_level, so that they can remain allocated
413 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
414 of TARGET_EXPRs. */
415int target_temp_slot_level;
6f086dfc 416\f
e15679f8
RK
417/* This structure is used to record MEMs or pseudos used to replace VAR, any
418 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
419 maintain this list in case two operands of an insn were required to match;
420 in that case we must ensure we use the same replacement. */
421
struct fixup_replacement
{
  /* The original rtx: VAR itself, a SUBREG of VAR, or a MEM containing
     VAR as an address (see the comment preceding this struct).  */
  rtx old;
  /* The MEM or pseudo that replaces OLD; reused so that matching
     operands of one insn get the same replacement.  */
  rtx new;
  /* Next replacement recorded for the same fixup pass.  */
  struct fixup_replacement *next;
};
428
429/* Forward declarations. */
430
1ac4f799
JL
431static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
432 int, struct function *));
e15679f8
RK
433static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
434static void put_reg_into_stack PROTO((struct function *, rtx, tree,
0006e95b 435 enum machine_mode, enum machine_mode,
e5e809f4 436 int, int, int));
e15679f8
RK
437static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
438static struct fixup_replacement
439 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
440static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
441 rtx, int));
442static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
443 struct fixup_replacement **));
444static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
445static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
446static rtx fixup_stack_1 PROTO((rtx, rtx));
447static void optimize_bit_field PROTO((rtx, rtx, rtx *));
448static void instantiate_decls PROTO((tree, int));
449static void instantiate_decls_1 PROTO((tree, int));
450static void instantiate_decl PROTO((rtx, int, int));
451static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
452static void delete_handlers PROTO((void));
453static void pad_to_arg_alignment PROTO((struct args_size *, int));
51723711 454#ifndef ARGS_GROW_DOWNWARD
e15679f8
RK
455static void pad_below PROTO((struct args_size *, enum machine_mode,
456 tree));
51723711 457#endif
487a6e06 458#ifdef ARGS_GROW_DOWNWARD
e15679f8 459static tree round_down PROTO((tree, int));
487a6e06 460#endif
e15679f8
RK
461static rtx round_trampoline_addr PROTO((rtx));
462static tree blocks_nreverse PROTO((tree));
463static int all_blocks PROTO((tree, tree *));
081f5e7e 464#if defined (HAVE_prologue) || defined (HAVE_epilogue)
487a6e06 465static int *record_insns PROTO((rtx));
e15679f8 466static int contains PROTO((rtx, int *));
081f5e7e 467#endif /* HAVE_prologue || HAVE_epilogue */
e9a25f70 468static void put_addressof_into_stack PROTO((rtx));
f7b6d104 469static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
c20bf1f3 470\f
6f086dfc
RS
471/* Pointer to chain of `struct function' for containing functions. */
472struct function *outer_function_chain;
473
474/* Given a function decl for a containing function,
475 return the `struct function' for it. */
476
477struct function *
478find_function_data (decl)
479 tree decl;
480{
481 struct function *p;
e5e809f4 482
6f086dfc
RS
483 for (p = outer_function_chain; p; p = p->next)
484 if (p->decl == decl)
485 return p;
e5e809f4 486
6f086dfc
RS
487 abort ();
488}
489
490/* Save the current context for compilation of a nested function.
491 This is called from language-specific code.
492 The caller is responsible for saving any language-specific status,
6dc42e49 493 since this function knows only about language-independent variables. */
6f086dfc
RS
494
void
push_function_context_to (context)
     tree context;
{
  /* Allocate a fresh save record and push it on the chain of
     containing functions.  */
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Identity of the function being suspended.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  /* Per-function flags and accounting.  */
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  /* Argument-area layout.  */
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  /* RTL anchors and per-function chains.  */
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  /* Temporary-slot state.  */
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  /* The fixup queue starts empty; pop_function_context_from drains it.  */
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  /* Save the status of the other compiler subsystems, then give the
     target a chance to save its own per-function data.  */
  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}
567
e4a4639e
JM
/* Convenience wrapper: save context using the current function decl
   as the containing context.  */

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
573
6f086dfc
RS
574/* Restore the last saved context, at the end of a nested function.
575 This function is called from language-specific code. */
576
void
pop_function_context_from (context)
     tree context;
{
  /* The most recently pushed save record is the one to restore.  */
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  /* The popped function contains functions if it did before, if it
     allocated inline obstacks, or if the function just compiled was
     nested directly within it.  */
  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  /* Register renumbering info is not saved/restored; invalidate it.  */
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  /* Restore the other subsystems' state, then the target's
     machine-specific data.  */
  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
e4a4639e
JM
663
664void pop_function_context ()
665{
a0dabda5 666 pop_function_context_from (current_function_decl);
e4a4639e 667}
6f086dfc
RS
668\f
669/* Allocate fixed slots in the stack frame of the current function. */
670
671/* Return size needed for stack frame based on slots so far allocated.
672 This size counts from zero. It is not rounded to STACK_BOUNDARY;
673 the caller may have to do that. */
674
HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  /* When the frame grows downward, frame_offset is the (non-positive)
     address of the last slot allocated, so the size is its negation.  */
  return -frame_offset;
#else
  /* When the frame grows upward, frame_offset is already the size.  */
  return frame_offset;
#endif
}
684
685/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
686 with machine mode MODE.
687
688 ALIGN controls the amount of alignment for the address of the slot:
689 0 means according to MODE,
690 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
691 positive specifies alignment boundary in bits.
692
693 We do not round to stack_boundary here. */
694
695rtx
696assign_stack_local (mode, size, align)
697 enum machine_mode mode;
e5e809f4 698 HOST_WIDE_INT size;
6f086dfc
RS
699 int align;
700{
701 register rtx x, addr;
702 int bigend_correction = 0;
703 int alignment;
704
705 if (align == 0)
706 {
707 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
708 if (mode == BLKmode)
709 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
710 }
711 else if (align == -1)
712 {
713 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
714 size = CEIL_ROUND (size, alignment);
715 }
716 else
717 alignment = align / BITS_PER_UNIT;
718
6f086dfc
RS
719 /* Round frame offset to that alignment.
720 We must be careful here, since FRAME_OFFSET might be negative and
721 division with a negative dividend isn't as well defined as we might
722 like. So we instead assume that ALIGNMENT is a power of two and
723 use logical operations which are unambiguous. */
724#ifdef FRAME_GROWS_DOWNWARD
725 frame_offset = FLOOR_ROUND (frame_offset, alignment);
726#else
727 frame_offset = CEIL_ROUND (frame_offset, alignment);
728#endif
729
730 /* On a big-endian machine, if we are allocating more space than we will use,
731 use the least significant bytes of those that are allocated. */
f76b9db2 732 if (BYTES_BIG_ENDIAN && mode != BLKmode)
6f086dfc 733 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc
RS
734
735#ifdef FRAME_GROWS_DOWNWARD
736 frame_offset -= size;
737#endif
738
739 /* If we have already instantiated virtual registers, return the actual
740 address relative to the frame pointer. */
741 if (virtuals_instantiated)
742 addr = plus_constant (frame_pointer_rtx,
743 (frame_offset + bigend_correction
744 + STARTING_FRAME_OFFSET));
745 else
746 addr = plus_constant (virtual_stack_vars_rtx,
747 frame_offset + bigend_correction);
748
749#ifndef FRAME_GROWS_DOWNWARD
750 frame_offset += size;
751#endif
752
38a448ca 753 x = gen_rtx_MEM (mode, addr);
6f086dfc 754
38a448ca 755 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
6f086dfc
RS
756
757 return x;
758}
759
760/* Assign a stack slot in a containing function.
761 First three arguments are same as in preceding function.
762 The last argument specifies the function to allocate in. */
763
1ac4f799 764static rtx
6f086dfc
RS
765assign_outer_stack_local (mode, size, align, function)
766 enum machine_mode mode;
e5e809f4 767 HOST_WIDE_INT size;
6f086dfc
RS
768 int align;
769 struct function *function;
770{
771 register rtx x, addr;
772 int bigend_correction = 0;
773 int alignment;
774
775 /* Allocate in the memory associated with the function in whose frame
776 we are assigning. */
777 push_obstacks (function->function_obstack,
778 function->function_maybepermanent_obstack);
779
780 if (align == 0)
781 {
782 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
783 if (mode == BLKmode)
784 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
785 }
786 else if (align == -1)
787 {
788 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
789 size = CEIL_ROUND (size, alignment);
790 }
791 else
792 alignment = align / BITS_PER_UNIT;
793
6f086dfc
RS
794 /* Round frame offset to that alignment. */
795#ifdef FRAME_GROWS_DOWNWARD
2af69b62 796 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
6f086dfc 797#else
2af69b62 798 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
6f086dfc
RS
799#endif
800
801 /* On a big-endian machine, if we are allocating more space than we will use,
802 use the least significant bytes of those that are allocated. */
f76b9db2 803 if (BYTES_BIG_ENDIAN && mode != BLKmode)
6f086dfc 804 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc
RS
805
806#ifdef FRAME_GROWS_DOWNWARD
807 function->frame_offset -= size;
808#endif
809 addr = plus_constant (virtual_stack_vars_rtx,
810 function->frame_offset + bigend_correction);
811#ifndef FRAME_GROWS_DOWNWARD
812 function->frame_offset += size;
813#endif
814
38a448ca 815 x = gen_rtx_MEM (mode, addr);
6f086dfc
RS
816
817 function->stack_slot_list
38a448ca 818 = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
6f086dfc
RS
819
820 pop_obstacks ();
821
822 return x;
823}
824\f
825/* Allocate a temporary stack slot and record it for possible later
826 reuse.
827
828 MODE is the machine mode to be given to the returned rtx.
829
830 SIZE is the size in units of the space required. We do no rounding here
831 since assign_stack_local will do any required rounding.
832
d93d4205
MS
833 KEEP is 1 if this slot is to be retained after a call to
834 free_temp_slots. Automatic variables for a block are allocated
e5e809f4
JL
835 with this flag. KEEP is 2 if we allocate a longer term temporary,
836 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
837 if we are to allocate something at an inner level to be treated as
838 a variable in the block (e.g., a SAVE_EXPR). */
6f086dfc
RS
839
840rtx
841assign_stack_temp (mode, size, keep)
842 enum machine_mode mode;
e5e809f4 843 HOST_WIDE_INT size;
6f086dfc
RS
844 int keep;
845{
846 struct temp_slot *p, *best_p = 0;
847
303ec2aa
RK
848 /* If SIZE is -1 it means that somebody tried to allocate a temporary
849 of a variable size. */
850 if (size == -1)
851 abort ();
852
6f086dfc
RS
853 /* First try to find an available, already-allocated temporary that is the
854 exact size we require. */
855 for (p = temp_slots; p; p = p->next)
856 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
857 break;
858
859 /* If we didn't find, one, try one that is larger than what we want. We
860 find the smallest such. */
861 if (p == 0)
862 for (p = temp_slots; p; p = p->next)
863 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
864 && (best_p == 0 || best_p->size > p->size))
865 best_p = p;
866
867 /* Make our best, if any, the one to use. */
868 if (best_p)
a45035b6
JW
869 {
870 /* If there are enough aligned bytes left over, make them into a new
871 temp_slot so that the extra bytes don't get wasted. Do this only
872 for BLKmode slots, so that we can be sure of the alignment. */
873 if (GET_MODE (best_p->slot) == BLKmode)
874 {
875 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
e5e809f4 876 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
877
878 if (best_p->size - rounded_size >= alignment)
879 {
880 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
a25d4ba2 881 p->in_use = p->addr_taken = 0;
a45035b6 882 p->size = best_p->size - rounded_size;
307d8cd6
RK
883 p->base_offset = best_p->base_offset + rounded_size;
884 p->full_size = best_p->full_size - rounded_size;
38a448ca
RH
885 p->slot = gen_rtx_MEM (BLKmode,
886 plus_constant (XEXP (best_p->slot, 0),
887 rounded_size));
e5e76139 888 p->address = 0;
84e24c03 889 p->rtl_expr = 0;
a45035b6
JW
890 p->next = temp_slots;
891 temp_slots = p;
892
38a448ca
RH
893 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
894 stack_slot_list);
a45035b6
JW
895
896 best_p->size = rounded_size;
291dde90 897 best_p->full_size = rounded_size;
a45035b6
JW
898 }
899 }
900
901 p = best_p;
902 }
903
6f086dfc
RS
904 /* If we still didn't find one, make a new temporary. */
905 if (p == 0)
906 {
e5e809f4
JL
907 HOST_WIDE_INT frame_offset_old = frame_offset;
908
6f086dfc 909 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
e5e809f4 910
6f086dfc
RS
911 /* If the temp slot mode doesn't indicate the alignment,
912 use the largest possible, so no one will be disappointed. */
e5e76139 913 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
e5e809f4 914
b2a80c0d
DE
915 /* The following slot size computation is necessary because we don't
916 know the actual size of the temporary slot until assign_stack_local
917 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
918 requested temporary. Note that extra space added for alignment
919 can be either above or below this stack slot depending on which
920 way the frame grows. We include the extra space if and only if it
921 is above this slot. */
b2a80c0d
DE
922#ifdef FRAME_GROWS_DOWNWARD
923 p->size = frame_offset_old - frame_offset;
924#else
fc91b0d0
RK
925 p->size = size;
926#endif
e5e809f4 927
fc91b0d0
RK
928 /* Now define the fields used by combine_temp_slots. */
929#ifdef FRAME_GROWS_DOWNWARD
930 p->base_offset = frame_offset;
931 p->full_size = frame_offset_old - frame_offset;
932#else
933 p->base_offset = frame_offset_old;
934 p->full_size = frame_offset - frame_offset_old;
b2a80c0d 935#endif
e5e76139 936 p->address = 0;
6f086dfc
RS
937 p->next = temp_slots;
938 temp_slots = p;
939 }
940
941 p->in_use = 1;
a25d4ba2 942 p->addr_taken = 0;
e7a84011 943 p->rtl_expr = sequence_rtl_expr;
a25d4ba2 944
d93d4205
MS
945 if (keep == 2)
946 {
947 p->level = target_temp_slot_level;
948 p->keep = 0;
949 }
e5e809f4
JL
950 else if (keep == 3)
951 {
952 p->level = var_temp_slot_level;
953 p->keep = 0;
954 }
d93d4205
MS
955 else
956 {
957 p->level = temp_slot_level;
958 p->keep = keep;
959 }
1995f267
RK
960
961 /* We may be reusing an old slot, so clear any MEM flags that may have been
962 set from before. */
963 RTX_UNCHANGING_P (p->slot) = 0;
964 MEM_IN_STRUCT_P (p->slot) = 0;
6f086dfc
RS
965 return p->slot;
966}
638141a6 967\f
230f21b4
PB
968/* Assign a temporary of given TYPE.
969 KEEP is as for assign_stack_temp.
970 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
b55d9ff8
RK
971 it is 0 if a register is OK.
972 DONT_PROMOTE is 1 if we should not promote values in register
973 to wider modes. */
230f21b4
PB
974
975rtx
b55d9ff8 976assign_temp (type, keep, memory_required, dont_promote)
230f21b4
PB
977 tree type;
978 int keep;
979 int memory_required;
b55d9ff8 980 int dont_promote;
230f21b4
PB
981{
982 enum machine_mode mode = TYPE_MODE (type);
638141a6
RK
983 int unsignedp = TREE_UNSIGNED (type);
984
230f21b4
PB
985 if (mode == BLKmode || memory_required)
986 {
e5e809f4 987 HOST_WIDE_INT size = int_size_in_bytes (type);
230f21b4
PB
988 rtx tmp;
989
990 /* Unfortunately, we don't yet know how to allocate variable-sized
991 temporaries. However, sometimes we have a fixed upper limit on
992 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
0f41302f 993 instead. This is the case for Chill variable-sized strings. */
230f21b4
PB
994 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
995 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
996 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
997 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
998
999 tmp = assign_stack_temp (mode, size, keep);
1000 MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
1001 return tmp;
1002 }
638141a6 1003
230f21b4 1004#ifndef PROMOTE_FOR_CALL_ONLY
b55d9ff8
RK
1005 if (! dont_promote)
1006 mode = promote_mode (type, mode, &unsignedp, 0);
230f21b4 1007#endif
638141a6 1008
230f21b4
PB
1009 return gen_reg_rtx (mode);
1010}
638141a6 1011\f
a45035b6
JW
1012/* Combine temporary stack slots which are adjacent on the stack.
1013
1014 This allows for better use of already allocated stack space. This is only
1015 done for BLKmode slots because we can be sure that we won't have alignment
1016 problems in this case. */
1017
1018void
1019combine_temp_slots ()
1020{
1021 struct temp_slot *p, *q;
1022 struct temp_slot *prev_p, *prev_q;
e5e809f4
JL
1023 int num_slots;
1024
1025 /* If there are a lot of temp slots, don't do anything unless
1026 high levels of optimizaton. */
1027 if (! flag_expensive_optimizations)
1028 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1029 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1030 return;
a45035b6 1031
e9b7093a
RS
1032 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1033 {
1034 int delete_p = 0;
e5e809f4 1035
e9b7093a
RS
1036 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1037 for (q = p->next, prev_q = p; q; q = prev_q->next)
a45035b6 1038 {
e9b7093a
RS
1039 int delete_q = 0;
1040 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
a45035b6 1041 {
fc91b0d0 1042 if (p->base_offset + p->full_size == q->base_offset)
e9b7093a
RS
1043 {
1044 /* Q comes after P; combine Q into P. */
1045 p->size += q->size;
307d8cd6 1046 p->full_size += q->full_size;
e9b7093a
RS
1047 delete_q = 1;
1048 }
fc91b0d0 1049 else if (q->base_offset + q->full_size == p->base_offset)
e9b7093a
RS
1050 {
1051 /* P comes after Q; combine P into Q. */
1052 q->size += p->size;
307d8cd6 1053 q->full_size += p->full_size;
e9b7093a
RS
1054 delete_p = 1;
1055 break;
1056 }
a45035b6 1057 }
e9b7093a
RS
1058 /* Either delete Q or advance past it. */
1059 if (delete_q)
1060 prev_q->next = q->next;
1061 else
1062 prev_q = q;
a45035b6 1063 }
e9b7093a
RS
1064 /* Either delete P or advance past it. */
1065 if (delete_p)
1066 {
1067 if (prev_p)
1068 prev_p->next = p->next;
1069 else
1070 temp_slots = p->next;
1071 }
1072 else
1073 prev_p = p;
1074 }
a45035b6 1075}
6f086dfc 1076\f
e5e76139
RK
1077/* Find the temp slot corresponding to the object at address X. */
1078
1079static struct temp_slot *
1080find_temp_slot_from_address (x)
1081 rtx x;
1082{
1083 struct temp_slot *p;
1084 rtx next;
1085
1086 for (p = temp_slots; p; p = p->next)
1087 {
1088 if (! p->in_use)
1089 continue;
e5e809f4 1090
e5e76139 1091 else if (XEXP (p->slot, 0) == x
abb52246
RK
1092 || p->address == x
1093 || (GET_CODE (x) == PLUS
1094 && XEXP (x, 0) == virtual_stack_vars_rtx
1095 && GET_CODE (XEXP (x, 1)) == CONST_INT
1096 && INTVAL (XEXP (x, 1)) >= p->base_offset
1097 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
e5e76139
RK
1098 return p;
1099
1100 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1101 for (next = p->address; next; next = XEXP (next, 1))
1102 if (XEXP (next, 0) == x)
1103 return p;
1104 }
1105
1106 return 0;
1107}
1108
9faa82d8 1109/* Indicate that NEW is an alternate way of referring to the temp slot
e5e809f4 1110 that previously was known by OLD. */
e5e76139
RK
1111
1112void
1113update_temp_slot_address (old, new)
1114 rtx old, new;
1115{
1116 struct temp_slot *p = find_temp_slot_from_address (old);
1117
1118 /* If none, return. Else add NEW as an alias. */
1119 if (p == 0)
1120 return;
1121 else if (p->address == 0)
1122 p->address = new;
1123 else
1124 {
1125 if (GET_CODE (p->address) != EXPR_LIST)
38a448ca 1126 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
e5e76139 1127
38a448ca 1128 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
e5e76139
RK
1129 }
1130}
1131
a25d4ba2 1132/* If X could be a reference to a temporary slot, mark the fact that its
9faa82d8 1133 address was taken. */
a25d4ba2
RK
1134
1135void
1136mark_temp_addr_taken (x)
1137 rtx x;
1138{
1139 struct temp_slot *p;
1140
1141 if (x == 0)
1142 return;
1143
1144 /* If X is not in memory or is at a constant address, it cannot be in
1145 a temporary slot. */
1146 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1147 return;
1148
1149 p = find_temp_slot_from_address (XEXP (x, 0));
1150 if (p != 0)
1151 p->addr_taken = 1;
1152}
1153
9cca6a99
MS
1154/* If X could be a reference to a temporary slot, mark that slot as
1155 belonging to the to one level higher than the current level. If X
1156 matched one of our slots, just mark that one. Otherwise, we can't
1157 easily predict which it is, so upgrade all of them. Kept slots
1158 need not be touched.
6f086dfc
RS
1159
1160 This is called when an ({...}) construct occurs and a statement
1161 returns a value in memory. */
1162
1163void
1164preserve_temp_slots (x)
1165 rtx x;
1166{
a25d4ba2 1167 struct temp_slot *p = 0;
6f086dfc 1168
73620b82
RK
1169 /* If there is no result, we still might have some objects whose address
1170 were taken, so we need to make sure they stay around. */
e3a77161 1171 if (x == 0)
73620b82
RK
1172 {
1173 for (p = temp_slots; p; p = p->next)
1174 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1175 p->level--;
1176
1177 return;
1178 }
e3a77161
RK
1179
1180 /* If X is a register that is being used as a pointer, see if we have
1181 a temporary slot we know it points to. To be consistent with
1182 the code below, we really should preserve all non-kept slots
1183 if we can't find a match, but that seems to be much too costly. */
a25d4ba2
RK
1184 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1185 p = find_temp_slot_from_address (x);
1186
6f086dfc 1187 /* If X is not in memory or is at a constant address, it cannot be in
e19571db
RK
1188 a temporary slot, but it can contain something whose address was
1189 taken. */
a25d4ba2 1190 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
e19571db
RK
1191 {
1192 for (p = temp_slots; p; p = p->next)
1193 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1194 p->level--;
1195
1196 return;
1197 }
6f086dfc
RS
1198
1199 /* First see if we can find a match. */
73620b82 1200 if (p == 0)
a25d4ba2
RK
1201 p = find_temp_slot_from_address (XEXP (x, 0));
1202
e5e76139
RK
1203 if (p != 0)
1204 {
a25d4ba2
RK
1205 /* Move everything at our level whose address was taken to our new
1206 level in case we used its address. */
1207 struct temp_slot *q;
1208
9cca6a99
MS
1209 if (p->level == temp_slot_level)
1210 {
1211 for (q = temp_slots; q; q = q->next)
1212 if (q != p && q->addr_taken && q->level == p->level)
1213 q->level--;
a25d4ba2 1214
9cca6a99
MS
1215 p->level--;
1216 p->addr_taken = 0;
1217 }
e5e76139
RK
1218 return;
1219 }
6f086dfc
RS
1220
1221 /* Otherwise, preserve all non-kept slots at this level. */
1222 for (p = temp_slots; p; p = p->next)
1223 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1224 p->level--;
1225}
1226
422c8f63
RK
1227/* X is the result of an RTL_EXPR. If it is a temporary slot associated
1228 with that RTL_EXPR, promote it into a temporary slot at the present
1229 level so it will not be freed when we free slots made in the
1230 RTL_EXPR. */
1231
1232void
1233preserve_rtl_expr_result (x)
1234 rtx x;
1235{
1236 struct temp_slot *p;
1237
1238 /* If X is not in memory or is at a constant address, it cannot be in
1239 a temporary slot. */
1240 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1241 return;
1242
199b61d8
RK
1243 /* If we can find a match, move it to our level unless it is already at
1244 an upper level. */
1245 p = find_temp_slot_from_address (XEXP (x, 0));
1246 if (p != 0)
1247 {
1248 p->level = MIN (p->level, temp_slot_level);
1249 p->rtl_expr = 0;
1250 }
422c8f63
RK
1251
1252 return;
1253}
1254
6f086dfc 1255/* Free all temporaries used so far. This is normally called at the end
e7a84011
RK
1256 of generating code for a statement. Don't free any temporaries
1257 currently in use for an RTL_EXPR that hasn't yet been emitted.
1258 We could eventually do better than this since it can be reused while
1259 generating the same RTL_EXPR, but this is complex and probably not
1260 worthwhile. */
6f086dfc
RS
1261
1262void
1263free_temp_slots ()
1264{
1265 struct temp_slot *p;
1266
1267 for (p = temp_slots; p; p = p->next)
e7a84011
RK
1268 if (p->in_use && p->level == temp_slot_level && ! p->keep
1269 && p->rtl_expr == 0)
1270 p->in_use = 0;
1271
1272 combine_temp_slots ();
1273}
1274
1275/* Free all temporary slots used in T, an RTL_EXPR node. */
1276
1277void
1278free_temps_for_rtl_expr (t)
1279 tree t;
1280{
1281 struct temp_slot *p;
1282
1283 for (p = temp_slots; p; p = p->next)
1284 if (p->rtl_expr == t)
6f086dfc 1285 p->in_use = 0;
a45035b6
JW
1286
1287 combine_temp_slots ();
6f086dfc
RS
1288}
1289
956d6950 1290/* Mark all temporaries ever allocated in this function as not suitable
a94e4054
RK
1291 for reuse until the current level is exited. */
1292
1293void
1294mark_all_temps_used ()
1295{
1296 struct temp_slot *p;
1297
1298 for (p = temp_slots; p; p = p->next)
1299 {
85b119d1 1300 p->in_use = p->keep = 1;
27ce006b 1301 p->level = MIN (p->level, temp_slot_level);
a94e4054
RK
1302 }
1303}
1304
6f086dfc
RS
1305/* Push deeper into the nesting level for stack temporaries. */
1306
1307void
1308push_temp_slots ()
1309{
6f086dfc
RS
1310 temp_slot_level++;
1311}
1312
e5e809f4
JL
1313/* Likewise, but save the new level as the place to allocate variables
1314 for blocks. */
1315
1316void
1317push_temp_slots_for_block ()
1318{
1319 push_temp_slots ();
1320
1321 var_temp_slot_level = temp_slot_level;
1322}
1323
f5963e61
JL
1324/* Likewise, but save the new level as the place to allocate temporaries
1325 for TARGET_EXPRs. */
1326
1327void
1328push_temp_slots_for_target ()
1329{
1330 push_temp_slots ();
1331
1332 target_temp_slot_level = temp_slot_level;
1333}
1334
1335/* Set and get the value of target_temp_slot_level. The only
1336 permitted use of these functions is to save and restore this value. */
1337
1338int
1339get_target_temp_slot_level ()
1340{
1341 return target_temp_slot_level;
1342}
1343
1344void
1345set_target_temp_slot_level (level)
1346 int level;
1347{
1348 target_temp_slot_level = level;
1349}
1350
6f086dfc
RS
1351/* Pop a temporary nesting level. All slots in use in the current level
1352 are freed. */
1353
1354void
1355pop_temp_slots ()
1356{
1357 struct temp_slot *p;
1358
6f086dfc 1359 for (p = temp_slots; p; p = p->next)
e7a84011 1360 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
6f086dfc
RS
1361 p->in_use = 0;
1362
a45035b6
JW
1363 combine_temp_slots ();
1364
6f086dfc
RS
1365 temp_slot_level--;
1366}
bc0ebdf9
RK
1367
1368/* Initialize temporary slots. */
1369
1370void
1371init_temp_slots ()
1372{
1373 /* We have not allocated any temporaries yet. */
1374 temp_slots = 0;
1375 temp_slot_level = 0;
e5e809f4 1376 var_temp_slot_level = 0;
bc0ebdf9
RK
1377 target_temp_slot_level = 0;
1378}
6f086dfc
RS
1379\f
1380/* Retroactively move an auto variable from a register to a stack slot.
1381 This is done when an address-reference to the variable is seen. */
1382
1383void
1384put_var_into_stack (decl)
1385 tree decl;
1386{
1387 register rtx reg;
00d8a4c1 1388 enum machine_mode promoted_mode, decl_mode;
6f086dfc 1389 struct function *function = 0;
c20bf1f3 1390 tree context;
e9a25f70 1391 int can_use_addressof;
c20bf1f3 1392
c20bf1f3 1393 context = decl_function_context (decl);
6f086dfc 1394
9ec36da5 1395 /* Get the current rtl used for this object and its original mode. */
6f086dfc 1396 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
2baccce2
RS
1397
1398 /* No need to do anything if decl has no rtx yet
1399 since in that case caller is setting TREE_ADDRESSABLE
1400 and a stack slot will be assigned when the rtl is made. */
1401 if (reg == 0)
1402 return;
00d8a4c1
RK
1403
1404 /* Get the declared mode for this object. */
1405 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1406 : DECL_MODE (decl));
2baccce2
RS
1407 /* Get the mode it's actually stored in. */
1408 promoted_mode = GET_MODE (reg);
6f086dfc
RS
1409
1410 /* If this variable comes from an outer function,
1411 find that function's saved context. */
4ac74fb8 1412 if (context != current_function_decl && context != inline_function_decl)
6f086dfc
RS
1413 for (function = outer_function_chain; function; function = function->next)
1414 if (function->decl == context)
1415 break;
1416
6f086dfc
RS
1417 /* If this is a variable-size object with a pseudo to address it,
1418 put that pseudo into the stack, if the var is nonlocal. */
a82ad570 1419 if (DECL_NONLOCAL (decl)
6f086dfc
RS
1420 && GET_CODE (reg) == MEM
1421 && GET_CODE (XEXP (reg, 0)) == REG
1422 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
4cdb3e78
RS
1423 {
1424 reg = XEXP (reg, 0);
1425 decl_mode = promoted_mode = GET_MODE (reg);
1426 }
e15762df 1427
e9a25f70
JL
1428 can_use_addressof
1429 = (function == 0
e5e809f4 1430 && optimize > 0
e9a25f70
JL
1431 /* FIXME make it work for promoted modes too */
1432 && decl_mode == promoted_mode
1433#ifdef NON_SAVING_SETJMP
1434 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1435#endif
1436 );
1437
1438 /* If we can't use ADDRESSOF, make sure we see through one we already
1439 generated. */
1440 if (! can_use_addressof && GET_CODE (reg) == MEM
1441 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1442 reg = XEXP (XEXP (reg, 0), 0);
1443
293e3de4
RS
1444 /* Now we should have a value that resides in one or more pseudo regs. */
1445
1446 if (GET_CODE (reg) == REG)
e9a25f70
JL
1447 {
1448 /* If this variable lives in the current function and we don't need
1449 to put things in the stack for the sake of setjmp, try to keep it
1450 in a register until we know we actually need the address. */
1451 if (can_use_addressof)
1452 gen_mem_addressof (reg, decl);
1453 else
1454 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1455 promoted_mode, decl_mode,
e5e809f4
JL
1456 TREE_SIDE_EFFECTS (decl), 0,
1457 TREE_USED (decl)
1458 || DECL_INITIAL (decl) != 0);
e9a25f70 1459 }
293e3de4
RS
1460 else if (GET_CODE (reg) == CONCAT)
1461 {
1462 /* A CONCAT contains two pseudos; put them both in the stack.
1463 We do it so they end up consecutive. */
1464 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1465 tree part_type = TREE_TYPE (TREE_TYPE (decl));
4738c10d 1466#ifdef FRAME_GROWS_DOWNWARD
293e3de4 1467 /* Since part 0 should have a lower address, do it second. */
0006e95b 1468 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
e5e809f4
JL
1469 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1470 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
0006e95b 1471 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
e5e809f4
JL
1472 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1473 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
293e3de4 1474#else
0006e95b 1475 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
e5e809f4
JL
1476 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1477 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
0006e95b 1478 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
e5e809f4
JL
1479 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1480 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
293e3de4
RS
1481#endif
1482
1483 /* Change the CONCAT into a combined MEM for both parts. */
1484 PUT_CODE (reg, MEM);
0006e95b 1485 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
41472af8 1486 MEM_ALIAS_SET (reg) = get_alias_set (decl);
0006e95b 1487
293e3de4
RS
1488 /* The two parts are in memory order already.
1489 Use the lower parts address as ours. */
1490 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1491 /* Prevent sharing of rtl that might lose. */
1492 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1493 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1494 }
86fa911a
RK
1495 else
1496 return;
1497
7d384cc0 1498 if (current_function_check_memory_usage)
86fa911a
RK
1499 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1500 XEXP (reg, 0), ptr_mode,
1501 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1502 TYPE_MODE (sizetype),
956d6950
JL
1503 GEN_INT (MEMORY_USE_RW),
1504 TYPE_MODE (integer_type_node));
293e3de4
RS
1505}
1506
1507/* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1508 into the stack frame of FUNCTION (0 means the current function).
1509 DECL_MODE is the machine mode of the user-level data type.
0006e95b 1510 PROMOTED_MODE is the machine mode of the register.
e5e809f4
JL
1511 VOLATILE_P is nonzero if this is for a "volatile" decl.
1512 USED_P is nonzero if this reg might have already been used in an insn. */
293e3de4
RS
1513
1514static void
e9a25f70 1515put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
e5e809f4 1516 original_regno, used_p)
293e3de4
RS
1517 struct function *function;
1518 rtx reg;
1519 tree type;
1520 enum machine_mode promoted_mode, decl_mode;
0006e95b 1521 int volatile_p;
e9a25f70 1522 int original_regno;
e5e809f4 1523 int used_p;
293e3de4
RS
1524{
1525 rtx new = 0;
e9a25f70
JL
1526 int regno = original_regno;
1527
1528 if (regno == 0)
1529 regno = REGNO (reg);
6f086dfc
RS
1530
1531 if (function)
1532 {
e9a25f70
JL
1533 if (regno < function->max_parm_reg)
1534 new = function->parm_reg_stack_loc[regno];
6f086dfc 1535 if (new == 0)
e15762df 1536 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
6f086dfc
RS
1537 0, function);
1538 }
1539 else
1540 {
e9a25f70
JL
1541 if (regno < max_parm_reg)
1542 new = parm_reg_stack_loc[regno];
6f086dfc 1543 if (new == 0)
e15762df 1544 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
6f086dfc
RS
1545 }
1546
0006e95b 1547 PUT_MODE (reg, decl_mode);
6f086dfc
RS
1548 XEXP (reg, 0) = XEXP (new, 0);
1549 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
0006e95b 1550 MEM_VOLATILE_P (reg) = volatile_p;
6f086dfc
RS
1551 PUT_CODE (reg, MEM);
1552
1553 /* If this is a memory ref that contains aggregate components,
bdd3e6ab
JW
1554 mark it as such for cse and loop optimize. If we are reusing a
1555 previously generated stack slot, then we need to copy the bit in
1556 case it was set for other reasons. For instance, it is set for
1557 __builtin_va_alist. */
1558 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
41472af8 1559 MEM_ALIAS_SET (reg) = get_alias_set (type);
6f086dfc
RS
1560
1561 /* Now make sure that all refs to the variable, previously made
1562 when it was a register, are fixed up to be valid again. */
e5e809f4
JL
1563
1564 if (used_p && function != 0)
6f086dfc
RS
1565 {
1566 struct var_refs_queue *temp;
1567
1568 /* Variable is inherited; fix it up when we get back to its function. */
1569 push_obstacks (function->function_obstack,
1570 function->function_maybepermanent_obstack);
4da73fa0
RK
1571
1572 /* See comment in restore_tree_status in tree.c for why this needs to be
1573 on saveable obstack. */
6f086dfc 1574 temp
4da73fa0 1575 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
6f086dfc 1576 temp->modified = reg;
00d8a4c1 1577 temp->promoted_mode = promoted_mode;
293e3de4 1578 temp->unsignedp = TREE_UNSIGNED (type);
6f086dfc
RS
1579 temp->next = function->fixup_var_refs_queue;
1580 function->fixup_var_refs_queue = temp;
1581 pop_obstacks ();
1582 }
e5e809f4 1583 else if (used_p)
6f086dfc 1584 /* Variable is local; fix it up now. */
293e3de4 1585 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
6f086dfc
RS
1586}
1587\f
1588static void
00d8a4c1 1589fixup_var_refs (var, promoted_mode, unsignedp)
6f086dfc 1590 rtx var;
00d8a4c1
RK
1591 enum machine_mode promoted_mode;
1592 int unsignedp;
6f086dfc
RS
1593{
1594 tree pending;
1595 rtx first_insn = get_insns ();
1596 struct sequence_stack *stack = sequence_stack;
1597 tree rtl_exps = rtl_expr_chain;
1598
1599 /* Must scan all insns for stack-refs that exceed the limit. */
00d8a4c1 1600 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
6f086dfc
RS
1601
1602 /* Scan all pending sequences too. */
1603 for (; stack; stack = stack->next)
1604 {
1605 push_to_sequence (stack->first);
00d8a4c1
RK
1606 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1607 stack->first, stack->next != 0);
6f086dfc
RS
1608 /* Update remembered end of sequence
1609 in case we added an insn at the end. */
1610 stack->last = get_last_insn ();
1611 end_sequence ();
1612 }
1613
1614 /* Scan all waiting RTL_EXPRs too. */
1615 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1616 {
1617 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1618 if (seq != const0_rtx && seq != 0)
1619 {
1620 push_to_sequence (seq);
00d8a4c1 1621 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
6f086dfc
RS
1622 end_sequence ();
1623 }
1624 }
1625}
1626\f
e15679f8 1627/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
6f086dfc 1628 some part of an insn. Return a struct fixup_replacement whose OLD
0f41302f 1629 value is equal to X. Allocate a new structure if no such entry exists. */
6f086dfc
RS
1630
1631static struct fixup_replacement *
2740a678 1632find_fixup_replacement (replacements, x)
6f086dfc
RS
1633 struct fixup_replacement **replacements;
1634 rtx x;
1635{
1636 struct fixup_replacement *p;
1637
1638 /* See if we have already replaced this. */
1639 for (p = *replacements; p && p->old != x; p = p->next)
1640 ;
1641
1642 if (p == 0)
1643 {
1644 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1645 p->old = x;
1646 p->new = 0;
1647 p->next = *replacements;
1648 *replacements = p;
1649 }
1650
1651 return p;
1652}
1653
1654/* Scan the insn-chain starting with INSN for refs to VAR
1655 and fix them up. TOPLEVEL is nonzero if this chain is the
1656 main chain of insns for the current function. */
1657
1658static void
00d8a4c1 1659fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
6f086dfc 1660 rtx var;
00d8a4c1
RK
1661 enum machine_mode promoted_mode;
1662 int unsignedp;
6f086dfc
RS
1663 rtx insn;
1664 int toplevel;
1665{
02a10449
RK
1666 rtx call_dest = 0;
1667
6f086dfc
RS
1668 while (insn)
1669 {
1670 rtx next = NEXT_INSN (insn);
e5e809f4 1671 rtx set, prev, prev_set;
6f086dfc 1672 rtx note;
e5e809f4 1673
e15762df 1674 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6f086dfc 1675 {
63770d6a
RK
1676 /* If this is a CLOBBER of VAR, delete it.
1677
1678 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1679 and REG_RETVAL notes too. */
926d1ca5 1680 if (GET_CODE (PATTERN (insn)) == CLOBBER
07362cb3
JW
1681 && (XEXP (PATTERN (insn), 0) == var
1682 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1683 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1684 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
63770d6a
RK
1685 {
1686 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1687 /* The REG_LIBCALL note will go away since we are going to
1688 turn INSN into a NOTE, so just delete the
1689 corresponding REG_RETVAL note. */
1690 remove_note (XEXP (note, 0),
1691 find_reg_note (XEXP (note, 0), REG_RETVAL,
1692 NULL_RTX));
1693
1694 /* In unoptimized compilation, we shouldn't call delete_insn
1695 except in jump.c doing warnings. */
1696 PUT_CODE (insn, NOTE);
1697 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1698 NOTE_SOURCE_FILE (insn) = 0;
1699 }
1700
6f086dfc 1701 /* The insn to load VAR from a home in the arglist
e5e809f4
JL
1702 is now a no-op. When we see it, just delete it.
1703 Similarly if this is storing VAR from a register from which
1704 it was loaded in the previous insn. This will occur
1705 when an ADDRESSOF was made for an arglist slot. */
63770d6a 1706 else if (toplevel
e5e809f4
JL
1707 && (set = single_set (insn)) != 0
1708 && SET_DEST (set) == var
63770d6a
RK
1709 /* If this represents the result of an insn group,
1710 don't delete the insn. */
1711 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
e5e809f4
JL
1712 && (rtx_equal_p (SET_SRC (set), var)
1713 || (GET_CODE (SET_SRC (set)) == REG
1714 && (prev = prev_nonnote_insn (insn)) != 0
1715 && (prev_set = single_set (prev)) != 0
1716 && SET_DEST (prev_set) == SET_SRC (set)
1717 && rtx_equal_p (SET_SRC (prev_set), var))))
6f086dfc 1718 {
b4ff474c
RS
1719 /* In unoptimized compilation, we shouldn't call delete_insn
1720 except in jump.c doing warnings. */
1721 PUT_CODE (insn, NOTE);
1722 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1723 NOTE_SOURCE_FILE (insn) = 0;
6f086dfc
RS
1724 if (insn == last_parm_insn)
1725 last_parm_insn = PREV_INSN (next);
1726 }
1727 else
1728 {
02a10449
RK
1729 struct fixup_replacement *replacements = 0;
1730 rtx next_insn = NEXT_INSN (insn);
1731
e9a25f70
JL
1732 if (SMALL_REGISTER_CLASSES)
1733 {
1734 /* If the insn that copies the results of a CALL_INSN
1735 into a pseudo now references VAR, we have to use an
1736 intermediate pseudo since we want the life of the
1737 return value register to be only a single insn.
02a10449 1738
e9a25f70
JL
1739 If we don't use an intermediate pseudo, such things as
1740 address computations to make the address of VAR valid
1741 if it is not can be placed between the CALL_INSN and INSN.
02a10449 1742
e9a25f70
JL
1743 To make sure this doesn't happen, we record the destination
1744 of the CALL_INSN and see if the next insn uses both that
1745 and VAR. */
02a10449 1746
f95182a4
ILT
1747 if (call_dest != 0 && GET_CODE (insn) == INSN
1748 && reg_mentioned_p (var, PATTERN (insn))
1749 && reg_mentioned_p (call_dest, PATTERN (insn)))
1750 {
1751 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
02a10449 1752
f95182a4 1753 emit_insn_before (gen_move_insn (temp, call_dest), insn);
02a10449 1754
f95182a4
ILT
1755 PATTERN (insn) = replace_rtx (PATTERN (insn),
1756 call_dest, temp);
1757 }
02a10449 1758
f95182a4
ILT
1759 if (GET_CODE (insn) == CALL_INSN
1760 && GET_CODE (PATTERN (insn)) == SET)
1761 call_dest = SET_DEST (PATTERN (insn));
1762 else if (GET_CODE (insn) == CALL_INSN
1763 && GET_CODE (PATTERN (insn)) == PARALLEL
1764 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1765 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1766 else
1767 call_dest = 0;
1768 }
02a10449 1769
6f086dfc
RS
1770 /* See if we have to do anything to INSN now that VAR is in
1771 memory. If it needs to be loaded into a pseudo, use a single
1772 pseudo for the entire insn in case there is a MATCH_DUP
1773 between two operands. We pass a pointer to the head of
1774 a list of struct fixup_replacements. If fixup_var_refs_1
1775 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1776 it will record them in this list.
1777
1778 If it allocated a pseudo for any replacement, we copy into
1779 it here. */
1780
00d8a4c1
RK
1781 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1782 &replacements);
6f086dfc 1783
77121fee
JW
1784 /* If this is last_parm_insn, and any instructions were output
1785 after it to fix it up, then we must set last_parm_insn to
1786 the last such instruction emitted. */
1787 if (insn == last_parm_insn)
1788 last_parm_insn = PREV_INSN (next_insn);
1789
6f086dfc
RS
1790 while (replacements)
1791 {
1792 if (GET_CODE (replacements->new) == REG)
1793 {
1794 rtx insert_before;
00d8a4c1 1795 rtx seq;
6f086dfc
RS
1796
1797 /* OLD might be a (subreg (mem)). */
1798 if (GET_CODE (replacements->old) == SUBREG)
1799 replacements->old
1800 = fixup_memory_subreg (replacements->old, insn, 0);
1801 else
1802 replacements->old
1803 = fixup_stack_1 (replacements->old, insn);
1804
5fa7422b 1805 insert_before = insn;
6f086dfc 1806
00d8a4c1
RK
1807 /* If we are changing the mode, do a conversion.
1808 This might be wasteful, but combine.c will
1809 eliminate much of the waste. */
1810
1811 if (GET_MODE (replacements->new)
1812 != GET_MODE (replacements->old))
1813 {
1814 start_sequence ();
1815 convert_move (replacements->new,
1816 replacements->old, unsignedp);
1817 seq = gen_sequence ();
1818 end_sequence ();
1819 }
1820 else
1821 seq = gen_move_insn (replacements->new,
1822 replacements->old);
1823
1824 emit_insn_before (seq, insert_before);
6f086dfc
RS
1825 }
1826
1827 replacements = replacements->next;
1828 }
1829 }
1830
1831 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1832 But don't touch other insns referred to by reg-notes;
1833 we will get them elsewhere. */
1834 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1835 if (GET_CODE (note) != INSN_LIST)
ab6155b7
RK
1836 XEXP (note, 0)
1837 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
6f086dfc
RS
1838 }
1839 insn = next;
1840 }
1841}
1842\f
00d8a4c1
RK
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements. If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement. If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.

   The routine works by dispatching on the code of *LOC: each case either
   handles the reference completely (and returns) or falls out of the
   switch so that the generic operand walk at the bottom recurses into
   the subexpressions.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  start_sequence ();

	  /* Try the raw address first; if the insn won't take it,
	     force it into operand form, and as a last resort copy it
	     into a fresh register.  */
	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = force_operand (sub, NULL_RTX);

	      if (! validate_change (insn, loc, y, 0))
		*loc = copy_to_reg (y);
	    }

	  emit_insn_before (gen_sequence (), insn);
	  end_sequence ();
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  /* The insn couldn't use the MEM directly: substitute a fresh
	     pseudo; the caller (fixup_var_refs_insns) emits the copy.  */
	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* Leaf rtxs cannot contain VAR; nothing to do.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  /* Paradoxical SUBREG of a MEM: replace the inner rtx
		     with a pseudo instead.  */
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx_MEM (wanted_mode,
					plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
#ifdef HAVE_insv
	rtx outerdest = dest;
#endif

	/* Strip wrappers to find the register or MEM being operated on.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
		if (wanted_mode == VOIDmode)
		  wanted_mode = word_mode;

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    /* If the bytes and bits are counted differently, we
		       must adjust the offset.  */
		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx_MEM (wanted_mode,
					  plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Mirror image of the case above: VAR is the destination and a
	   (possibly subreg'd) register is the source.  */
	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		promoted_mode = GET_MODE (fixeddest);
	      }
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (promoted_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      /* NOTE: the SET case deliberately falls through to `default'
	 (which just breaks) so the operand walk below still runs.  */

    default:
      break;
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
2398\f
2399/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2400 return an rtx (MEM:m1 newaddr) which is equivalent.
2401 If any insns must be emitted to compute NEWADDR, put them before INSN.
2402
2403 UNCRITICAL nonzero means accept paradoxical subregs.
0f41302f 2404 This is used for subregs found inside REG_NOTES. */
6f086dfc
RS
2405
2406static rtx
2407fixup_memory_subreg (x, insn, uncritical)
2408 rtx x;
2409 rtx insn;
2410 int uncritical;
2411{
2412 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2413 rtx addr = XEXP (SUBREG_REG (x), 0);
2414 enum machine_mode mode = GET_MODE (x);
29a82058 2415 rtx result;
6f086dfc
RS
2416
2417 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2418 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2419 && ! uncritical)
2420 abort ();
2421
f76b9db2
ILT
2422 if (BYTES_BIG_ENDIAN)
2423 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2424 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
6f086dfc
RS
2425 addr = plus_constant (addr, offset);
2426 if (!flag_force_addr && memory_address_p (mode, addr))
2427 /* Shortcut if no insns need be emitted. */
2428 return change_address (SUBREG_REG (x), mode, addr);
2429 start_sequence ();
2430 result = change_address (SUBREG_REG (x), mode, addr);
2431 emit_insn_before (gen_sequence (), insn);
2432 end_sequence ();
2433 return result;
2434}
2435
2436/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2437 Replace subexpressions of X in place.
2438 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2439 Otherwise return X, with its contents possibly altered.
2440
ab6155b7
RK
2441 If any insns must be emitted to compute NEWADDR, put them before INSN.
2442
2443 UNCRITICAL is as in fixup_memory_subreg. */
6f086dfc
RS
2444
2445static rtx
ab6155b7 2446walk_fixup_memory_subreg (x, insn, uncritical)
6f086dfc
RS
2447 register rtx x;
2448 rtx insn;
ab6155b7 2449 int uncritical;
6f086dfc
RS
2450{
2451 register enum rtx_code code;
2452 register char *fmt;
2453 register int i;
2454
2455 if (x == 0)
2456 return 0;
2457
2458 code = GET_CODE (x);
2459
2460 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
ab6155b7 2461 return fixup_memory_subreg (x, insn, uncritical);
6f086dfc
RS
2462
2463 /* Nothing special about this RTX; fix its operands. */
2464
2465 fmt = GET_RTX_FORMAT (code);
2466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2467 {
2468 if (fmt[i] == 'e')
ab6155b7 2469 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
6f086dfc
RS
2470 if (fmt[i] == 'E')
2471 {
2472 register int j;
2473 for (j = 0; j < XVECLEN (x, i); j++)
2474 XVECEXP (x, i, j)
ab6155b7 2475 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
6f086dfc
RS
2476 }
2477 }
2478 return x;
2479}
2480\f
6f086dfc
RS
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.
	 The address qualifies when it is (PLUS base const_int) where
	 base is a virtual register, one of the frame/stack/arg pointer
	 registers, or the function's internal arg pointer.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  /* Nothing to fix if the displacement is already in range.  */
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  /* Compute the address into a register, emitting the needed
	     insns before INSN, and return a fresh MEM using it.  */
	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  /* Not a MEM: recurse into all rtx and rtvec operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
2542\f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  /* Determine whether the bit-field reference is the destination
     (an insertion) or the source (an extraction) of the SET.  */
  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.
     (mode_for_size yields BLKmode when no integer mode has exactly the
     field's width; the modulus test enforces natural alignment.)  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  /* Build the narrow MEM; any address-fixup insns go before INSN.  */
	  start_sequence ();
	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));
	  insns = get_insns ();
	  end_sequence ();
	  emit_insns_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      /* Strip same-class SUBREGs at word 0 from the destination.  */
	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
2701\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;	/* virtual_incoming_args_rtx from arg pointer */
static int var_offset;		/* virtual_stack_vars_rtx from frame pointer */
static int dynamic_offset;	/* virtual_stack_dynamic_rtx from stack pointer */
static int out_arg_offset;	/* virtual_outgoing_args_rtx from stack pointer */
static int cfa_offset;		/* virtual_cfa_rtx from arg pointer */
6f086dfc
RS
2714
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif

/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif
2757
2758
/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
   its address taken.  DECL is the decl for the object stored in the
   register, for later use if we do need to force REG into the stack.
   REG is overwritten by the MEM like in put_reg_into_stack.

   Returns REG, which on return is actually the new MEM rtx (the REG
   node itself has been converted in place).  */

rtx
gen_mem_addressof (reg, decl)
     rtx reg;
     tree decl;
{
  tree type = TREE_TYPE (decl);
  /* The ADDRESSOF carries a fresh pseudo and remembers REG's old number
     so put_addressof_into_stack can recover it later.  */
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
  SET_ADDRESSOF_DECL (r, decl);
  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);

  /* Convert REG in place into a MEM whose address is the ADDRESSOF.
     REG is shared, so all existing uses now see the MEM.  */
  XEXP (reg, 0) = r;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, DECL_MODE (decl));
  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
  MEM_ALIAS_SET (reg) = get_alias_set (decl);

  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));

  return reg;
}
2788
2789/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2790
2791void
2792flush_addressof (decl)
2793 tree decl;
2794{
2795 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2796 && DECL_RTL (decl) != 0
2797 && GET_CODE (DECL_RTL (decl)) == MEM
2798 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2799 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2800 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2801}
2802
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.
   The decl associated with R (via ADDRESSOF_DECL) supplies the type,
   mode, volatility, and original pseudo number for the stack slot.  */

static void
put_addressof_into_stack (r)
     rtx r;
{
  tree decl = ADDRESSOF_DECL (r);
  rtx reg = XEXP (r, 0);

  /* The operand of an ADDRESSOF built by gen_mem_addressof is always
     a REG; anything else indicates corrupted rtl.  */
  if (GET_CODE (reg) != REG)
    abort ();

  put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
		      ADDRESSOF_REGNO (r),
		      TREE_USED (decl) || DECL_INITIAL (decl) != 0);
}
2820
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  STORE is nonzero when *LOC is the destination of a SET, in
   which case a mismatched-mode replacement must be done with a bit-field
   insertion rather than an extraction.  */

static void
purge_addressof_1 (loc, insn, force, store)
     rtx *loc;
     rtx insn;
     int force, store;
{
  rtx x;
  RTX_CODE code;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return;

  code = GET_CODE (x);

  if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
    {
      rtx insns;
      /* We must create a copy of the rtx because it was created by
	 overwriting a REG rtx which is always shared.  */
      rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));

      /* First try substituting the address expression directly.  */
      if (validate_change (insn, loc, sub, 0))
	return;

      /* Otherwise force it into a form the insn will accept, emitting
	 the computation before INSN.  */
      start_sequence ();
      if (! validate_change (insn, loc,
			     force_operand (sub, NULL_RTX),
			     0))
	abort ();

      insns = gen_sequence ();
      end_sequence ();
      emit_insns_before (insns, insn);
      return;
    }
  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
    {
      rtx sub = XEXP (XEXP (x, 0), 0);

      if (GET_CODE (sub) == MEM)
	sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));

      /* A volatile or BLKmode reference must really live in memory.  */
      if (GET_CODE (sub) == REG
	  && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	{
	  put_addressof_into_stack (XEXP (x, 0));
	  return;
	}
      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
	{
	  int size_x, size_sub;

	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));

	  /* Don't even consider working with paradoxical subregs,
	     or the moral equivalent seen here.  */
	  if (size_x <= size_sub
	      && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
	    {
	      /* Do a bitfield insertion to mirror what would happen
		 in memory.  */

	      rtx val, seq;

	      if (store)
		{
		  /* If we can't replace with a register, be afraid.  */

		  /* Replace the stored value with a fresh pseudo before
		     INSN, then insert it into SUB after INSN.  */
		  start_sequence ();
		  val = gen_reg_rtx (GET_MODE (x));
		  if (! validate_change (insn, loc, val, 0))
		    abort ();
		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);

		  start_sequence ();
		  store_bit_field (sub, size_x, 0, GET_MODE (x),
				   val, GET_MODE_SIZE (GET_MODE (sub)),
				   GET_MODE_SIZE (GET_MODE (sub)));

		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_after (seq, insn);
		}
	      else
		{
		  /* A read: extract the field from SUB before INSN and
		     use the extracted value in place of the MEM.  */
		  start_sequence ();
		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
					   GET_MODE (x), GET_MODE (x),
					   GET_MODE_SIZE (GET_MODE (sub)),
					   GET_MODE_SIZE (GET_MODE (sub)));

		  /* If we can't replace with a register, be afraid.  */
		  if (! validate_change (insn, loc, val, 0))
		    abort ();

		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		}

	      /* We replaced with a reg -- all done.  */
	      return;
	    }
	}
      else if (validate_change (insn, loc, sub, 0))
	goto restart;
      /* else give up and put it into the stack */
    }
  else if (code == ADDRESSOF)
    {
      put_addressof_into_stack (x);
      return;
    }
  else if (code == SET)
    {
      /* The destination is a store context; the source is not.  */
      purge_addressof_1 (&SET_DEST (x), insn, force, 1);
      purge_addressof_1 (&SET_SRC (x), insn, force, 0);
      return;
    }
  else if (code == CALL)
    {
      /* The address being called must always be resolvable (FORCE).  */
      purge_addressof_1 (&XEXP (x, 0), insn, 1, 0);
      purge_addressof_1 (&XEXP (x, 1), insn, force, 0);
      return;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	purge_addressof_1 (&XEXP (x, i), insn, force, 0);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
    }
}
2971
2972/* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2973 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2974 stack. */
2975
2976void
2977purge_addressof (insns)
2978 rtx insns;
2979{
2980 rtx insn;
2981 for (insn = insns; insn; insn = NEXT_INSN (insn))
2982 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2983 || GET_CODE (insn) == CALL_INSN)
2984 {
2985 purge_addressof_1 (&PATTERN (insn), insn,
f7b6d104
RH
2986 asm_noperands (PATTERN (insn)) > 0, 0);
2987 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
e9a25f70
JL
2988 }
2989}
2990\f
6f086dfc
RS
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.

   Sets the five communication offsets above, rewrites all decls and
   insns, and finally sets `virtuals_instantiated'.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;
  int i;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
      }

  /* Instantiate the stack slots for the parm registers, for later use in
     addressof elimination.  */
  for (i = 0; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
3042
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (DECL_SAVED_INSNS (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
	 larger than the declared type size.  We must use the larger of
	 the two sizes.  */
      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
3089
3090/* Subroutine of instantiate_decls: Process all decls in the given
3091 BLOCK node and all its subblocks. */
3092
3093static void
3094instantiate_decls_1 (let, valid_only)
3095 tree let;
3096 int valid_only;
3097{
3098 tree t;
3099
3100 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
5a73491b
RK
3101 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3102 valid_only);
6f086dfc
RS
3103
3104 /* Process all subblocks. */
3105 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3106 instantiate_decls_1 (t, valid_only);
3107}
5a73491b 3108
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   SIZE is the object's size in bytes.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     int size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only)
    {
      /* Now verify that the resulting address is valid for every integer or
	 floating-point mode up to and including SIZE bytes long.  We do this
	 since the object might be accessed in any mode and frame addresses
	 are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
6f086dfc
RS
3173\f
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it zero, we do nothing if replacement
   is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset;
  rtx temp;
  rtx seq;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* These can contain no virtual registers.  */
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;
      else if (SET_DEST (x) == virtual_cfa_rtx)
	new = arg_pointer_rtx, offset = - cfa_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), NULL_RTX);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  if (! validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old, new_offset;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else if (inner == virtual_cfa_rtx)
		new = arg_pointer_rtx, offset = cfa_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else if (XEXP (x, 0) == virtual_cfa_rtx)
	    new = arg_pointer_rtx, offset = cfa_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  new_offset = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with just
	     the register.  */
	  if (new_offset == const0_rtx
	      && validate_change (object, loc, new, 0))
	    return 1;

	  /* Next try to replace the register and new offset.
	     There are two changes to validate here and we can't assume that
	     in the case of old offset equals new just changing the register
	     will yield a valid insn.  In the interests of a little efficiency,
	     however, we only call validate change once (we don't queue up the
	     changes and then call apply_change_group).  */

	  old = XEXP (x, 0);
	  if (offset == 0
	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
	      : (XEXP (x, 0) = new,
		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      XEXP (x, 0) = new;
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new_offset), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  XEXP (x, 0) = old;
		  new = gen_rtx_PLUS (Pmode, new, new_offset);

		  start_sequence ();
		  temp = force_operand (new, NULL_RTX);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of decls.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  || temp == hard_frame_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed on object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address that some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  One case where this could
	     happen is in the case of a USE or CLOBBER reference, but we
	     take care of that below.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These case either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
	 go ahead and make the invalid one, but do it to a copy.  For a REG,
	 just make the recursive call, since there's no chance of a problem. */

      if ((GET_CODE (XEXP (x, 0)) == MEM
	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
					  0))
	  || (GET_CODE (XEXP (x, 0)) == REG
	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
	return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;
      else if (x == virtual_cfa_rtx)
	new = arg_pointer_rtx, offset = cfa_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      start_sequence ();
	      temp = force_operand (temp, NULL_RTX);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;

    case ADDRESSOF:
      if (GET_CODE (XEXP (x, 0)) == REG)
	return 1;

      else if (GET_CODE (XEXP (x, 0)) == MEM)
	{
	  /* If we have a (addressof (mem ..)), do any instantiation inside
	     since we know we'll be making the inside valid when we finally
	     remove the ADDRESSOF.  */
	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
	  return 1;
	}
      break;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
3583\f
/* Optimization: assuming this function does not receive nonlocal gotos,
   delete the handlers for such, as well as the insns to establish
   and disestablish them.  */

static void
delete_handlers ()
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
	 prevent jump_optimize from deleting it.
	 Also permit deletion of the nonlocal labels themselves
	 if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  tree t, last_t;

	  LABEL_PRESERVE_P (insn) = 0;

	  /* Remove it from the nonlocal_label list, to avoid confusing
	     flow.  */
	  for (t = nonlocal_labels, last_t = 0; t;
	       last_t = t, t = TREE_CHAIN (t))
	    if (DECL_RTL (TREE_VALUE (t)) == insn)
	      break;
	  if (t)
	    {
	      /* Unlink T; LAST_T is its predecessor (0 if T is first).  */
	      if (! last_t)
		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
	      else
		TREE_CHAIN (last_t) = TREE_CHAIN (t);
	    }
	}
      /* Any insn touching the handler slot or saved stack level exists
	 only to support nonlocal gotos; delete it.  */
      if (GET_CODE (insn) == INSN
	  && ((nonlocal_goto_handler_slot != 0
	       && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
	      || (nonlocal_goto_stack_level != 0
		  && reg_mentioned_p (nonlocal_goto_stack_level,
				      PATTERN (insn)))))
	delete_insn (insn);
    }
}
3627
3628/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3629 of the current function. */
3630
3631rtx
3632nonlocal_label_rtx_list ()
3633{
3634 tree t;
3635 rtx x = 0;
3636
3637 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
38a448ca 3638 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
6f086dfc
RS
3639
3640 return x;
3641}
3642\f
3643/* Output a USE for any register use in RTL.
3644 This is used with -noreg to mark the extent of lifespan
3645 of any registers used in a user-visible variable's DECL_RTL. */
3646
3647void
3648use_variable (rtl)
3649 rtx rtl;
3650{
3651 if (GET_CODE (rtl) == REG)
3652 /* This is a register variable. */
38a448ca 3653 emit_insn (gen_rtx_USE (VOIDmode, rtl));
6f086dfc
RS
3654 else if (GET_CODE (rtl) == MEM
3655 && GET_CODE (XEXP (rtl, 0)) == REG
3656 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3657 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3658 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3659 /* This is a variable-sized structure. */
38a448ca 3660 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
6f086dfc
RS
3661}
3662
3663/* Like use_variable except that it outputs the USEs after INSN
3664 instead of at the end of the insn-chain. */
3665
3666void
3667use_variable_after (rtl, insn)
3668 rtx rtl, insn;
3669{
3670 if (GET_CODE (rtl) == REG)
3671 /* This is a register variable. */
38a448ca 3672 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
6f086dfc
RS
3673 else if (GET_CODE (rtl) == MEM
3674 && GET_CODE (XEXP (rtl, 0)) == REG
3675 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3676 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3677 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3678 /* This is a variable-sized structure. */
38a448ca 3679 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
6f086dfc
RS
3680}
3681\f
/* Return the value of max_parm_reg: one past the highest register
   number used so far for a parameter (maintained by assign_parms).  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
3687
3688/* Return the first insn following those generated by `assign_parms'. */
3689
3690rtx
3691get_first_nonparm_insn ()
3692{
3693 if (last_parm_insn)
3694 return NEXT_INSN (last_parm_insn);
3695 return get_insns ();
3696}
3697
5378192b
RS
3698/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3699 Crash if there is none. */
3700
3701rtx
3702get_first_block_beg ()
3703{
3704 register rtx searcher;
3705 register rtx insn = get_first_nonparm_insn ();
3706
3707 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3708 if (GET_CODE (searcher) == NOTE
3709 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3710 return searcher;
3711
3712 abort (); /* Invalid call to this function. (See comments above.) */
3713 return NULL_RTX;
3714}
3715
d181c154
RS
3716/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3717 This means a type for which function calls must pass an address to the
3718 function or get an address back from the function.
3719 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
3720
3721int
3722aggregate_value_p (exp)
3723 tree exp;
3724{
9d790a4f
RS
3725 int i, regno, nregs;
3726 rtx reg;
d181c154
RS
3727 tree type;
3728 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3729 type = exp;
3730 else
3731 type = TREE_TYPE (exp);
3732
3733 if (RETURN_IN_MEMORY (type))
6f086dfc 3734 return 1;
956d6950 3735 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
3736 and thus can't be returned in registers. */
3737 if (TREE_ADDRESSABLE (type))
3738 return 1;
05e3bdb9 3739 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 3740 return 1;
9d790a4f
RS
3741 /* Make sure we have suitable call-clobbered regs to return
3742 the value in; if not, we must return it in memory. */
d181c154 3743 reg = hard_function_value (type, 0);
e71f7aa5
JW
3744
3745 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3746 it is OK. */
3747 if (GET_CODE (reg) != REG)
3748 return 0;
3749
9d790a4f 3750 regno = REGNO (reg);
d181c154 3751 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
9d790a4f
RS
3752 for (i = 0; i < nregs; i++)
3753 if (! call_used_regs[regno + i])
3754 return 1;
6f086dfc
RS
3755 return 0;
3756}
3757\f
3758/* Assign RTL expressions to the function's parameters.
3759 This may involve copying them into registers and using
3760 those registers as the RTL for them.
3761
3762 If SECOND_TIME is non-zero it means that this function is being
3763 called a second time. This is done by integrate.c when a function's
3764 compilation is deferred. We need to come back here in case the
3765 FUNCTION_ARG macro computes items needed for the rest of the compilation
3766 (such as changing which registers are fixed or caller-saved). But suppress
3767 writing any insns or setting DECL_RTL of anything in this case. */
3768
3769void
3770assign_parms (fndecl, second_time)
3771 tree fndecl;
3772 int second_time;
3773{
3774 register tree parm;
3775 register rtx entry_parm = 0;
3776 register rtx stack_parm = 0;
3777 CUMULATIVE_ARGS args_so_far;
621061f4
RK
3778 enum machine_mode promoted_mode, passed_mode;
3779 enum machine_mode nominal_mode, promoted_nominal_mode;
00d8a4c1 3780 int unsignedp;
6f086dfc
RS
3781 /* Total space needed so far for args on the stack,
3782 given as a constant and a tree-expression. */
3783 struct args_size stack_args_size;
3784 tree fntype = TREE_TYPE (fndecl);
3785 tree fnargs = DECL_ARGUMENTS (fndecl);
3786 /* This is used for the arg pointer when referring to stack args. */
3787 rtx internal_arg_pointer;
3788 /* This is a dummy PARM_DECL that we used for the function result if
3789 the function returns a structure. */
3790 tree function_result_decl = 0;
6f086dfc 3791 int varargs_setup = 0;
3412b298 3792 rtx conversion_insns = 0;
6f086dfc
RS
3793
3794 /* Nonzero if the last arg is named `__builtin_va_alist',
3795 which is used on some machines for old-fashioned non-ANSI varargs.h;
3796 this should be stuck onto the stack as if it had arrived there. */
3b69d50e
RK
3797 int hide_last_arg
3798 = (current_function_varargs
3799 && fnargs
6f086dfc
RS
3800 && (parm = tree_last (fnargs)) != 0
3801 && DECL_NAME (parm)
3802 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3803 "__builtin_va_alist")));
3804
3805 /* Nonzero if function takes extra anonymous args.
3806 This means the last named arg must be on the stack
0f41302f 3807 right before the anonymous ones. */
6f086dfc
RS
3808 int stdarg
3809 = (TYPE_ARG_TYPES (fntype) != 0
3810 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3811 != void_type_node));
3812
ebb904cb
RK
3813 current_function_stdarg = stdarg;
3814
6f086dfc
RS
3815 /* If the reg that the virtual arg pointer will be translated into is
3816 not a fixed reg or is the stack pointer, make a copy of the virtual
3817 arg pointer, and address parms via the copy. The frame pointer is
3818 considered fixed even though it is not marked as such.
3819
3820 The second time through, simply use ap to avoid generating rtx. */
3821
3822 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3823 || ! (fixed_regs[ARG_POINTER_REGNUM]
3824 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3825 && ! second_time)
3826 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3827 else
3828 internal_arg_pointer = virtual_incoming_args_rtx;
3829 current_function_internal_arg_pointer = internal_arg_pointer;
3830
3831 stack_args_size.constant = 0;
3832 stack_args_size.var = 0;
3833
3834 /* If struct value address is treated as the first argument, make it so. */
3835 if (aggregate_value_p (DECL_RESULT (fndecl))
3836 && ! current_function_returns_pcc_struct
3837 && struct_value_incoming_rtx == 0)
3838 {
f9f29478 3839 tree type = build_pointer_type (TREE_TYPE (fntype));
6f086dfc 3840
5f4f0e22 3841 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
6f086dfc
RS
3842
3843 DECL_ARG_TYPE (function_result_decl) = type;
3844 TREE_CHAIN (function_result_decl) = fnargs;
3845 fnargs = function_result_decl;
3846 }
3847
e9a25f70
JL
3848 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3849 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3850 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
6f086dfc
RS
3851
3852#ifdef INIT_CUMULATIVE_INCOMING_ARGS
ea0d4c4b 3853 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
6f086dfc 3854#else
2c7ee1a6 3855 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
6f086dfc
RS
3856#endif
3857
3858 /* We haven't yet found an argument that we must push and pretend the
3859 caller did. */
3860 current_function_pretend_args_size = 0;
3861
3862 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3863 {
05e3bdb9 3864 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
6f086dfc
RS
3865 struct args_size stack_offset;
3866 struct args_size arg_size;
3867 int passed_pointer = 0;
621061f4 3868 int did_conversion = 0;
6f086dfc 3869 tree passed_type = DECL_ARG_TYPE (parm);
621061f4 3870 tree nominal_type = TREE_TYPE (parm);
6f086dfc
RS
3871
3872 /* Set LAST_NAMED if this is last named arg before some
bf9c83fe 3873 anonymous args. */
6f086dfc
RS
3874 int last_named = ((TREE_CHAIN (parm) == 0
3875 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3b69d50e 3876 && (stdarg || current_function_varargs));
bf9c83fe
JW
3877 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3878 most machines, if this is a varargs/stdarg function, then we treat
3879 the last named arg as if it were anonymous too. */
e5e809f4 3880 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
6f086dfc
RS
3881
3882 if (TREE_TYPE (parm) == error_mark_node
3883 /* This can happen after weird syntax errors
3884 or if an enum type is defined among the parms. */
3885 || TREE_CODE (parm) != PARM_DECL
3886 || passed_type == NULL)
3887 {
38a448ca
RH
3888 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3889 = gen_rtx_MEM (BLKmode, const0_rtx);
6f086dfc
RS
3890 TREE_USED (parm) = 1;
3891 continue;
3892 }
3893
3894 /* For varargs.h function, save info about regs and stack space
3895 used by the individual args, not including the va_alist arg. */
3b69d50e 3896 if (hide_last_arg && last_named)
6f086dfc
RS
3897 current_function_args_info = args_so_far;
3898
3899 /* Find mode of arg as it is passed, and mode of arg
3900 as it should be during execution of this function. */
3901 passed_mode = TYPE_MODE (passed_type);
621061f4 3902 nominal_mode = TYPE_MODE (nominal_type);
6f086dfc 3903
16bae307
RS
3904 /* If the parm's mode is VOID, its value doesn't matter,
3905 and avoid the usual things like emit_move_insn that could crash. */
3906 if (nominal_mode == VOIDmode)
3907 {
3908 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3909 continue;
3910 }
3911
3f46679a
RK
3912 /* If the parm is to be passed as a transparent union, use the
3913 type of the first field for the tests below. We have already
3914 verified that the modes are the same. */
3915 if (DECL_TRANSPARENT_UNION (parm)
3916 || TYPE_TRANSPARENT_UNION (passed_type))
3917 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3918
a14ae508
RK
3919 /* See if this arg was passed by invisible reference. It is if
3920 it is an object whose size depends on the contents of the
3921 object itself or if the machine requires these objects be passed
3922 that way. */
3923
3924 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3925 && contains_placeholder_p (TYPE_SIZE (passed_type)))
657bb6dc 3926 || TREE_ADDRESSABLE (passed_type)
6f086dfc 3927#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
a14ae508 3928 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
bf9c83fe 3929 passed_type, named_arg)
a14ae508
RK
3930#endif
3931 )
6f086dfc 3932 {
621061f4 3933 passed_type = nominal_type = build_pointer_type (passed_type);
6f086dfc
RS
3934 passed_pointer = 1;
3935 passed_mode = nominal_mode = Pmode;
3936 }
6f086dfc 3937
a53e14c0
RK
3938 promoted_mode = passed_mode;
3939
3940#ifdef PROMOTE_FUNCTION_ARGS
3941 /* Compute the mode in which the arg is actually extended to. */
7940255d 3942 unsignedp = TREE_UNSIGNED (passed_type);
a5a52dbc 3943 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
a53e14c0
RK
3944#endif
3945
6f086dfc
RS
3946 /* Let machine desc say which reg (if any) the parm arrives in.
3947 0 means it arrives on the stack. */
3948#ifdef FUNCTION_INCOMING_ARG
a53e14c0 3949 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
bf9c83fe 3950 passed_type, named_arg);
6f086dfc 3951#else
a53e14c0 3952 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
bf9c83fe 3953 passed_type, named_arg);
6f086dfc
RS
3954#endif
3955
621061f4
RK
3956 if (entry_parm == 0)
3957 promoted_mode = passed_mode;
a53e14c0 3958
6f086dfc
RS
3959#ifdef SETUP_INCOMING_VARARGS
3960 /* If this is the last named parameter, do any required setup for
3961 varargs or stdargs. We need to know about the case of this being an
3962 addressable type, in which case we skip the registers it
3963 would have arrived in.
3964
3965 For stdargs, LAST_NAMED will be set for two parameters, the one that
3966 is actually the last named, and the dummy parameter. We only
3967 want to do this action once.
3968
3969 Also, indicate when RTL generation is to be suppressed. */
3970 if (last_named && !varargs_setup)
3971 {
621061f4 3972 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
6f086dfc
RS
3973 current_function_pretend_args_size,
3974 second_time);
3975 varargs_setup = 1;
3976 }
3977#endif
3978
3979 /* Determine parm's home in the stack,
3980 in case it arrives in the stack or we should pretend it did.
3981
3982 Compute the stack position and rtx where the argument arrives
3983 and its size.
3984
3985 There is one complexity here: If this was a parameter that would
3986 have been passed in registers, but wasn't only because it is
3987 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3988 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3989 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3990 0 as it was the previous time. */
3991
621061f4 3992 locate_and_pad_parm (promoted_mode, passed_type,
6f086dfc
RS
3993#ifdef STACK_PARMS_IN_REG_PARM_AREA
3994 1,
3995#else
3996#ifdef FUNCTION_INCOMING_ARG
621061f4 3997 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
6f086dfc 3998 passed_type,
bf9c83fe 3999 (named_arg
6f086dfc
RS
4000 || varargs_setup)) != 0,
4001#else
621061f4 4002 FUNCTION_ARG (args_so_far, promoted_mode,
6f086dfc 4003 passed_type,
bf9c83fe 4004 named_arg || varargs_setup) != 0,
6f086dfc
RS
4005#endif
4006#endif
4007 fndecl, &stack_args_size, &stack_offset, &arg_size);
4008
4009 if (! second_time)
4010 {
4011 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4012
4013 if (offset_rtx == const0_rtx)
38a448ca 4014 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
6f086dfc 4015 else
38a448ca
RH
4016 stack_parm = gen_rtx_MEM (promoted_mode,
4017 gen_rtx_PLUS (Pmode,
4018 internal_arg_pointer,
4019 offset_rtx));
6f086dfc
RS
4020
4021 /* If this is a memory ref that contains aggregate components,
a00285d0
RK
4022 mark it as such for cse and loop optimize. Likewise if it
4023 is readonly. */
6f086dfc 4024 MEM_IN_STRUCT_P (stack_parm) = aggregate;
a00285d0 4025 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
41472af8 4026 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
6f086dfc
RS
4027 }
4028
4029 /* If this parameter was passed both in registers and in the stack,
4030 use the copy on the stack. */
621061f4 4031 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
6f086dfc
RS
4032 entry_parm = 0;
4033
461beb10 4034#ifdef FUNCTION_ARG_PARTIAL_NREGS
6f086dfc
RS
4035 /* If this parm was passed part in regs and part in memory,
4036 pretend it arrived entirely in memory
4037 by pushing the register-part onto the stack.
4038
4039 In the special case of a DImode or DFmode that is split,
4040 we could put it together in a pseudoreg directly,
4041 but for now that's not worth bothering with. */
4042
4043 if (entry_parm)
4044 {
621061f4 4045 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
bf9c83fe 4046 passed_type, named_arg);
6f086dfc
RS
4047
4048 if (nregs > 0)
4049 {
4050 current_function_pretend_args_size
4051 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4052 / (PARM_BOUNDARY / BITS_PER_UNIT)
4053 * (PARM_BOUNDARY / BITS_PER_UNIT));
4054
4055 if (! second_time)
5c4cdc9f
JW
4056 {
4057 /* Handle calls that pass values in multiple non-contiguous
4058 locations. The Irix 6 ABI has examples of this. */
4059 if (GET_CODE (entry_parm) == PARALLEL)
aac5cc16
RH
4060 emit_group_store (validize_mem (stack_parm), entry_parm,
4061 int_size_in_bytes (TREE_TYPE (parm)),
4062 (TYPE_ALIGN (TREE_TYPE (parm))
4063 / BITS_PER_UNIT));
5c4cdc9f
JW
4064 else
4065 move_block_from_reg (REGNO (entry_parm),
4066 validize_mem (stack_parm), nregs,
4067 int_size_in_bytes (TREE_TYPE (parm)));
4068 }
6f086dfc
RS
4069 entry_parm = stack_parm;
4070 }
4071 }
461beb10 4072#endif
6f086dfc
RS
4073
4074 /* If we didn't decide this parm came in a register,
4075 by default it came on the stack. */
4076 if (entry_parm == 0)
4077 entry_parm = stack_parm;
4078
4079 /* Record permanently how this parm was passed. */
4080 if (! second_time)
4081 DECL_INCOMING_RTL (parm) = entry_parm;
4082
4083 /* If there is actually space on the stack for this parm,
4084 count it in stack_args_size; otherwise set stack_parm to 0
4085 to indicate there is no preallocated stack slot for the parm. */
4086
4087 if (entry_parm == stack_parm
d9ca49d5 4088#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 4089 /* On some machines, even if a parm value arrives in a register
d9ca49d5
JW
4090 there is still an (uninitialized) stack slot allocated for it.
4091
4092 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4093 whether this parameter already has a stack slot allocated,
4094 because an arg block exists only if current_function_args_size
abc95ed3 4095 is larger than some threshold, and we haven't calculated that
d9ca49d5
JW
4096 yet. So, for now, we just assume that stack slots never exist
4097 in this case. */
6f086dfc
RS
4098 || REG_PARM_STACK_SPACE (fndecl) > 0
4099#endif
4100 )
4101 {
4102 stack_args_size.constant += arg_size.constant;
4103 if (arg_size.var)
4104 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4105 }
4106 else
4107 /* No stack slot was pushed for this parm. */
4108 stack_parm = 0;
4109
4110 /* Update info on where next arg arrives in registers. */
4111
621061f4 4112 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
bf9c83fe 4113 passed_type, named_arg);
6f086dfc 4114
0f41302f 4115 /* If this is our second time through, we are done with this parm. */
6f086dfc
RS
4116 if (second_time)
4117 continue;
4118
e16c591a
RS
4119 /* If we can't trust the parm stack slot to be aligned enough
4120 for its ultimate type, don't use that slot after entry.
4121 We'll make another stack slot, if we need one. */
4122 {
e16c591a 4123 int thisparm_boundary
621061f4 4124 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
e16c591a
RS
4125
4126 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4127 stack_parm = 0;
4128 }
4129
cb61f66f
RS
4130 /* If parm was passed in memory, and we need to convert it on entry,
4131 don't store it back in that same slot. */
4132 if (entry_parm != 0
4133 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4134 stack_parm = 0;
4135
4136#if 0
6f086dfc
RS
4137 /* Now adjust STACK_PARM to the mode and precise location
4138 where this parameter should live during execution,
4139 if we discover that it must live in the stack during execution.
4140 To make debuggers happier on big-endian machines, we store
4141 the value in the last bytes of the space available. */
4142
4143 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4144 && stack_parm != 0)
4145 {
4146 rtx offset_rtx;
4147
f76b9db2
ILT
4148 if (BYTES_BIG_ENDIAN
4149 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
6f086dfc
RS
4150 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4151 - GET_MODE_SIZE (nominal_mode));
6f086dfc
RS
4152
4153 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4154 if (offset_rtx == const0_rtx)
38a448ca 4155 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
6f086dfc 4156 else
38a448ca
RH
4157 stack_parm = gen_rtx_MEM (nominal_mode,
4158 gen_rtx_PLUS (Pmode,
4159 internal_arg_pointer,
4160 offset_rtx));
6f086dfc
RS
4161
4162 /* If this is a memory ref that contains aggregate components,
4163 mark it as such for cse and loop optimize. */
4164 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4165 }
cb61f66f 4166#endif /* 0 */
6f086dfc 4167
9dc0f531
RK
4168#ifdef STACK_REGS
4169 /* We need this "use" info, because the gcc-register->stack-register
4170 converter in reg-stack.c needs to know which registers are active
4171 at the start of the function call. The actual parameter loading
4172 instructions are not always available then anymore, since they might
4173 have been optimised away. */
4174
4175 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
38a448ca 4176 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
9dc0f531
RK
4177#endif
4178
6f086dfc
RS
4179 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4180 in the mode in which it arrives.
4181 STACK_PARM is an RTX for a stack slot where the parameter can live
4182 during the function (in case we want to put it there).
4183 STACK_PARM is 0 if no stack slot was pushed for it.
4184
4185 Now output code if necessary to convert ENTRY_PARM to
4186 the type in which this function declares it,
4187 and store that result in an appropriate place,
4188 which may be a pseudo reg, may be STACK_PARM,
4189 or may be a local stack slot if STACK_PARM is 0.
4190
4191 Set DECL_RTL to that place. */
4192
5c4cdc9f 4193 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4194 {
5c4cdc9f
JW
4195 /* If a BLKmode arrives in registers, copy it to a stack slot.
4196 Handle calls that pass values in multiple non-contiguous
4197 locations. The Irix 6 ABI has examples of this. */
4198 if (GET_CODE (entry_parm) == REG
4199 || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4200 {
621061f4
RK
4201 int size_stored
4202 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4203 UNITS_PER_WORD);
6f086dfc
RS
4204
4205 /* Note that we will be storing an integral number of words.
4206 So we have to be careful to ensure that we allocate an
4207 integral number of words. We do this below in the
4208 assign_stack_local if space was not allocated in the argument
4209 list. If it was, this will not work if PARM_BOUNDARY is not
4210 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4211 if it becomes a problem. */
4212
4213 if (stack_parm == 0)
7e41ffa2
RS
4214 {
4215 stack_parm
621061f4
RK
4216 = assign_stack_local (GET_MODE (entry_parm),
4217 size_stored, 0);
4218
4219 /* If this is a memory ref that contains aggregate
4220 components, mark it as such for cse and loop optimize. */
7e41ffa2
RS
4221 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4222 }
4223
6f086dfc
RS
4224 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4225 abort ();
4226
7a30f0c4
JW
4227 if (TREE_READONLY (parm))
4228 RTX_UNCHANGING_P (stack_parm) = 1;
4229
5c4cdc9f
JW
4230 /* Handle calls that pass values in multiple non-contiguous
4231 locations. The Irix 6 ABI has examples of this. */
4232 if (GET_CODE (entry_parm) == PARALLEL)
aac5cc16
RH
4233 emit_group_store (validize_mem (stack_parm), entry_parm,
4234 int_size_in_bytes (TREE_TYPE (parm)),
4235 (TYPE_ALIGN (TREE_TYPE (parm))
4236 / BITS_PER_UNIT));
5c4cdc9f
JW
4237 else
4238 move_block_from_reg (REGNO (entry_parm),
4239 validize_mem (stack_parm),
4240 size_stored / UNITS_PER_WORD,
4241 int_size_in_bytes (TREE_TYPE (parm)));
6f086dfc
RS
4242 }
4243 DECL_RTL (parm) = stack_parm;
4244 }
74bd77a8 4245 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 4246 && ! DECL_INLINE (fndecl))
6f086dfc
RS
4247 /* layout_decl may set this. */
4248 || TREE_ADDRESSABLE (parm)
4249 || TREE_SIDE_EFFECTS (parm)
4250 /* If -ffloat-store specified, don't put explicit
4251 float variables into registers. */
4252 || (flag_float_store
4253 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4254 /* Always assign pseudo to structure return or item passed
4255 by invisible reference. */
4256 || passed_pointer || parm == function_result_decl)
4257 {
00d8a4c1
RK
4258 /* Store the parm in a pseudoregister during the function, but we
4259 may need to do it in a wider mode. */
4260
4261 register rtx parmreg;
4e86caed 4262 int regno, regnoi = 0, regnor = 0;
00d8a4c1
RK
4263
4264 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
cd5b3469 4265
621061f4
RK
4266 promoted_nominal_mode
4267 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
6f086dfc 4268
621061f4 4269 parmreg = gen_reg_rtx (promoted_nominal_mode);
ddb7361a 4270 mark_user_reg (parmreg);
6f086dfc
RS
4271
4272 /* If this was an item that we received a pointer to, set DECL_RTL
4273 appropriately. */
4274 if (passed_pointer)
4275 {
621061f4 4276 DECL_RTL (parm)
38a448ca 4277 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
6f086dfc
RS
4278 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4279 }
4280 else
4281 DECL_RTL (parm) = parmreg;
4282
4283 /* Copy the value into the register. */
621061f4
RK
4284 if (nominal_mode != passed_mode
4285 || promoted_nominal_mode != promoted_mode)
86f8eff3 4286 {
621061f4
RK
4287 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4288 mode, by the caller. We now have to convert it to
4289 NOMINAL_MODE, if different. However, PARMREG may be in
956d6950 4290 a different mode than NOMINAL_MODE if it is being stored
621061f4
RK
4291 promoted.
4292
4293 If ENTRY_PARM is a hard register, it might be in a register
86f8eff3
RK
4294 not valid for operating in its mode (e.g., an odd-numbered
4295 register for a DFmode). In that case, moves are the only
4296 thing valid, so we can't do a convert from there. This
4297 occurs when the calling sequence allow such misaligned
3412b298
JW
4298 usages.
4299
4300 In addition, the conversion may involve a call, which could
4301 clobber parameters which haven't been copied to pseudo
4302 registers yet. Therefore, we must first copy the parm to
4303 a pseudo reg here, and save the conversion until after all
4304 parameters have been moved. */
4305
4306 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4307
4308 emit_move_insn (tempreg, validize_mem (entry_parm));
4309
4310 push_to_sequence (conversion_insns);
ad241351
RK
4311 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4312
621061f4
RK
4313 expand_assignment (parm,
4314 make_tree (nominal_type, tempreg), 0, 0);
3412b298 4315 conversion_insns = get_insns ();
621061f4 4316 did_conversion = 1;
3412b298 4317 end_sequence ();
86f8eff3 4318 }
6f086dfc
RS
4319 else
4320 emit_move_insn (parmreg, validize_mem (entry_parm));
4321
74bd77a8
RS
4322 /* If we were passed a pointer but the actual value
4323 can safely live in a register, put it in one. */
16bae307 4324 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
74bd77a8
RS
4325 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4326 && ! DECL_INLINE (fndecl))
4327 /* layout_decl may set this. */
4328 || TREE_ADDRESSABLE (parm)
4329 || TREE_SIDE_EFFECTS (parm)
4330 /* If -ffloat-store specified, don't put explicit
4331 float variables into registers. */
4332 || (flag_float_store
4333 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4334 {
2654605a
JW
4335 /* We can't use nominal_mode, because it will have been set to
4336 Pmode above. We must use the actual mode of the parm. */
4337 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
ddb7361a 4338 mark_user_reg (parmreg);
74bd77a8
RS
4339 emit_move_insn (parmreg, DECL_RTL (parm));
4340 DECL_RTL (parm) = parmreg;
c110c53d
RS
4341 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4342 now the parm. */
4343 stack_parm = 0;
74bd77a8 4344 }
137a2a7b
DE
4345#ifdef FUNCTION_ARG_CALLEE_COPIES
4346 /* If we are passed an arg by reference and it is our responsibility
4347 to make a copy, do it now.
4348 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4349 original argument, so we must recreate them in the call to
4350 FUNCTION_ARG_CALLEE_COPIES. */
4351 /* ??? Later add code to handle the case that if the argument isn't
4352 modified, don't do the copy. */
4353
4354 else if (passed_pointer
4355 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4356 TYPE_MODE (DECL_ARG_TYPE (parm)),
4357 DECL_ARG_TYPE (parm),
bf9c83fe 4358 named_arg)
926b1b99 4359 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
137a2a7b
DE
4360 {
4361 rtx copy;
4362 tree type = DECL_ARG_TYPE (parm);
4363
4364 /* This sequence may involve a library call perhaps clobbering
4365 registers that haven't been copied to pseudos yet. */
4366
4367 push_to_sequence (conversion_insns);
4368
4369 if (TYPE_SIZE (type) == 0
4370 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1fd3ef7f 4371 /* This is a variable sized object. */
38a448ca
RH
4372 copy = gen_rtx_MEM (BLKmode,
4373 allocate_dynamic_stack_space
4374 (expr_size (parm), NULL_RTX,
4375 TYPE_ALIGN (type)));
137a2a7b 4376 else
1fd3ef7f
RK
4377 copy = assign_stack_temp (TYPE_MODE (type),
4378 int_size_in_bytes (type), 1);
3668e76e 4379 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
e9a25f70 4380 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
137a2a7b
DE
4381
4382 store_expr (parm, copy, 0);
4383 emit_move_insn (parmreg, XEXP (copy, 0));
7d384cc0 4384 if (current_function_check_memory_usage)
86fa911a
RK
4385 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4386 XEXP (copy, 0), ptr_mode,
4387 GEN_INT (int_size_in_bytes (type)),
4388 TYPE_MODE (sizetype),
956d6950
JL
4389 GEN_INT (MEMORY_USE_RW),
4390 TYPE_MODE (integer_type_node));
137a2a7b 4391 conversion_insns = get_insns ();
621061f4 4392 did_conversion = 1;
137a2a7b
DE
4393 end_sequence ();
4394 }
4395#endif /* FUNCTION_ARG_CALLEE_COPIES */
74bd77a8 4396
6f086dfc 4397 /* In any case, record the parm's desired stack location
14aceb29
RS
4398 in case we later discover it must live in the stack.
4399
4400 If it is a COMPLEX value, store the stack location for both
4401 halves. */
4402
4403 if (GET_CODE (parmreg) == CONCAT)
4404 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4405 else
4406 regno = REGNO (parmreg);
4407
e9a25f70 4408 if (regno >= max_parm_reg)
6f086dfc
RS
4409 {
4410 rtx *new;
e9a25f70 4411 int old_max_parm_reg = max_parm_reg;
14aceb29 4412
e9a25f70
JL
4413 /* It's slow to expand this one register at a time,
4414 but it's also rare and we need max_parm_reg to be
4415 precisely correct. */
4416 max_parm_reg = regno + 1;
4417 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4c9a05bc 4418 bcopy ((char *) parm_reg_stack_loc, (char *) new,
e9a25f70
JL
4419 old_max_parm_reg * sizeof (rtx));
4420 bzero ((char *) (new + old_max_parm_reg),
4421 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
6f086dfc
RS
4422 parm_reg_stack_loc = new;
4423 }
14aceb29
RS
4424
4425 if (GET_CODE (parmreg) == CONCAT)
4426 {
4427 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4428
a03caf76
RK
4429 regnor = REGNO (gen_realpart (submode, parmreg));
4430 regnoi = REGNO (gen_imagpart (submode, parmreg));
4431
7b1a0c14
RS
4432 if (stack_parm != 0)
4433 {
a03caf76 4434 parm_reg_stack_loc[regnor]
3d329b07 4435 = gen_realpart (submode, stack_parm);
a03caf76 4436 parm_reg_stack_loc[regnoi]
3d329b07 4437 = gen_imagpart (submode, stack_parm);
7b1a0c14
RS
4438 }
4439 else
4440 {
a03caf76
RK
4441 parm_reg_stack_loc[regnor] = 0;
4442 parm_reg_stack_loc[regnoi] = 0;
7b1a0c14 4443 }
14aceb29
RS
4444 }
4445 else
4446 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
6f086dfc
RS
4447
4448 /* Mark the register as eliminable if we did no conversion
4449 and it was copied from memory at a fixed offset,
4450 and the arg pointer was not copied to a pseudo-reg.
4451 If the arg pointer is a pseudo reg or the offset formed
4452 an invalid address, such memory-equivalences
4453 as we make here would screw up life analysis for it. */
4454 if (nominal_mode == passed_mode
621061f4 4455 && ! did_conversion
38b610ed
ILT
4456 && stack_parm != 0
4457 && GET_CODE (stack_parm) == MEM
6f086dfc
RS
4458 && stack_offset.var == 0
4459 && reg_mentioned_p (virtual_incoming_args_rtx,
38b610ed 4460 XEXP (stack_parm, 0)))
a03caf76
RK
4461 {
4462 rtx linsn = get_last_insn ();
69685820 4463 rtx sinsn, set;
a03caf76
RK
4464
4465 /* Mark complex types separately. */
4466 if (GET_CODE (parmreg) == CONCAT)
69685820
RK
4467 /* Scan backwards for the set of the real and
4468 imaginary parts. */
4469 for (sinsn = linsn; sinsn != 0;
4470 sinsn = prev_nonnote_insn (sinsn))
4471 {
4472 set = single_set (sinsn);
4473 if (set != 0
4474 && SET_DEST (set) == regno_reg_rtx [regnoi])
4475 REG_NOTES (sinsn)
38a448ca
RH
4476 = gen_rtx_EXPR_LIST (REG_EQUIV,
4477 parm_reg_stack_loc[regnoi],
4478 REG_NOTES (sinsn));
69685820
RK
4479 else if (set != 0
4480 && SET_DEST (set) == regno_reg_rtx [regnor])
4481 REG_NOTES (sinsn)
38a448ca
RH
4482 = gen_rtx_EXPR_LIST (REG_EQUIV,
4483 parm_reg_stack_loc[regnor],
4484 REG_NOTES (sinsn));
69685820
RK
4485 }
4486 else if ((set = single_set (linsn)) != 0
4487 && SET_DEST (set) == parmreg)
a03caf76 4488 REG_NOTES (linsn)
38a448ca
RH
4489 = gen_rtx_EXPR_LIST (REG_EQUIV,
4490 stack_parm, REG_NOTES (linsn));
a03caf76 4491 }
6f086dfc
RS
4492
4493 /* For pointer data type, suggest pointer register. */
e5e809f4 4494 if (POINTER_TYPE_P (TREE_TYPE (parm)))
6c6166bd
RK
4495 mark_reg_pointer (parmreg,
4496 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4497 / BITS_PER_UNIT));
6f086dfc
RS
4498 }
4499 else
4500 {
4501 /* Value must be stored in the stack slot STACK_PARM
4502 during function execution. */
4503
621061f4 4504 if (promoted_mode != nominal_mode)
86f8eff3
RK
4505 {
4506 /* Conversion is required. */
3412b298
JW
4507 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4508
4509 emit_move_insn (tempreg, validize_mem (entry_parm));
86f8eff3 4510
3412b298
JW
4511 push_to_sequence (conversion_insns);
4512 entry_parm = convert_to_mode (nominal_mode, tempreg,
a53e14c0 4513 TREE_UNSIGNED (TREE_TYPE (parm)));
de957303
DE
4514 if (stack_parm)
4515 {
4516 /* ??? This may need a big-endian conversion on sparc64. */
4517 stack_parm = change_address (stack_parm, nominal_mode,
4518 NULL_RTX);
4519 }
3412b298 4520 conversion_insns = get_insns ();
621061f4 4521 did_conversion = 1;
3412b298 4522 end_sequence ();
86f8eff3 4523 }
6f086dfc
RS
4524
4525 if (entry_parm != stack_parm)
4526 {
4527 if (stack_parm == 0)
7e41ffa2
RS
4528 {
4529 stack_parm
4530 = assign_stack_local (GET_MODE (entry_parm),
4531 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4532 /* If this is a memory ref that contains aggregate components,
4533 mark it as such for cse and loop optimize. */
4534 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4535 }
4536
621061f4 4537 if (promoted_mode != nominal_mode)
3412b298
JW
4538 {
4539 push_to_sequence (conversion_insns);
4540 emit_move_insn (validize_mem (stack_parm),
4541 validize_mem (entry_parm));
4542 conversion_insns = get_insns ();
4543 end_sequence ();
4544 }
4545 else
4546 emit_move_insn (validize_mem (stack_parm),
4547 validize_mem (entry_parm));
6f086dfc 4548 }
7d384cc0 4549 if (current_function_check_memory_usage)
86fa911a
RK
4550 {
4551 push_to_sequence (conversion_insns);
4552 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4553 XEXP (stack_parm, 0), ptr_mode,
4554 GEN_INT (GET_MODE_SIZE (GET_MODE
4555 (entry_parm))),
4556 TYPE_MODE (sizetype),
956d6950
JL
4557 GEN_INT (MEMORY_USE_RW),
4558 TYPE_MODE (integer_type_node));
6f086dfc 4559
86fa911a
RK
4560 conversion_insns = get_insns ();
4561 end_sequence ();
4562 }
6f086dfc
RS
4563 DECL_RTL (parm) = stack_parm;
4564 }
4565
4566 /* If this "parameter" was the place where we are receiving the
4567 function's incoming structure pointer, set up the result. */
4568 if (parm == function_result_decl)
ccdecf58
RK
4569 {
4570 tree result = DECL_RESULT (fndecl);
4571 tree restype = TREE_TYPE (result);
4572
4573 DECL_RTL (result)
38a448ca 4574 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
ccdecf58 4575
05e3bdb9 4576 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
ccdecf58 4577 }
6f086dfc
RS
4578
4579 if (TREE_THIS_VOLATILE (parm))
4580 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4581 if (TREE_READONLY (parm))
4582 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4583 }
4584
3412b298
JW
4585 /* Output all parameter conversion instructions (possibly including calls)
4586 now that all parameters have been copied out of hard registers. */
4587 emit_insns (conversion_insns);
4588
6f086dfc
RS
4589 last_parm_insn = get_last_insn ();
4590
4591 current_function_args_size = stack_args_size.constant;
4592
4593 /* Adjust function incoming argument size for alignment and
4594 minimum length. */
4595
4596#ifdef REG_PARM_STACK_SPACE
6f90e075 4597#ifndef MAYBE_REG_PARM_STACK_SPACE
6f086dfc
RS
4598 current_function_args_size = MAX (current_function_args_size,
4599 REG_PARM_STACK_SPACE (fndecl));
4600#endif
6f90e075 4601#endif
6f086dfc
RS
4602
4603#ifdef STACK_BOUNDARY
4604#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4605
4606 current_function_args_size
4607 = ((current_function_args_size + STACK_BYTES - 1)
4608 / STACK_BYTES) * STACK_BYTES;
4609#endif
4610
4611#ifdef ARGS_GROW_DOWNWARD
4612 current_function_arg_offset_rtx
5f4f0e22 4613 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
6f086dfc
RS
4614 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4615 size_int (-stack_args_size.constant)),
86fa911a 4616 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
6f086dfc
RS
4617#else
4618 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4619#endif
4620
4621 /* See how many bytes, if any, of its args a function should try to pop
4622 on return. */
4623
64e6d9cc 4624 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
4625 current_function_args_size);
4626
3b69d50e
RK
4627 /* For stdarg.h function, save info about
4628 regs and stack space used by the named args. */
6f086dfc 4629
3b69d50e 4630 if (!hide_last_arg)
6f086dfc
RS
4631 current_function_args_info = args_so_far;
4632
4633 /* Set the rtx used for the function return value. Put this in its
4634 own variable so any optimizers that need this information don't have
4635 to include tree.h. Do this here so it gets done when an inlined
4636 function gets output. */
4637
4638 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4639}
4640\f
75dc3319
RK
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  register tree parm;

  /* Scan the incoming PARM_DECLs for one whose incoming hard register
     is REGNO and whose value was widened on entry.  */
  for (parm = DECL_ARGUMENTS (current_function_decl); parm != 0;
       parm = TREE_CHAIN (parm))
    {
      rtx incoming = DECL_INCOMING_RTL (parm);
      enum machine_mode mode;
      int unsignedp;

      /* Only a plain register matching REGNO is a candidate, and only
	 when the argument is passed in the same mode as its type.  */
      if (GET_CODE (incoming) != REG
	  || REGNO (incoming) != regno
	  || TYPE_MODE (DECL_ARG_TYPE (parm)) != TYPE_MODE (TREE_TYPE (parm)))
	continue;

      unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
      mode = promote_mode (TREE_TYPE (parm), TYPE_MODE (TREE_TYPE (parm)),
			   &unsignedp, 1);

      /* The promoted mode must match the incoming register's mode and
	 must differ from the declared mode, i.e. an actual widening.  */
      if (mode == GET_MODE (incoming) && mode != DECL_MODE (parm))
	{
	  *pmode = DECL_MODE (parm);
	  *punsignedp = unsignedp;
	  return incoming;
	}
    }

  return 0;
}

#endif
4680\f
6f086dfc
RS
4681/* Compute the size and offset from the start of the stacked arguments for a
4682 parm passed in mode PASSED_MODE and with type TYPE.
4683
4684 INITIAL_OFFSET_PTR points to the current offset into the stacked
4685 arguments.
4686
4687 The starting offset and size for this parm are returned in *OFFSET_PTR
4688 and *ARG_SIZE_PTR, respectively.
4689
4690 IN_REGS is non-zero if the argument will be passed in registers. It will
4691 never be set if REG_PARM_STACK_SPACE is not defined.
4692
4693 FNDECL is the function in which the argument was defined.
4694
4695 There are two types of rounding that are done. The first, controlled by
4696 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4697 list to be aligned to the specific boundary (in bits). This rounding
4698 affects the initial and starting offsets, but not the argument size.
4699
4700 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4701 optionally rounds the size of the parm to PARM_BOUNDARY. The
4702 initial offset is not affected by this rounding, while the size always
4703 is and the starting offset may be. */
4704
4705/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4706 initial_offset_ptr is positive because locate_and_pad_parm's
4707 callers pass in the total size of args so far as
4708 initial_offset_ptr. arg_size_ptr is always positive.*/
4709
6f086dfc
RS
void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr)
     enum machine_mode passed_mode;
     tree type;
     int in_regs;
     tree fndecl;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
{
  /* Size of the parm: from its type if we have one, else from its mode.  */
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;

#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      /* Variable offset: clamp it to at least the reserved
		 area by taking the MAX as a tree expression.  */
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      size_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  /* Offsets here are negative; start from the negation of the total
     argument size accumulated so far.  */
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  /* Round the size up to a multiple of PARM_BOUNDARY if padding is
     wanted and the size is not already a multiple.  */
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  /* Step the offset down past this parm.  */
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  if (where_pad != downward)
    pad_to_arg_alignment (offset_ptr, boundary);
  /* The arg size is the distance from the initial offset down to the
     (aligned) starting offset.  */
  if (initial_offset_ptr->var)
    {
      arg_size_ptr->var = size_binop (MINUS_EXPR,
				      size_binop (MINUS_EXPR,
						  integer_zero_node,
						  initial_offset_ptr->var),
				      offset_ptr->var);
    }
  else
    {
      arg_size_ptr->constant = (- initial_offset_ptr->constant
				- offset_ptr->constant);
    }
#else /* !ARGS_GROW_DOWNWARD */
  /* Args grow upward: align the running offset, and the parm starts
     right there.  */
  pad_to_arg_alignment (initial_offset_ptr, boundary);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
      /* However, BLKmode args passed in regs have their padding done elsewhere.
	 The stack slot must be able to hold the entire register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  /* Round the size up to a multiple of PARM_BOUNDARY if padding is
     wanted and the size is not already a multiple.  */
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
4813
e16c591a
RS
4814/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4815 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4816
6f086dfc
RS
static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  /* A boundary of one storage unit (or less) needs no rounding.  */
  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  /* Variable offset: fold var + constant into a single tree
	     expression rounded to the boundary; the constant part is
	     then absorbed into the var part.  */
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	}
      else
	/* Constant offset: round arithmetically, toward the direction
	   in which the args grow.  */
	offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
4847
51723711 4848#ifndef ARGS_GROW_DOWNWARD
6f086dfc
RS
4849static void
4850pad_below (offset_ptr, passed_mode, sizetree)
4851 struct args_size *offset_ptr;
4852 enum machine_mode passed_mode;
4853 tree sizetree;
4854{
4855 if (passed_mode != BLKmode)
4856 {
4857 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4858 offset_ptr->constant
4859 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4860 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4861 - GET_MODE_SIZE (passed_mode));
4862 }
4863 else
4864 {
4865 if (TREE_CODE (sizetree) != INTEGER_CST
4866 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4867 {
4868 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4869 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4870 /* Add it in. */
4871 ADD_PARM_SIZE (*offset_ptr, s2);
4872 SUB_PARM_SIZE (*offset_ptr, sizetree);
4873 }
4874 }
4875}
51723711 4876#endif
6f086dfc 4877
#ifdef ARGS_GROW_DOWNWARD
/* Return VALUE (a size tree) rounded down to a multiple of DIVISOR,
   computed as (VALUE floor-div DIVISOR) * DIVISOR.  */

static tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  tree divisor_tree = size_int (divisor);
  tree quotient = size_binop (FLOOR_DIV_EXPR, value, divisor_tree);

  return size_binop (MULT_EXPR, quotient, divisor_tree);
}
#endif
6f086dfc
RS
4889\f
4890/* Walk the tree of blocks describing the binding levels within a function
4891 and warn about uninitialized variables.
4892 This is done after calling flow_analysis and before global_alloc
4893 clobbers the pseudo-regs to hard regs. */
4894
void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;

  /* Check each variable declared in this binding level.  */
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  /* Global optimizations can make it difficult to determine if a
	     particular variable has been initialized.  However, a VAR_DECL
	     with a nonzero DECL_INITIAL had an initializer, so do not
	     claim it is potentially uninitialized.

	     We do not care about the actual value in DECL_INITIAL, so we do
	     not worry that it may be a dangling pointer.  */
	  && DECL_INITIAL (decl) == NULL_TREE
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' might be used uninitialized in this function");
      /* Also warn about register variables whose setting might be lost
	 across a setjmp.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  /* Recurse into nested binding levels.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
4931
4932/* Do the appropriate part of uninitialized_vars_warning
4933 but for arguments instead of local variables. */
4934
4935void
0cd6ef35 4936setjmp_args_warning ()
6f086dfc
RS
4937{
4938 register tree decl;
4939 for (decl = DECL_ARGUMENTS (current_function_decl);
4940 decl; decl = TREE_CHAIN (decl))
4941 if (DECL_RTL (decl) != 0
4942 && GET_CODE (DECL_RTL (decl)) == REG
4943 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3c8cd8bd 4944 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
4945}
4946
4947/* If this function call setjmp, put all vars into the stack
4948 unless they were declared `register'. */
4949
void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;

  /* Move each eligible variable of this binding level into the stack.  */
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	/* Only variables whose home is a pseudo-register, or a MEM whose
	   address is an ADDRESSOF expression, need moving.  */
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variables because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
  /* Recurse into nested binding levels.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
4980\f
4981/* Like the previous function, but for args instead of local variables. */
4982
void
setjmp_protect_args ()
{
  register tree decl;

  /* Same eligibility tests as setjmp_protect, applied to the incoming
     arguments of the current function.  */
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	/* Only args whose home is a pseudo-register, or a MEM whose
	   address is an ADDRESSOF expression, need moving.  */
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
5005\f
5006/* Return the context-pointer register corresponding to DECL,
5007 or 0 if it does not need one. */
5008
5009rtx
5010lookup_static_chain (decl)
5011 tree decl;
5012{
b001a02f
PB
5013 tree context = decl_function_context (decl);
5014 tree link;
7ad8c4bf 5015
38ee6ed9
JM
5016 if (context == 0
5017 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
7ad8c4bf 5018 return 0;
38ee6ed9 5019
6f086dfc
RS
5020 /* We treat inline_function_decl as an alias for the current function
5021 because that is the inline function whose vars, types, etc.
5022 are being merged into the current function.
5023 See expand_inline_function. */
5024 if (context == current_function_decl || context == inline_function_decl)
5025 return virtual_stack_vars_rtx;
5026
5027 for (link = context_display; link; link = TREE_CHAIN (link))
5028 if (TREE_PURPOSE (link) == context)
5029 return RTL_EXPR_RTL (TREE_VALUE (link));
5030
5031 abort ();
5032}
5033\f
5034/* Convert a stack slot address ADDR for variable VAR
5035 (from a containing function)
5036 into an address valid in this function (using a static chain). */
5037
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the struct function for the enclosing function VAR lives in.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Look through an ADDRESSOF of a MEM to the slot address itself.  */
  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      /* The save slot itself lives in the outer frame, so its address
	 must in turn be fixed up for use here.  */
      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
5122\f
5123/* Return the address of the trampoline for entering nested fn FUNCTION.
5124 If necessary, allocate a trampoline (in the stack frame)
5125 and emit rtl to initialize its contents (at entry to this function). */
5126
rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  /* Also check the trampoline lists of all containing functions; an
     address found there must be made valid in this function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  /* FP nonzero means the defining function is an outer one; allocate
     the slot in that function's frame instead of our own.  */
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      /* Allocate the list node on the outer function's (maybe-)permanent
	 obstack so it lives as long as that function's other data.  */
      push_obstacks (fp->function_maybepermanent_obstack,
		     fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
5208
5209/* Given a trampoline address,
5210 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5211
static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary: add (alignment - 1) bytes,
     then mask off the low-order bits.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  /* Without TRAMPOLINE_ALIGNMENT, no rounding is required.  */
  return tramp;
}
5228\f
467456d0
RS
5229/* The functions identify_blocks and reorder_blocks provide a way to
5230 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5231 duplicate portions of the RTL code. Call identify_blocks before
5232 changing the RTL, and call reorder_blocks after. */
5233
b2a59b15
MS
5234/* Put all this function's BLOCK nodes including those that are chained
5235 onto the first block into a vector, and return it.
467456d0
RS
5236 Also store in each NOTE for the beginning or end of a block
5237 the index of that block in the vector.
b2a59b15 5238 The arguments are BLOCK, the chain of top-level blocks of the function,
467456d0
RS
5239 and INSNS, the insn chain of the function. */
5240
tree *
identify_blocks (block, insns)
     tree block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  int next_block_number = 1;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return 0;

  /* First call counts the blocks; second call fills the vector.
     Entry 0 holds the outermost block, which has no begin/end notes,
     so note numbering starts at 1.  */
  n_blocks = all_blocks (block, 0);
  /* NOTE(review): block_vector is xmalloc'd here; presumably the
     caller frees it -- confirm against callers.  */
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (block, block_vector);

  /* Walk the insn chain; assign each block-begin note the index of the
     block it opens, and each block-end note the index of the block
     being closed, using BLOCK_STACK to track nesting.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_stack[depth++] = current_block_number;
	    current_block_number = next_block_number;
	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
	    current_block_number = block_stack[--depth];
	  }
      }

  /* Every block must have been entered exactly once.  */
  if (n_blocks != next_block_number)
    abort ();

  return block_vector;
}
5284
5285/* Given BLOCK_VECTOR which was returned by identify_blocks,
5286 and a revised instruction chain, rebuild the tree structure
5287 of BLOCK nodes to correspond to the new order of RTL.
fc289cd1 5288 The new block tree is inserted below TOP_BLOCK.
467456d0
RS
5289 Returns the current top-level block. */
5290
tree
reorder_blocks (block_vector, block, insns)
     tree *block_vector;
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  /* If identify_blocks saw no blocks, there is nothing to rebuild.  */
  if (block_vector == 0)
    return block;

  /* Prune the old trees away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    /* Push this block on the front of the parent's subblock
	       list; the list is reversed again at the block's end.  */
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    /* NOTE(review): zeroing NOTE_SOURCE_FILE appears to mark
	       this note as consumed -- confirm.  */
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    /* Restore source order of the subblocks and pop back to
	       the enclosing block.  */
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  /* Fix the ordering of the top-level block's children as well.  */
  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
  return current_block;
}
5337
5338/* Reverse the order of elements in the chain T of blocks,
5339 and return the new head of the chain (old last element). */
5340
5341static tree
5342blocks_nreverse (t)
5343 tree t;
5344{
5345 register tree prev = 0, decl, next;
5346 for (decl = t; decl; decl = next)
5347 {
5348 next = BLOCK_CHAIN (decl);
5349 BLOCK_CHAIN (decl) = prev;
5350 prev = decl;
5351 }
5352 return prev;
5353}
5354
b2a59b15
MS
5355/* Count the subblocks of the list starting with BLOCK, and list them
5356 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5357 blocks. */
467456d0
RS
5358
5359static int
5360all_blocks (block, vector)
5361 tree block;
5362 tree *vector;
5363{
b2a59b15
MS
5364 int n_blocks = 0;
5365
5366 while (block)
5367 {
5368 TREE_ASM_WRITTEN (block) = 0;
5369
5370 /* Record this block. */
5371 if (vector)
5372 vector[n_blocks] = block;
5373
5374 ++n_blocks;
5375
5376 /* Record the subblocks, and their subblocks... */
5377 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5378 vector ? vector + n_blocks : 0);
5379 block = BLOCK_CHAIN (block);
5380 }
467456d0
RS
5381
5382 return n_blocks;
5383}
5384\f
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.

   FILENAME and LINE give the source position of the function's start;
   LINE may be 0 when no line information is available (e.g. when
   compiling a Java .class file).  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  /* Clear all the remaining per-function flags; they are set lazily
     as the corresponding constructs are encountered during expansion.  */
  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;
  current_function_sp_is_unchanging = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file. */
  if (line > 0)
    emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}
5532
/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.
   Called by language front ends; simply records the fact for later
   phases (see current_function_varargs uses elsewhere).  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
5541
/* Expand a call to __main at the beginning of a possible main function.  */

/* On targets with an init section (unless INVOKE__main overrides it),
   the work normally done by __main happens at program startup instead,
   so no call is emitted; HAS_INIT_SECTION records that decision.  */
#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  /* Emit the library call to __main before any user code runs.  */
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
5557\f
c20bf1f3
JB
5558extern struct obstack permanent_obstack;
5559
6f086dfc
RS
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
	    = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode, last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }

  /* Emit the entry-profiling call for -finstrument-functions, passing
     the function's address and its return address.  */
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
5814\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  /* Shared across all functions in this compilation; built lazily below.  */
  static rtx initial_trampoline;
#endif

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  /* Allocate the template on the permanent obstack so it
	     survives this function's compilation.  */
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  /* Emit the exit-profiling call for -finstrument-functions, mirroring
     the entry call emitted in expand_function_start.  */
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  The array is zero-terminated and allocated with
   oballoc, so its lifetime follows the current obstack.  */

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      /* One slot per sequence element plus the terminating zero.  */
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      /* A single insn: record its UID followed by the terminator.  */
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
6158
10914065 6159/* Determine how many INSN_UIDs in VEC are part of INSN. */
bdac5f58 6160
10914065 6161static int
bdac5f58
TW
6162contains (insn, vec)
6163 rtx insn;
6164 int *vec;
6165{
6166 register int i, j;
6167
6168 if (GET_CODE (insn) == INSN
6169 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6170 {
10914065 6171 int count = 0;
bdac5f58
TW
6172 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6173 for (j = 0; vec[j]; j++)
6174 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
10914065
TW
6175 count++;
6176 return count;
bdac5f58
TW
6177 }
6178 else
6179 {
6180 for (j = 0; vec[j]; j++)
6181 if (INSN_UID (insn) == vec[j])
10914065 6182 return 1;
bdac5f58
TW
6183 }
6184 return 0;
6185}
081f5e7e 6186#endif /* HAVE_prologue || HAVE_epilogue */
bdac5f58 6187
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
	 if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
	  && GET_CODE (basic_block_head[0]) != CODE_LABEL)
	basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  rtx tail, seq, tem;
	  rtx first_use = 0;
	  rtx last_use = 0;

	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	     epilogue insns, the USE insns at the end of a function,
	     the jump insn that returns, and then a BARRIER.  */

	  /* Move the USE insns at the end of a function onto a list.  */
	  while (prev
		 && GET_CODE (prev) == INSN
		 && GET_CODE (PATTERN (prev)) == USE)
	    {
	      tem = prev;
	      prev = prev_nonnote_insn (prev);

	      /* Unlink TEM from the insn chain, then push it onto the
		 front of the saved-USE list (preserving original order).  */
	      NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
	      PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
	      if (first_use)
		{
		  NEXT_INSN (tem) = first_use;
		  PREV_INSN (first_use) = tem;
		}
	      first_use = tem;
	      if (!last_use)
		last_use = tem;
	    }

	  emit_barrier_after (insn);

	  seq = gen_epilogue ();
	  tail = emit_jump_insn_after (seq, insn);

	  /* Insert the USE insns immediately before the return insn, which
	     must be the first instruction before the final barrier.  */
	  if (first_use)
	    {
	      tem = prev_nonnote_insn (get_last_insn ());
	      NEXT_INSN (PREV_INSN (tem)) = first_use;
	      PREV_INSN (first_use) = PREV_INSN (tem);
	      PREV_INSN (tem) = last_use;
	      NEXT_INSN (last_use) = tem;
	    }

	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

	  /* Include the new epilogue insns in the last block.  Ignore
	     them if they form a basic block unto themselves.  */
	  if (basic_block_end && n_basic_blocks
	      && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
	    basic_block_end[n_basic_blocks - 1] = tail;

	  /* Retain a map of the epilogue insns.  */
	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  return;
	}
    }
#endif
  epilogue = 0;
}
6289
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.

   F is the first insn of the function.  It is only referenced when
   HAVE_prologue or HAVE_epilogue is defined; otherwise the whole body
   compiles away, hence ATTRIBUTE_UNUSED.

   `prologue' and `epilogue' are the file-scope maps recorded when the
   prologue/epilogue insns were emitted (null-terminated arrays built by
   record_insns; `contains' tests membership against them).  Scheduling
   and reorg may have separated the NOTE_INSN_PROLOGUE_END and
   NOTE_INSN_EPILOGUE_BEG notes from the insns they are meant to delimit;
   this pass unlinks each note from the insn chain and re-links it just
   after the last prologue insn, resp. just before the first epilogue
   insn.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  /* Count the recorded prologue insns; the map is
	     null-terminated.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  /* Remember the note if we run across it before finding
		     the last prologue insn, so we need not rescan.  */
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      /* `contains' presumably returns how many of INSN's insns
		 (it may be a SEQUENCE) are in the prologue map — TODO
		 confirm against its definition.  LEN hits zero exactly at
		 the last prologue insn.  */
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  /* Unlink the note from wherever scheduling left it.  */
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;

		  /* Whether or not we can depend on basic_block_head,
		     attempt to keep it up-to-date.  */
		  if (basic_block_head[0] == note)
		    basic_block_head[0] = next;

		  /* Re-link the note immediately after the last prologue
		     insn; INSN is known to be that insn here.  */
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  /* Count the recorded epilogue insns; the map is
	     null-terminated.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  /* Remember the note if we run across it before finding
		     the first epilogue insn (we are scanning backwards).  */
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }
		  /* Unlink the note from wherever scheduling left it.  */
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;

		  /* Whether or not we can depend on basic_block_head,
		     attempt to keep it up-to-date.  If INSN headed the
		     last block, the note placed before it becomes the new
		     head.  (NOTE(review): the n_basic_blocks test here is
		     redundant — the enclosing `if (n_basic_blocks)' already
		     guarantees it — but harmless.)  */
		  if (n_basic_blocks
		      && basic_block_head[n_basic_blocks-1] == insn)
		    basic_block_head[n_basic_blocks-1] = note;

		  /* Re-link the note immediately before the first epilogue
		     insn; INSN is known to be that insn here.  */
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
This page took 1.45877 seconds and 5 git commands to generate.