]> gcc.gnu.org Git - gcc.git/blame - gcc/function.c
tm.texi (Varargs): Don't split argument of @item across lines.
[gcc.git] / gcc / function.c
CommitLineData
6f086dfc 1/* Expands front end tree to back end RTL for GNU C-Compiler
a5cad800 2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
6f086dfc
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
a35311b0
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
6f086dfc
RS
20
21
22/* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41#include "config.h"
670ee920 42#include "system.h"
6f086dfc
RS
43#include "rtl.h"
44#include "tree.h"
45#include "flags.h"
1ef08c63 46#include "except.h"
6f086dfc
RS
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
bdac5f58 56#include "basic-block.h"
c20bf1f3 57#include "obstack.h"
10f0ad3d 58#include "toplev.h"
6f086dfc 59
c795bca9
BS
60#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
61#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
62#endif
63
189cc377
RK
64#ifndef TRAMPOLINE_ALIGNMENT
65#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66#endif
67
d16790f2
JW
68#ifndef LOCAL_ALIGNMENT
69#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70#endif
71
293e3de4
RS
72/* Some systems use __main in a way incompatible with its use in gcc, in these
73 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
74 give the same symbol without quotes for an alternative entry point. You
0f41302f 75 must define both, or neither. */
293e3de4
RS
76#ifndef NAME__MAIN
77#define NAME__MAIN "__main"
78#define SYMBOL__MAIN __main
79#endif
80
6f086dfc
RS
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
84#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
85
86/* Similar, but round to the next highest integer that meets the
87 alignment. */
88#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
89
90/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
91 during rtl generation. If they are different register numbers, this is
92 always true. It may also be true if
93 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
94 generation. See fix_lexical_addr for details. */
95
96#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
97#define NEED_SEPARATE_AP
98#endif
99
100/* Number of bytes of args popped by function being compiled on its return.
101 Zero if no bytes are to be popped.
102 May affect compilation of return insn or of function epilogue. */
103
104int current_function_pops_args;
105
106/* Nonzero if function being compiled needs to be given an address
107 where the value should be stored. */
108
109int current_function_returns_struct;
110
111/* Nonzero if function being compiled needs to
112 return the address of where it has put a structure value. */
113
114int current_function_returns_pcc_struct;
115
116/* Nonzero if function being compiled needs to be passed a static chain. */
117
118int current_function_needs_context;
119
120/* Nonzero if function being compiled can call setjmp. */
121
122int current_function_calls_setjmp;
123
124/* Nonzero if function being compiled can call longjmp. */
125
126int current_function_calls_longjmp;
127
128/* Nonzero if function being compiled receives nonlocal gotos
129 from nested functions. */
130
131int current_function_has_nonlocal_label;
132
8634413a
JW
133/* Nonzero if function being compiled has nonlocal gotos to parent
134 function. */
135
136int current_function_has_nonlocal_goto;
137
6f086dfc
RS
138/* Nonzero if function being compiled contains nested functions. */
139
140int current_function_contains_functions;
141
fdb8a883
JW
142/* Nonzero if function being compiled doesn't modify the stack pointer
143 (ignoring the prologue and epilogue). This is only valid after
144 life_analysis has run. */
145
146int current_function_sp_is_unchanging;
147
acd693d1 148/* Nonzero if the function being compiled issues a computed jump. */
ab87f8c8 149
acd693d1 150int current_function_has_computed_jump;
ab87f8c8 151
173cd503
JM
152/* Nonzero if the current function is a thunk (a lightweight function that
153 just adjusts one of its arguments and forwards to another function), so
154 we should try to cut corners where we can. */
155int current_function_is_thunk;
156
6f086dfc
RS
157/* Nonzero if function being compiled can call alloca,
158 either as a subroutine or builtin. */
159
160int current_function_calls_alloca;
161
162/* Nonzero if the current function returns a pointer type */
163
164int current_function_returns_pointer;
165
166/* If some insns can be deferred to the delay slots of the epilogue, the
167 delay list for them is recorded here. */
168
169rtx current_function_epilogue_delay_list;
170
171/* If function's args have a fixed size, this is that size, in bytes.
172 Otherwise, it is -1.
173 May affect compilation of return insn or of function epilogue. */
174
175int current_function_args_size;
176
177/* # bytes the prologue should push and pretend that the caller pushed them.
178 The prologue must do this, but only if parms can be passed in registers. */
179
180int current_function_pretend_args_size;
181
f7339633 182/* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
0f41302f 183 defined, the needed space is pushed by the prologue. */
6f086dfc
RS
184
185int current_function_outgoing_args_size;
186
187/* This is the offset from the arg pointer to the place where the first
188 anonymous arg can be found, if there is one. */
189
190rtx current_function_arg_offset_rtx;
191
192/* Nonzero if current function uses varargs.h or equivalent.
193 Zero for functions that use stdarg.h. */
194
195int current_function_varargs;
196
ebb904cb
RK
197/* Nonzero if current function uses stdarg.h or equivalent.
198 Zero for functions that use varargs.h. */
199
200int current_function_stdarg;
201
6f086dfc
RS
202/* Quantities of various kinds of registers
203 used for the current function's args. */
204
205CUMULATIVE_ARGS current_function_args_info;
206
207/* Name of function now being compiled. */
208
209char *current_function_name;
210
f345de42
JL
211/* If non-zero, an RTL expression for the location at which the current
212 function returns its result. If the current function returns its
213 result in a register, current_function_return_rtx will always be
214 the hard register containing the result. */
6f086dfc
RS
215
216rtx current_function_return_rtx;
217
218/* Nonzero if the current function uses the constant pool. */
219
220int current_function_uses_const_pool;
221
222/* Nonzero if the current function uses pic_offset_table_rtx. */
223int current_function_uses_pic_offset_table;
224
225/* The arg pointer hard register, or the pseudo into which it was copied. */
226rtx current_function_internal_arg_pointer;
227
aeb302bb
JM
228/* Language-specific reason why the current function cannot be made inline. */
229char *current_function_cannot_inline;
230
07417085
KR
231/* Nonzero if instrumentation calls for function entry and exit should be
232 generated. */
233int current_function_instrument_entry_exit;
234
7d384cc0
KR
235/* Nonzero if memory access checking be enabled in the current function. */
236int current_function_check_memory_usage;
237
6f086dfc
RS
238/* The FUNCTION_DECL for an inline function currently being expanded. */
239tree inline_function_decl;
240
241/* Number of function calls seen so far in current function. */
242
243int function_call_count;
244
245/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
246 (labels to which there can be nonlocal gotos from nested functions)
247 in this function. */
248
249tree nonlocal_labels;
250
ba716ac9
BS
251/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
252 for nonlocal gotos. There is one for every nonlocal label in the function;
253 this list matches the one in nonlocal_labels.
6f086dfc
RS
254 Zero when function does not have nonlocal labels. */
255
ba716ac9 256rtx nonlocal_goto_handler_slots;
6f086dfc 257
e881bb1b
RH
258/* List (chain of EXPR_LIST) of labels heading the current handlers for
259 nonlocal gotos. */
260
261rtx nonlocal_goto_handler_labels;
262
6f086dfc
RS
263/* RTX for stack slot that holds the stack pointer value to restore
264 for a nonlocal goto.
265 Zero when function does not have nonlocal labels. */
266
267rtx nonlocal_goto_stack_level;
268
269/* Label that will go on parm cleanup code, if any.
270 Jumping to this label runs cleanup code for parameters, if
271 such code must be run. Following this code is the logical return label. */
272
273rtx cleanup_label;
274
275/* Label that will go on function epilogue.
276 Jumping to this label serves as a "return" instruction
277 on machines which require execution of the epilogue on all returns. */
278
279rtx return_label;
280
281/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
282 So we can mark them all live at the end of the function, if nonopt. */
283rtx save_expr_regs;
284
285/* List (chain of EXPR_LISTs) of all stack slots in this function.
286 Made for the sake of unshare_all_rtl. */
287rtx stack_slot_list;
288
289/* Chain of all RTL_EXPRs that have insns in them. */
290tree rtl_expr_chain;
291
292/* Label to jump back to for tail recursion, or 0 if we have
293 not yet needed one for this function. */
294rtx tail_recursion_label;
295
296/* Place after which to insert the tail_recursion_label if we need one. */
297rtx tail_recursion_reentry;
298
299/* Location at which to save the argument pointer if it will need to be
300 referenced. There are two cases where this is done: if nonlocal gotos
301 exist, or if vars stored at an offset from the argument pointer will be
302 needed by inner routines. */
303
304rtx arg_pointer_save_area;
305
306/* Offset to end of allocated area of stack frame.
307 If stack grows down, this is the address of the last stack slot allocated.
308 If stack grows up, this is the address for the next slot. */
8af5168b 309HOST_WIDE_INT frame_offset;
6f086dfc
RS
310
311/* List (chain of TREE_LISTs) of static chains for containing functions.
312 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
313 in an RTL_EXPR in the TREE_VALUE. */
314static tree context_display;
315
316/* List (chain of TREE_LISTs) of trampolines for nested functions.
317 The trampoline sets up the static chain and jumps to the function.
318 We supply the trampoline's address when the function's address is requested.
319
320 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
321 in an RTL_EXPR in the TREE_VALUE. */
322static tree trampoline_list;
323
324/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
325static rtx parm_birth_insn;
326
327#if 0
328/* Nonzero if a stack slot has been generated whose address is not
329 actually valid. It means that the generated rtl must all be scanned
330 to detect and correct the invalid addresses where they occur. */
331static int invalid_stack_slot;
332#endif
333
334/* Last insn of those whose job was to put parms into their nominal homes. */
335static rtx last_parm_insn;
336
e9a25f70
JL
337/* 1 + last pseudo register number possibly used for loading a copy
338 of a parameter of this function. */
339int max_parm_reg;
6f086dfc
RS
340
341/* Vector indexed by REGNO, containing location on stack in which
342 to put the parm which is nominally in pseudo register REGNO,
e9a25f70
JL
343 if we discover that that parm must go in the stack. The highest
344 element in this vector is one less than MAX_PARM_REG, above. */
345rtx *parm_reg_stack_loc;
6f086dfc 346
6f086dfc
RS
347/* Nonzero once virtual register instantiation has been done.
348 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
349static int virtuals_instantiated;
350
46766466
RS
351/* These variables hold pointers to functions to
352 save and restore machine-specific data,
353 in push_function_context and pop_function_context. */
9e014ded
RK
354void (*save_machine_status) PROTO((struct function *));
355void (*restore_machine_status) PROTO((struct function *));
46766466 356
6f086dfc
RS
357/* Nonzero if we need to distinguish between the return value of this function
358 and the return value of a function called by this function. This helps
359 integrate.c */
360
361extern int rtx_equal_function_value_matters;
e7a84011 362extern tree sequence_rtl_expr;
6f086dfc
RS
363\f
364/* In order to evaluate some expressions, such as function calls returning
365 structures in memory, we need to temporarily allocate stack locations.
366 We record each allocated temporary in the following structure.
367
368 Associated with each temporary slot is a nesting level. When we pop up
369 one level, all temporaries associated with the previous level are freed.
370 Normally, all temporaries are freed after the execution of the statement
371 in which they were created. However, if we are inside a ({...}) grouping,
372 the result may be in a temporary and hence must be preserved. If the
373 result could be in a temporary, we preserve it if we can determine which
374 one it is in. If we cannot determine which temporary may contain the
375 result, all temporaries are preserved. A temporary is preserved by
376 pretending it was allocated at the previous nesting level.
377
   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
381
/* One record per stack temporary.  Kept on the `temp_slots' chain;
   slots are reused between statements when their size, mode, alignment
   and alias set permit.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
423
424/* List of all temporaries allocated, both available and in use. */
425
426struct temp_slot *temp_slots;
427
428/* Current nesting level for temporaries. */
429
430int temp_slot_level;
e5e809f4
JL
431
432/* Current nesting level for variables in a block. */
433
434int var_temp_slot_level;
f5963e61
JL
435
436/* When temporaries are created by TARGET_EXPRs, they are created at
437 this level of temp_slot_level, so that they can remain allocated
438 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
439 of TARGET_EXPRs. */
440int target_temp_slot_level;
6f086dfc 441\f
e15679f8
RK
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  /* The rtx being replaced (VAR, a SUBREG of VAR, or a MEM using VAR).  */
  rtx old;
  /* The replacement rtx to substitute wherever OLD appears.  */
  rtx new;
  /* Next replacement in the chain for the same fixup pass.  */
  struct fixup_replacement *next;
};
453
454/* Forward declarations. */
455
1ac4f799
JL
456static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
457 int, struct function *));
d16790f2
JW
458static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
459 int, tree));
e15679f8
RK
460static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
461static void put_reg_into_stack PROTO((struct function *, rtx, tree,
0006e95b 462 enum machine_mode, enum machine_mode,
e5e809f4 463 int, int, int));
e15679f8
RK
464static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
465static struct fixup_replacement
466 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
467static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
468 rtx, int));
469static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
470 struct fixup_replacement **));
471static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
472static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
473static rtx fixup_stack_1 PROTO((rtx, rtx));
474static void optimize_bit_field PROTO((rtx, rtx, rtx *));
475static void instantiate_decls PROTO((tree, int));
476static void instantiate_decls_1 PROTO((tree, int));
477static void instantiate_decl PROTO((rtx, int, int));
478static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
479static void delete_handlers PROTO((void));
480static void pad_to_arg_alignment PROTO((struct args_size *, int));
51723711 481#ifndef ARGS_GROW_DOWNWARD
e15679f8
RK
482static void pad_below PROTO((struct args_size *, enum machine_mode,
483 tree));
51723711 484#endif
487a6e06 485#ifdef ARGS_GROW_DOWNWARD
e15679f8 486static tree round_down PROTO((tree, int));
487a6e06 487#endif
e15679f8
RK
488static rtx round_trampoline_addr PROTO((rtx));
489static tree blocks_nreverse PROTO((tree));
490static int all_blocks PROTO((tree, tree *));
081f5e7e 491#if defined (HAVE_prologue) || defined (HAVE_epilogue)
487a6e06 492static int *record_insns PROTO((rtx));
e15679f8 493static int contains PROTO((rtx, int *));
081f5e7e 494#endif /* HAVE_prologue || HAVE_epilogue */
e9a25f70 495static void put_addressof_into_stack PROTO((rtx));
f7b6d104 496static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
c20bf1f3 497\f
6f086dfc
RS
498/* Pointer to chain of `struct function' for containing functions. */
499struct function *outer_function_chain;
500
501/* Given a function decl for a containing function,
502 return the `struct function' for it. */
503
504struct function *
505find_function_data (decl)
506 tree decl;
507{
508 struct function *p;
e5e809f4 509
6f086dfc
RS
510 for (p = outer_function_chain; p; p = p->next)
511 if (p->decl == decl)
512 return p;
e5e809f4 513
6f086dfc
RS
514 abort ();
515}
516
517/* Save the current context for compilation of a nested function.
518 This is called from language-specific code.
519 The caller is responsible for saving any language-specific status,
6dc42e49 520 since this function knows only about language-independent variables. */
6f086dfc
RS
521
522void
a0dabda5
JM
523push_function_context_to (context)
524 tree context;
6f086dfc
RS
525{
526 struct function *p = (struct function *) xmalloc (sizeof (struct function));
527
528 p->next = outer_function_chain;
529 outer_function_chain = p;
530
531 p->name = current_function_name;
532 p->decl = current_function_decl;
533 p->pops_args = current_function_pops_args;
534 p->returns_struct = current_function_returns_struct;
535 p->returns_pcc_struct = current_function_returns_pcc_struct;
1651bdfe 536 p->returns_pointer = current_function_returns_pointer;
6f086dfc
RS
537 p->needs_context = current_function_needs_context;
538 p->calls_setjmp = current_function_calls_setjmp;
539 p->calls_longjmp = current_function_calls_longjmp;
540 p->calls_alloca = current_function_calls_alloca;
541 p->has_nonlocal_label = current_function_has_nonlocal_label;
8634413a 542 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
a0dabda5 543 p->contains_functions = current_function_contains_functions;
acd693d1 544 p->has_computed_jump = current_function_has_computed_jump;
173cd503 545 p->is_thunk = current_function_is_thunk;
6f086dfc
RS
546 p->args_size = current_function_args_size;
547 p->pretend_args_size = current_function_pretend_args_size;
548 p->arg_offset_rtx = current_function_arg_offset_rtx;
3b69d50e 549 p->varargs = current_function_varargs;
ebb904cb 550 p->stdarg = current_function_stdarg;
6f086dfc
RS
551 p->uses_const_pool = current_function_uses_const_pool;
552 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
553 p->internal_arg_pointer = current_function_internal_arg_pointer;
aeb302bb 554 p->cannot_inline = current_function_cannot_inline;
6f086dfc
RS
555 p->max_parm_reg = max_parm_reg;
556 p->parm_reg_stack_loc = parm_reg_stack_loc;
557 p->outgoing_args_size = current_function_outgoing_args_size;
558 p->return_rtx = current_function_return_rtx;
ba716ac9 559 p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
e881bb1b 560 p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
6f086dfc
RS
561 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
562 p->nonlocal_labels = nonlocal_labels;
563 p->cleanup_label = cleanup_label;
564 p->return_label = return_label;
565 p->save_expr_regs = save_expr_regs;
566 p->stack_slot_list = stack_slot_list;
567 p->parm_birth_insn = parm_birth_insn;
568 p->frame_offset = frame_offset;
569 p->tail_recursion_label = tail_recursion_label;
570 p->tail_recursion_reentry = tail_recursion_reentry;
571 p->arg_pointer_save_area = arg_pointer_save_area;
572 p->rtl_expr_chain = rtl_expr_chain;
573 p->last_parm_insn = last_parm_insn;
574 p->context_display = context_display;
575 p->trampoline_list = trampoline_list;
576 p->function_call_count = function_call_count;
577 p->temp_slots = temp_slots;
578 p->temp_slot_level = temp_slot_level;
e5e809f4
JL
579 p->target_temp_slot_level = target_temp_slot_level;
580 p->var_temp_slot_level = var_temp_slot_level;
6f086dfc 581 p->fixup_var_refs_queue = 0;
f979c996 582 p->epilogue_delay_list = current_function_epilogue_delay_list;
01c1558a 583 p->args_info = current_function_args_info;
7d384cc0 584 p->check_memory_usage = current_function_check_memory_usage;
07417085 585 p->instrument_entry_exit = current_function_instrument_entry_exit;
6f086dfc 586
a0dabda5 587 save_tree_status (p, context);
6f086dfc
RS
588 save_storage_status (p);
589 save_emit_status (p);
6f086dfc
RS
590 save_expr_status (p);
591 save_stmt_status (p);
e9a25f70 592 save_varasm_status (p, context);
46766466
RS
593 if (save_machine_status)
594 (*save_machine_status) (p);
6f086dfc
RS
595}
596
e4a4639e
JM
597void
598push_function_context ()
599{
a0dabda5 600 push_function_context_to (current_function_decl);
e4a4639e
JM
601}
602
6f086dfc
RS
603/* Restore the last saved context, at the end of a nested function.
604 This function is called from language-specific code. */
605
606void
a0dabda5
JM
607pop_function_context_from (context)
608 tree context;
6f086dfc
RS
609{
610 struct function *p = outer_function_chain;
e5e809f4 611 struct var_refs_queue *queue;
6f086dfc
RS
612
613 outer_function_chain = p->next;
614
49468af2
RK
615 current_function_contains_functions
616 = p->contains_functions || p->inline_obstacks
617 || context == current_function_decl;
acd693d1 618 current_function_has_computed_jump = p->has_computed_jump;
6f086dfc
RS
619 current_function_name = p->name;
620 current_function_decl = p->decl;
621 current_function_pops_args = p->pops_args;
622 current_function_returns_struct = p->returns_struct;
623 current_function_returns_pcc_struct = p->returns_pcc_struct;
1651bdfe 624 current_function_returns_pointer = p->returns_pointer;
6f086dfc
RS
625 current_function_needs_context = p->needs_context;
626 current_function_calls_setjmp = p->calls_setjmp;
627 current_function_calls_longjmp = p->calls_longjmp;
628 current_function_calls_alloca = p->calls_alloca;
629 current_function_has_nonlocal_label = p->has_nonlocal_label;
8634413a 630 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
173cd503 631 current_function_is_thunk = p->is_thunk;
6f086dfc
RS
632 current_function_args_size = p->args_size;
633 current_function_pretend_args_size = p->pretend_args_size;
634 current_function_arg_offset_rtx = p->arg_offset_rtx;
3b69d50e 635 current_function_varargs = p->varargs;
ebb904cb 636 current_function_stdarg = p->stdarg;
6f086dfc
RS
637 current_function_uses_const_pool = p->uses_const_pool;
638 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
639 current_function_internal_arg_pointer = p->internal_arg_pointer;
aeb302bb 640 current_function_cannot_inline = p->cannot_inline;
6f086dfc
RS
641 max_parm_reg = p->max_parm_reg;
642 parm_reg_stack_loc = p->parm_reg_stack_loc;
643 current_function_outgoing_args_size = p->outgoing_args_size;
644 current_function_return_rtx = p->return_rtx;
ba716ac9 645 nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
e881bb1b 646 nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
6f086dfc
RS
647 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
648 nonlocal_labels = p->nonlocal_labels;
649 cleanup_label = p->cleanup_label;
650 return_label = p->return_label;
651 save_expr_regs = p->save_expr_regs;
652 stack_slot_list = p->stack_slot_list;
653 parm_birth_insn = p->parm_birth_insn;
654 frame_offset = p->frame_offset;
655 tail_recursion_label = p->tail_recursion_label;
656 tail_recursion_reentry = p->tail_recursion_reentry;
657 arg_pointer_save_area = p->arg_pointer_save_area;
658 rtl_expr_chain = p->rtl_expr_chain;
659 last_parm_insn = p->last_parm_insn;
660 context_display = p->context_display;
661 trampoline_list = p->trampoline_list;
662 function_call_count = p->function_call_count;
663 temp_slots = p->temp_slots;
664 temp_slot_level = p->temp_slot_level;
e5e809f4
JL
665 target_temp_slot_level = p->target_temp_slot_level;
666 var_temp_slot_level = p->var_temp_slot_level;
f979c996 667 current_function_epilogue_delay_list = p->epilogue_delay_list;
7cbc7b0c 668 reg_renumber = 0;
01c1558a 669 current_function_args_info = p->args_info;
7d384cc0 670 current_function_check_memory_usage = p->check_memory_usage;
07417085 671 current_function_instrument_entry_exit = p->instrument_entry_exit;
6f086dfc 672
d1485032 673 restore_tree_status (p, context);
6f086dfc
RS
674 restore_storage_status (p);
675 restore_expr_status (p);
676 restore_emit_status (p);
677 restore_stmt_status (p);
a506307a 678 restore_varasm_status (p);
6f086dfc 679
46766466
RS
680 if (restore_machine_status)
681 (*restore_machine_status) (p);
682
6f086dfc
RS
683 /* Finish doing put_var_into_stack for any of our variables
684 which became addressable during the nested function. */
e5e809f4
JL
685 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
686 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
6f086dfc
RS
687
688 free (p);
689
690 /* Reset variables that have known state during rtx generation. */
691 rtx_equal_function_value_matters = 1;
692 virtuals_instantiated = 0;
693}
e4a4639e
JM
694
695void pop_function_context ()
696{
a0dabda5 697 pop_function_context_from (current_function_decl);
e4a4639e 698}
6f086dfc
RS
699\f
700/* Allocate fixed slots in the stack frame of the current function. */
701
702/* Return size needed for stack frame based on slots so far allocated.
c795bca9 703 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
6f086dfc
RS
704 the caller may have to do that. */
705
8af5168b 706HOST_WIDE_INT
6f086dfc
RS
707get_frame_size ()
708{
709#ifdef FRAME_GROWS_DOWNWARD
710 return -frame_offset;
711#else
712 return frame_offset;
713#endif
714}
715
716/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
717 with machine mode MODE.
718
719 ALIGN controls the amount of alignment for the address of the slot:
720 0 means according to MODE,
721 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
722 positive specifies alignment boundary in bits.
723
724 We do not round to stack_boundary here. */
725
726rtx
727assign_stack_local (mode, size, align)
728 enum machine_mode mode;
e5e809f4 729 HOST_WIDE_INT size;
6f086dfc
RS
730 int align;
731{
732 register rtx x, addr;
733 int bigend_correction = 0;
734 int alignment;
735
736 if (align == 0)
737 {
d16790f2
JW
738 tree type;
739
740 alignment = GET_MODE_ALIGNMENT (mode);
6f086dfc 741 if (mode == BLKmode)
d16790f2
JW
742 alignment = BIGGEST_ALIGNMENT;
743
744 /* Allow the target to (possibly) increase the alignment of this
745 stack slot. */
746 type = type_for_mode (mode, 0);
747 if (type)
748 alignment = LOCAL_ALIGNMENT (type, alignment);
749
750 alignment /= BITS_PER_UNIT;
6f086dfc
RS
751 }
752 else if (align == -1)
753 {
754 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
755 size = CEIL_ROUND (size, alignment);
756 }
757 else
758 alignment = align / BITS_PER_UNIT;
759
6f086dfc
RS
760 /* Round frame offset to that alignment.
761 We must be careful here, since FRAME_OFFSET might be negative and
762 division with a negative dividend isn't as well defined as we might
763 like. So we instead assume that ALIGNMENT is a power of two and
764 use logical operations which are unambiguous. */
765#ifdef FRAME_GROWS_DOWNWARD
766 frame_offset = FLOOR_ROUND (frame_offset, alignment);
767#else
768 frame_offset = CEIL_ROUND (frame_offset, alignment);
769#endif
770
771 /* On a big-endian machine, if we are allocating more space than we will use,
772 use the least significant bytes of those that are allocated. */
f76b9db2 773 if (BYTES_BIG_ENDIAN && mode != BLKmode)
6f086dfc 774 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc
RS
775
776#ifdef FRAME_GROWS_DOWNWARD
777 frame_offset -= size;
778#endif
779
780 /* If we have already instantiated virtual registers, return the actual
781 address relative to the frame pointer. */
782 if (virtuals_instantiated)
783 addr = plus_constant (frame_pointer_rtx,
784 (frame_offset + bigend_correction
785 + STARTING_FRAME_OFFSET));
786 else
787 addr = plus_constant (virtual_stack_vars_rtx,
788 frame_offset + bigend_correction);
789
790#ifndef FRAME_GROWS_DOWNWARD
791 frame_offset += size;
792#endif
793
38a448ca 794 x = gen_rtx_MEM (mode, addr);
6f086dfc 795
38a448ca 796 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
6f086dfc
RS
797
798 return x;
799}
800
801/* Assign a stack slot in a containing function.
802 First three arguments are same as in preceding function.
803 The last argument specifies the function to allocate in. */
804
1ac4f799 805static rtx
6f086dfc
RS
806assign_outer_stack_local (mode, size, align, function)
807 enum machine_mode mode;
e5e809f4 808 HOST_WIDE_INT size;
6f086dfc
RS
809 int align;
810 struct function *function;
811{
812 register rtx x, addr;
813 int bigend_correction = 0;
814 int alignment;
815
816 /* Allocate in the memory associated with the function in whose frame
817 we are assigning. */
818 push_obstacks (function->function_obstack,
819 function->function_maybepermanent_obstack);
820
821 if (align == 0)
822 {
d16790f2
JW
823 tree type;
824
825 alignment = GET_MODE_ALIGNMENT (mode);
6f086dfc 826 if (mode == BLKmode)
d16790f2
JW
827 alignment = BIGGEST_ALIGNMENT;
828
829 /* Allow the target to (possibly) increase the alignment of this
830 stack slot. */
831 type = type_for_mode (mode, 0);
832 if (type)
833 alignment = LOCAL_ALIGNMENT (type, alignment);
834
835 alignment /= BITS_PER_UNIT;
6f086dfc
RS
836 }
837 else if (align == -1)
838 {
839 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
840 size = CEIL_ROUND (size, alignment);
841 }
842 else
843 alignment = align / BITS_PER_UNIT;
844
6f086dfc
RS
845 /* Round frame offset to that alignment. */
846#ifdef FRAME_GROWS_DOWNWARD
2af69b62 847 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
6f086dfc 848#else
2af69b62 849 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
6f086dfc
RS
850#endif
851
852 /* On a big-endian machine, if we are allocating more space than we will use,
853 use the least significant bytes of those that are allocated. */
f76b9db2 854 if (BYTES_BIG_ENDIAN && mode != BLKmode)
6f086dfc 855 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc
RS
856
857#ifdef FRAME_GROWS_DOWNWARD
858 function->frame_offset -= size;
859#endif
860 addr = plus_constant (virtual_stack_vars_rtx,
861 function->frame_offset + bigend_correction);
862#ifndef FRAME_GROWS_DOWNWARD
863 function->frame_offset += size;
864#endif
865
38a448ca 866 x = gen_rtx_MEM (mode, addr);
6f086dfc
RS
867
868 function->stack_slot_list
38a448ca 869 = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
6f086dfc
RS
870
871 pop_obstacks ();
872
873 return x;
874}
875\f
876/* Allocate a temporary stack slot and record it for possible later
877 reuse.
878
879 MODE is the machine mode to be given to the returned rtx.
880
881 SIZE is the size in units of the space required. We do no rounding here
882 since assign_stack_local will do any required rounding.
883
d93d4205
MS
884 KEEP is 1 if this slot is to be retained after a call to
885 free_temp_slots. Automatic variables for a block are allocated
e5e809f4
JL
886 with this flag. KEEP is 2 if we allocate a longer term temporary,
887 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
888 if we are to allocate something at an inner level to be treated as
a4c6502a
MM
889 a variable in the block (e.g., a SAVE_EXPR).
890
891 TYPE is the type that will be used for the stack slot. */
6f086dfc 892
d16790f2
JW
893static rtx
894assign_stack_temp_for_type (mode, size, keep, type)
6f086dfc 895 enum machine_mode mode;
e5e809f4 896 HOST_WIDE_INT size;
6f086dfc 897 int keep;
d16790f2 898 tree type;
6f086dfc 899{
d16790f2 900 int align;
a4c6502a 901 int alias_set;
6f086dfc
RS
902 struct temp_slot *p, *best_p = 0;
903
303ec2aa
RK
904 /* If SIZE is -1 it means that somebody tried to allocate a temporary
905 of a variable size. */
906 if (size == -1)
907 abort ();
908
a4c6502a
MM
909 /* If we know the alias set for the memory that will be used, use
910 it. If there's no TYPE, then we don't know anything about the
911 alias set for the memory. */
912 if (type)
913 alias_set = get_alias_set (type);
914 else
915 alias_set = 0;
916
d16790f2
JW
917 align = GET_MODE_ALIGNMENT (mode);
918 if (mode == BLKmode)
919 align = BIGGEST_ALIGNMENT;
6f086dfc 920
d16790f2
JW
921 if (! type)
922 type = type_for_mode (mode, 0);
923 if (type)
924 align = LOCAL_ALIGNMENT (type, align);
925
926 /* Try to find an available, already-allocated temporary of the proper
927 mode which meets the size and alignment requirements. Choose the
928 smallest one with the closest alignment. */
929 for (p = temp_slots; p; p = p->next)
930 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
931 && ! p->in_use
a4c6502a
MM
932 && (!flag_strict_aliasing
933 || (alias_set && p->alias_set == alias_set))
d16790f2
JW
934 && (best_p == 0 || best_p->size > p->size
935 || (best_p->size == p->size && best_p->align > p->align)))
936 {
937 if (p->align == align && p->size == size)
938 {
939 best_p = 0;
940 break;
941 }
6f086dfc 942 best_p = p;
d16790f2 943 }
6f086dfc
RS
944
945 /* Make our best, if any, the one to use. */
946 if (best_p)
a45035b6
JW
947 {
948 /* If there are enough aligned bytes left over, make them into a new
949 temp_slot so that the extra bytes don't get wasted. Do this only
950 for BLKmode slots, so that we can be sure of the alignment. */
a4c6502a
MM
951 if (GET_MODE (best_p->slot) == BLKmode
952 /* We can't split slots if -fstrict-aliasing because the
953 information about the alias set for the new slot will be
954 lost. */
955 && !flag_strict_aliasing)
a45035b6 956 {
d16790f2 957 int alignment = best_p->align / BITS_PER_UNIT;
e5e809f4 958 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
959
960 if (best_p->size - rounded_size >= alignment)
961 {
962 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
a25d4ba2 963 p->in_use = p->addr_taken = 0;
a45035b6 964 p->size = best_p->size - rounded_size;
307d8cd6
RK
965 p->base_offset = best_p->base_offset + rounded_size;
966 p->full_size = best_p->full_size - rounded_size;
38a448ca
RH
967 p->slot = gen_rtx_MEM (BLKmode,
968 plus_constant (XEXP (best_p->slot, 0),
969 rounded_size));
d16790f2 970 p->align = best_p->align;
e5e76139 971 p->address = 0;
84e24c03 972 p->rtl_expr = 0;
a45035b6
JW
973 p->next = temp_slots;
974 temp_slots = p;
975
38a448ca
RH
976 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
977 stack_slot_list);
a45035b6
JW
978
979 best_p->size = rounded_size;
291dde90 980 best_p->full_size = rounded_size;
a45035b6
JW
981 }
982 }
983
984 p = best_p;
985 }
986
6f086dfc
RS
987 /* If we still didn't find one, make a new temporary. */
988 if (p == 0)
989 {
e5e809f4
JL
990 HOST_WIDE_INT frame_offset_old = frame_offset;
991
6f086dfc 992 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
e5e809f4 993
c87a0a39
JL
994 /* We are passing an explicit alignment request to assign_stack_local.
995 One side effect of that is assign_stack_local will not round SIZE
996 to ensure the frame offset remains suitably aligned.
997
998 So for requests which depended on the rounding of SIZE, we go ahead
999 and round it now. We also make sure ALIGNMENT is at least
1000 BIGGEST_ALIGNMENT. */
6f67a30d
JW
1001 if (mode == BLKmode && align < (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
1002 abort();
1003 p->slot = assign_stack_local (mode,
1004 mode == BLKmode
1005 ? CEIL_ROUND (size, align) : size,
1006 align);
d16790f2
JW
1007
1008 p->align = align;
a4c6502a 1009 p->alias_set = alias_set;
e5e809f4 1010
b2a80c0d
DE
1011 /* The following slot size computation is necessary because we don't
1012 know the actual size of the temporary slot until assign_stack_local
1013 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
1014 requested temporary. Note that extra space added for alignment
1015 can be either above or below this stack slot depending on which
1016 way the frame grows. We include the extra space if and only if it
1017 is above this slot. */
b2a80c0d
DE
1018#ifdef FRAME_GROWS_DOWNWARD
1019 p->size = frame_offset_old - frame_offset;
1020#else
fc91b0d0
RK
1021 p->size = size;
1022#endif
e5e809f4 1023
fc91b0d0
RK
1024 /* Now define the fields used by combine_temp_slots. */
1025#ifdef FRAME_GROWS_DOWNWARD
1026 p->base_offset = frame_offset;
1027 p->full_size = frame_offset_old - frame_offset;
1028#else
1029 p->base_offset = frame_offset_old;
1030 p->full_size = frame_offset - frame_offset_old;
b2a80c0d 1031#endif
e5e76139 1032 p->address = 0;
6f086dfc
RS
1033 p->next = temp_slots;
1034 temp_slots = p;
1035 }
1036
1037 p->in_use = 1;
a25d4ba2 1038 p->addr_taken = 0;
e7a84011 1039 p->rtl_expr = sequence_rtl_expr;
a25d4ba2 1040
d93d4205
MS
1041 if (keep == 2)
1042 {
1043 p->level = target_temp_slot_level;
1044 p->keep = 0;
1045 }
e5e809f4
JL
1046 else if (keep == 3)
1047 {
1048 p->level = var_temp_slot_level;
1049 p->keep = 0;
1050 }
d93d4205
MS
1051 else
1052 {
1053 p->level = temp_slot_level;
1054 p->keep = keep;
1055 }
1995f267
RK
1056
1057 /* We may be reusing an old slot, so clear any MEM flags that may have been
1058 set from before. */
1059 RTX_UNCHANGING_P (p->slot) = 0;
1060 MEM_IN_STRUCT_P (p->slot) = 0;
c6df88cb
MM
1061 MEM_SCALAR_P (p->slot) = 0;
1062 MEM_ALIAS_SET (p->slot) = 0;
6f086dfc
RS
1063 return p->slot;
1064}
d16790f2
JW
1065
1066/* Allocate a temporary stack slot and record it for possible later
1067 reuse. First three arguments are same as in preceding function. */
1068
1069rtx
1070assign_stack_temp (mode, size, keep)
1071 enum machine_mode mode;
1072 HOST_WIDE_INT size;
1073 int keep;
1074{
1075 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
1076}
638141a6 1077\f
230f21b4
PB
1078/* Assign a temporary of given TYPE.
1079 KEEP is as for assign_stack_temp.
1080 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
b55d9ff8
RK
1081 it is 0 if a register is OK.
1082 DONT_PROMOTE is 1 if we should not promote values in register
1083 to wider modes. */
230f21b4
PB
1084
1085rtx
b55d9ff8 1086assign_temp (type, keep, memory_required, dont_promote)
230f21b4
PB
1087 tree type;
1088 int keep;
1089 int memory_required;
b55d9ff8 1090 int dont_promote;
230f21b4
PB
1091{
1092 enum machine_mode mode = TYPE_MODE (type);
638141a6
RK
1093 int unsignedp = TREE_UNSIGNED (type);
1094
230f21b4
PB
1095 if (mode == BLKmode || memory_required)
1096 {
e5e809f4 1097 HOST_WIDE_INT size = int_size_in_bytes (type);
230f21b4
PB
1098 rtx tmp;
1099
1100 /* Unfortunately, we don't yet know how to allocate variable-sized
1101 temporaries. However, sometimes we have a fixed upper limit on
1102 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
0f41302f 1103 instead. This is the case for Chill variable-sized strings. */
230f21b4
PB
1104 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
1105 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
1106 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
1107 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
1108
d16790f2 1109 tmp = assign_stack_temp_for_type (mode, size, keep, type);
c6df88cb 1110 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
230f21b4
PB
1111 return tmp;
1112 }
638141a6 1113
230f21b4 1114#ifndef PROMOTE_FOR_CALL_ONLY
b55d9ff8
RK
1115 if (! dont_promote)
1116 mode = promote_mode (type, mode, &unsignedp, 0);
230f21b4 1117#endif
638141a6 1118
230f21b4
PB
1119 return gen_reg_rtx (mode);
1120}
638141a6 1121\f
a45035b6
JW
1122/* Combine temporary stack slots which are adjacent on the stack.
1123
1124 This allows for better use of already allocated stack space. This is only
1125 done for BLKmode slots because we can be sure that we won't have alignment
1126 problems in this case. */
1127
1128void
1129combine_temp_slots ()
1130{
1131 struct temp_slot *p, *q;
1132 struct temp_slot *prev_p, *prev_q;
e5e809f4
JL
1133 int num_slots;
1134
a4c6502a
MM
1135 /* We can't combine slots, because the information about which slot
1136 is in which alias set will be lost. */
1137 if (flag_strict_aliasing)
1138 return;
1139
e5e809f4
JL
1140 /* If there are a lot of temp slots, don't do anything unless
1141 high levels of optimizaton. */
1142 if (! flag_expensive_optimizations)
1143 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1144 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1145 return;
a45035b6 1146
e9b7093a
RS
1147 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1148 {
1149 int delete_p = 0;
e5e809f4 1150
e9b7093a
RS
1151 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1152 for (q = p->next, prev_q = p; q; q = prev_q->next)
a45035b6 1153 {
e9b7093a
RS
1154 int delete_q = 0;
1155 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
a45035b6 1156 {
fc91b0d0 1157 if (p->base_offset + p->full_size == q->base_offset)
e9b7093a
RS
1158 {
1159 /* Q comes after P; combine Q into P. */
1160 p->size += q->size;
307d8cd6 1161 p->full_size += q->full_size;
e9b7093a
RS
1162 delete_q = 1;
1163 }
fc91b0d0 1164 else if (q->base_offset + q->full_size == p->base_offset)
e9b7093a
RS
1165 {
1166 /* P comes after Q; combine P into Q. */
1167 q->size += p->size;
307d8cd6 1168 q->full_size += p->full_size;
e9b7093a
RS
1169 delete_p = 1;
1170 break;
1171 }
a45035b6 1172 }
e9b7093a
RS
1173 /* Either delete Q or advance past it. */
1174 if (delete_q)
1175 prev_q->next = q->next;
1176 else
1177 prev_q = q;
a45035b6 1178 }
e9b7093a
RS
1179 /* Either delete P or advance past it. */
1180 if (delete_p)
1181 {
1182 if (prev_p)
1183 prev_p->next = p->next;
1184 else
1185 temp_slots = p->next;
1186 }
1187 else
1188 prev_p = p;
1189 }
a45035b6 1190}
6f086dfc 1191\f
e5e76139
RK
1192/* Find the temp slot corresponding to the object at address X. */
1193
1194static struct temp_slot *
1195find_temp_slot_from_address (x)
1196 rtx x;
1197{
1198 struct temp_slot *p;
1199 rtx next;
1200
1201 for (p = temp_slots; p; p = p->next)
1202 {
1203 if (! p->in_use)
1204 continue;
e5e809f4 1205
e5e76139 1206 else if (XEXP (p->slot, 0) == x
abb52246
RK
1207 || p->address == x
1208 || (GET_CODE (x) == PLUS
1209 && XEXP (x, 0) == virtual_stack_vars_rtx
1210 && GET_CODE (XEXP (x, 1)) == CONST_INT
1211 && INTVAL (XEXP (x, 1)) >= p->base_offset
1212 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
e5e76139
RK
1213 return p;
1214
1215 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1216 for (next = p->address; next; next = XEXP (next, 1))
1217 if (XEXP (next, 0) == x)
1218 return p;
1219 }
1220
1221 return 0;
1222}
1223
9faa82d8 1224/* Indicate that NEW is an alternate way of referring to the temp slot
e5e809f4 1225 that previously was known by OLD. */
e5e76139
RK
1226
1227void
1228update_temp_slot_address (old, new)
1229 rtx old, new;
1230{
1231 struct temp_slot *p = find_temp_slot_from_address (old);
1232
1233 /* If none, return. Else add NEW as an alias. */
1234 if (p == 0)
1235 return;
1236 else if (p->address == 0)
1237 p->address = new;
1238 else
1239 {
1240 if (GET_CODE (p->address) != EXPR_LIST)
38a448ca 1241 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
e5e76139 1242
38a448ca 1243 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
e5e76139
RK
1244 }
1245}
1246
a25d4ba2 1247/* If X could be a reference to a temporary slot, mark the fact that its
9faa82d8 1248 address was taken. */
a25d4ba2
RK
1249
1250void
1251mark_temp_addr_taken (x)
1252 rtx x;
1253{
1254 struct temp_slot *p;
1255
1256 if (x == 0)
1257 return;
1258
1259 /* If X is not in memory or is at a constant address, it cannot be in
1260 a temporary slot. */
1261 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1262 return;
1263
1264 p = find_temp_slot_from_address (XEXP (x, 0));
1265 if (p != 0)
1266 p->addr_taken = 1;
1267}
1268
9cca6a99
MS
1269/* If X could be a reference to a temporary slot, mark that slot as
1270 belonging to the to one level higher than the current level. If X
1271 matched one of our slots, just mark that one. Otherwise, we can't
1272 easily predict which it is, so upgrade all of them. Kept slots
1273 need not be touched.
6f086dfc
RS
1274
1275 This is called when an ({...}) construct occurs and a statement
1276 returns a value in memory. */
1277
1278void
1279preserve_temp_slots (x)
1280 rtx x;
1281{
a25d4ba2 1282 struct temp_slot *p = 0;
6f086dfc 1283
73620b82
RK
1284 /* If there is no result, we still might have some objects whose address
1285 were taken, so we need to make sure they stay around. */
e3a77161 1286 if (x == 0)
73620b82
RK
1287 {
1288 for (p = temp_slots; p; p = p->next)
1289 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1290 p->level--;
1291
1292 return;
1293 }
e3a77161
RK
1294
1295 /* If X is a register that is being used as a pointer, see if we have
1296 a temporary slot we know it points to. To be consistent with
1297 the code below, we really should preserve all non-kept slots
1298 if we can't find a match, but that seems to be much too costly. */
a25d4ba2
RK
1299 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1300 p = find_temp_slot_from_address (x);
1301
6f086dfc 1302 /* If X is not in memory or is at a constant address, it cannot be in
e19571db
RK
1303 a temporary slot, but it can contain something whose address was
1304 taken. */
a25d4ba2 1305 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
e19571db
RK
1306 {
1307 for (p = temp_slots; p; p = p->next)
1308 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1309 p->level--;
1310
1311 return;
1312 }
6f086dfc
RS
1313
1314 /* First see if we can find a match. */
73620b82 1315 if (p == 0)
a25d4ba2
RK
1316 p = find_temp_slot_from_address (XEXP (x, 0));
1317
e5e76139
RK
1318 if (p != 0)
1319 {
a25d4ba2
RK
1320 /* Move everything at our level whose address was taken to our new
1321 level in case we used its address. */
1322 struct temp_slot *q;
1323
9cca6a99
MS
1324 if (p->level == temp_slot_level)
1325 {
1326 for (q = temp_slots; q; q = q->next)
1327 if (q != p && q->addr_taken && q->level == p->level)
1328 q->level--;
a25d4ba2 1329
9cca6a99
MS
1330 p->level--;
1331 p->addr_taken = 0;
1332 }
e5e76139
RK
1333 return;
1334 }
6f086dfc
RS
1335
1336 /* Otherwise, preserve all non-kept slots at this level. */
1337 for (p = temp_slots; p; p = p->next)
1338 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1339 p->level--;
1340}
1341
422c8f63
RK
1342/* X is the result of an RTL_EXPR. If it is a temporary slot associated
1343 with that RTL_EXPR, promote it into a temporary slot at the present
1344 level so it will not be freed when we free slots made in the
1345 RTL_EXPR. */
1346
1347void
1348preserve_rtl_expr_result (x)
1349 rtx x;
1350{
1351 struct temp_slot *p;
1352
1353 /* If X is not in memory or is at a constant address, it cannot be in
1354 a temporary slot. */
1355 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1356 return;
1357
199b61d8
RK
1358 /* If we can find a match, move it to our level unless it is already at
1359 an upper level. */
1360 p = find_temp_slot_from_address (XEXP (x, 0));
1361 if (p != 0)
1362 {
1363 p->level = MIN (p->level, temp_slot_level);
1364 p->rtl_expr = 0;
1365 }
422c8f63
RK
1366
1367 return;
1368}
1369
6f086dfc 1370/* Free all temporaries used so far. This is normally called at the end
e7a84011
RK
1371 of generating code for a statement. Don't free any temporaries
1372 currently in use for an RTL_EXPR that hasn't yet been emitted.
1373 We could eventually do better than this since it can be reused while
1374 generating the same RTL_EXPR, but this is complex and probably not
1375 worthwhile. */
6f086dfc
RS
1376
1377void
1378free_temp_slots ()
1379{
1380 struct temp_slot *p;
1381
1382 for (p = temp_slots; p; p = p->next)
e7a84011
RK
1383 if (p->in_use && p->level == temp_slot_level && ! p->keep
1384 && p->rtl_expr == 0)
1385 p->in_use = 0;
1386
1387 combine_temp_slots ();
1388}
1389
1390/* Free all temporary slots used in T, an RTL_EXPR node. */
1391
1392void
1393free_temps_for_rtl_expr (t)
1394 tree t;
1395{
1396 struct temp_slot *p;
1397
1398 for (p = temp_slots; p; p = p->next)
1399 if (p->rtl_expr == t)
6f086dfc 1400 p->in_use = 0;
a45035b6
JW
1401
1402 combine_temp_slots ();
6f086dfc
RS
1403}
1404
956d6950 1405/* Mark all temporaries ever allocated in this function as not suitable
a94e4054
RK
1406 for reuse until the current level is exited. */
1407
1408void
1409mark_all_temps_used ()
1410{
1411 struct temp_slot *p;
1412
1413 for (p = temp_slots; p; p = p->next)
1414 {
85b119d1 1415 p->in_use = p->keep = 1;
27ce006b 1416 p->level = MIN (p->level, temp_slot_level);
a94e4054
RK
1417 }
1418}
1419
6f086dfc
RS
1420/* Push deeper into the nesting level for stack temporaries. */
1421
1422void
1423push_temp_slots ()
1424{
6f086dfc
RS
1425 temp_slot_level++;
1426}
1427
e5e809f4
JL
1428/* Likewise, but save the new level as the place to allocate variables
1429 for blocks. */
1430
1431void
1432push_temp_slots_for_block ()
1433{
1434 push_temp_slots ();
1435
1436 var_temp_slot_level = temp_slot_level;
1437}
1438
f5963e61
JL
1439/* Likewise, but save the new level as the place to allocate temporaries
1440 for TARGET_EXPRs. */
1441
1442void
1443push_temp_slots_for_target ()
1444{
1445 push_temp_slots ();
1446
1447 target_temp_slot_level = temp_slot_level;
1448}
1449
1450/* Set and get the value of target_temp_slot_level. The only
1451 permitted use of these functions is to save and restore this value. */
1452
1453int
1454get_target_temp_slot_level ()
1455{
1456 return target_temp_slot_level;
1457}
1458
1459void
1460set_target_temp_slot_level (level)
1461 int level;
1462{
1463 target_temp_slot_level = level;
1464}
1465
6f086dfc
RS
1466/* Pop a temporary nesting level. All slots in use in the current level
1467 are freed. */
1468
1469void
1470pop_temp_slots ()
1471{
1472 struct temp_slot *p;
1473
6f086dfc 1474 for (p = temp_slots; p; p = p->next)
e7a84011 1475 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
6f086dfc
RS
1476 p->in_use = 0;
1477
a45035b6
JW
1478 combine_temp_slots ();
1479
6f086dfc
RS
1480 temp_slot_level--;
1481}
bc0ebdf9
RK
1482
1483/* Initialize temporary slots. */
1484
1485void
1486init_temp_slots ()
1487{
1488 /* We have not allocated any temporaries yet. */
1489 temp_slots = 0;
1490 temp_slot_level = 0;
e5e809f4 1491 var_temp_slot_level = 0;
bc0ebdf9
RK
1492 target_temp_slot_level = 0;
1493}
6f086dfc
RS
1494\f
1495/* Retroactively move an auto variable from a register to a stack slot.
1496 This is done when an address-reference to the variable is seen. */
1497
1498void
1499put_var_into_stack (decl)
1500 tree decl;
1501{
1502 register rtx reg;
00d8a4c1 1503 enum machine_mode promoted_mode, decl_mode;
6f086dfc 1504 struct function *function = 0;
c20bf1f3 1505 tree context;
e9a25f70 1506 int can_use_addressof;
c20bf1f3 1507
c20bf1f3 1508 context = decl_function_context (decl);
6f086dfc 1509
9ec36da5 1510 /* Get the current rtl used for this object and its original mode. */
6f086dfc 1511 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
2baccce2
RS
1512
1513 /* No need to do anything if decl has no rtx yet
1514 since in that case caller is setting TREE_ADDRESSABLE
1515 and a stack slot will be assigned when the rtl is made. */
1516 if (reg == 0)
1517 return;
00d8a4c1
RK
1518
1519 /* Get the declared mode for this object. */
1520 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1521 : DECL_MODE (decl));
2baccce2
RS
1522 /* Get the mode it's actually stored in. */
1523 promoted_mode = GET_MODE (reg);
6f086dfc
RS
1524
1525 /* If this variable comes from an outer function,
1526 find that function's saved context. */
4ac74fb8 1527 if (context != current_function_decl && context != inline_function_decl)
6f086dfc
RS
1528 for (function = outer_function_chain; function; function = function->next)
1529 if (function->decl == context)
1530 break;
1531
6f086dfc
RS
1532 /* If this is a variable-size object with a pseudo to address it,
1533 put that pseudo into the stack, if the var is nonlocal. */
a82ad570 1534 if (DECL_NONLOCAL (decl)
6f086dfc
RS
1535 && GET_CODE (reg) == MEM
1536 && GET_CODE (XEXP (reg, 0)) == REG
1537 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
4cdb3e78
RS
1538 {
1539 reg = XEXP (reg, 0);
1540 decl_mode = promoted_mode = GET_MODE (reg);
1541 }
e15762df 1542
e9a25f70
JL
1543 can_use_addressof
1544 = (function == 0
e5e809f4 1545 && optimize > 0
e9a25f70
JL
1546 /* FIXME make it work for promoted modes too */
1547 && decl_mode == promoted_mode
1548#ifdef NON_SAVING_SETJMP
1549 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1550#endif
1551 );
1552
1553 /* If we can't use ADDRESSOF, make sure we see through one we already
1554 generated. */
1555 if (! can_use_addressof && GET_CODE (reg) == MEM
1556 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1557 reg = XEXP (XEXP (reg, 0), 0);
1558
293e3de4
RS
1559 /* Now we should have a value that resides in one or more pseudo regs. */
1560
1561 if (GET_CODE (reg) == REG)
e9a25f70
JL
1562 {
1563 /* If this variable lives in the current function and we don't need
1564 to put things in the stack for the sake of setjmp, try to keep it
1565 in a register until we know we actually need the address. */
1566 if (can_use_addressof)
1567 gen_mem_addressof (reg, decl);
1568 else
1569 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1570 promoted_mode, decl_mode,
e5e809f4
JL
1571 TREE_SIDE_EFFECTS (decl), 0,
1572 TREE_USED (decl)
1573 || DECL_INITIAL (decl) != 0);
e9a25f70 1574 }
293e3de4
RS
1575 else if (GET_CODE (reg) == CONCAT)
1576 {
1577 /* A CONCAT contains two pseudos; put them both in the stack.
1578 We do it so they end up consecutive. */
1579 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1580 tree part_type = TREE_TYPE (TREE_TYPE (decl));
4738c10d 1581#ifdef FRAME_GROWS_DOWNWARD
293e3de4 1582 /* Since part 0 should have a lower address, do it second. */
0006e95b 1583 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
e5e809f4
JL
1584 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1585 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
0006e95b 1586 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
e5e809f4
JL
1587 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1588 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
293e3de4 1589#else
0006e95b 1590 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
e5e809f4
JL
1591 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1592 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
0006e95b 1593 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
e5e809f4
JL
1594 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1595 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
293e3de4
RS
1596#endif
1597
1598 /* Change the CONCAT into a combined MEM for both parts. */
1599 PUT_CODE (reg, MEM);
0006e95b 1600 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
41472af8 1601 MEM_ALIAS_SET (reg) = get_alias_set (decl);
0006e95b 1602
293e3de4
RS
1603 /* The two parts are in memory order already.
1604 Use the lower parts address as ours. */
1605 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1606 /* Prevent sharing of rtl that might lose. */
1607 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1608 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1609 }
86fa911a
RK
1610 else
1611 return;
1612
7d384cc0 1613 if (current_function_check_memory_usage)
86fa911a
RK
1614 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1615 XEXP (reg, 0), ptr_mode,
1616 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1617 TYPE_MODE (sizetype),
956d6950
JL
1618 GEN_INT (MEMORY_USE_RW),
1619 TYPE_MODE (integer_type_node));
293e3de4
RS
1620}
1621
1622/* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1623 into the stack frame of FUNCTION (0 means the current function).
1624 DECL_MODE is the machine mode of the user-level data type.
0006e95b 1625 PROMOTED_MODE is the machine mode of the register.
e5e809f4
JL
1626 VOLATILE_P is nonzero if this is for a "volatile" decl.
1627 USED_P is nonzero if this reg might have already been used in an insn. */
293e3de4
RS
1628
1629static void
e9a25f70 1630put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
e5e809f4 1631 original_regno, used_p)
293e3de4
RS
1632 struct function *function;
1633 rtx reg;
1634 tree type;
1635 enum machine_mode promoted_mode, decl_mode;
0006e95b 1636 int volatile_p;
e9a25f70 1637 int original_regno;
e5e809f4 1638 int used_p;
293e3de4
RS
1639{
1640 rtx new = 0;
e9a25f70
JL
1641 int regno = original_regno;
1642
1643 if (regno == 0)
1644 regno = REGNO (reg);
6f086dfc
RS
1645
1646 if (function)
1647 {
e9a25f70
JL
1648 if (regno < function->max_parm_reg)
1649 new = function->parm_reg_stack_loc[regno];
6f086dfc 1650 if (new == 0)
e15762df 1651 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
6f086dfc
RS
1652 0, function);
1653 }
1654 else
1655 {
e9a25f70
JL
1656 if (regno < max_parm_reg)
1657 new = parm_reg_stack_loc[regno];
6f086dfc 1658 if (new == 0)
e15762df 1659 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
6f086dfc
RS
1660 }
1661
0006e95b 1662 PUT_MODE (reg, decl_mode);
6f086dfc
RS
1663 XEXP (reg, 0) = XEXP (new, 0);
1664 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
0006e95b 1665 MEM_VOLATILE_P (reg) = volatile_p;
6f086dfc
RS
1666 PUT_CODE (reg, MEM);
1667
1668 /* If this is a memory ref that contains aggregate components,
bdd3e6ab
JW
1669 mark it as such for cse and loop optimize. If we are reusing a
1670 previously generated stack slot, then we need to copy the bit in
1671 case it was set for other reasons. For instance, it is set for
1672 __builtin_va_alist. */
c6df88cb
MM
1673 MEM_SET_IN_STRUCT_P (reg,
1674 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
41472af8 1675 MEM_ALIAS_SET (reg) = get_alias_set (type);
6f086dfc
RS
1676
1677 /* Now make sure that all refs to the variable, previously made
1678 when it was a register, are fixed up to be valid again. */
e5e809f4
JL
1679
1680 if (used_p && function != 0)
6f086dfc
RS
1681 {
1682 struct var_refs_queue *temp;
1683
1684 /* Variable is inherited; fix it up when we get back to its function. */
1685 push_obstacks (function->function_obstack,
1686 function->function_maybepermanent_obstack);
4da73fa0
RK
1687
1688 /* See comment in restore_tree_status in tree.c for why this needs to be
1689 on saveable obstack. */
6f086dfc 1690 temp
4da73fa0 1691 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
6f086dfc 1692 temp->modified = reg;
00d8a4c1 1693 temp->promoted_mode = promoted_mode;
293e3de4 1694 temp->unsignedp = TREE_UNSIGNED (type);
6f086dfc
RS
1695 temp->next = function->fixup_var_refs_queue;
1696 function->fixup_var_refs_queue = temp;
1697 pop_obstacks ();
1698 }
e5e809f4 1699 else if (used_p)
6f086dfc 1700 /* Variable is local; fix it up now. */
293e3de4 1701 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
6f086dfc
RS
1702}
1703\f
1704static void
00d8a4c1 1705fixup_var_refs (var, promoted_mode, unsignedp)
6f086dfc 1706 rtx var;
00d8a4c1
RK
1707 enum machine_mode promoted_mode;
1708 int unsignedp;
6f086dfc
RS
1709{
1710 tree pending;
1711 rtx first_insn = get_insns ();
1712 struct sequence_stack *stack = sequence_stack;
1713 tree rtl_exps = rtl_expr_chain;
1714
1715 /* Must scan all insns for stack-refs that exceed the limit. */
00d8a4c1 1716 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
6f086dfc
RS
1717
1718 /* Scan all pending sequences too. */
1719 for (; stack; stack = stack->next)
1720 {
1721 push_to_sequence (stack->first);
00d8a4c1
RK
1722 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1723 stack->first, stack->next != 0);
6f086dfc
RS
1724 /* Update remembered end of sequence
1725 in case we added an insn at the end. */
1726 stack->last = get_last_insn ();
1727 end_sequence ();
1728 }
1729
1730 /* Scan all waiting RTL_EXPRs too. */
1731 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1732 {
1733 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1734 if (seq != const0_rtx && seq != 0)
1735 {
1736 push_to_sequence (seq);
00d8a4c1 1737 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
6f086dfc
RS
1738 end_sequence ();
1739 }
1740 }
d33c2956
DB
1741
1742 /* Scan the catch clauses for exception handling too. */
1743 push_to_sequence (catch_clauses);
1744 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0);
1745 end_sequence ();
6f086dfc
RS
1746}
1747\f
e15679f8 1748/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
6f086dfc 1749 some part of an insn. Return a struct fixup_replacement whose OLD
0f41302f 1750 value is equal to X. Allocate a new structure if no such entry exists. */
6f086dfc
RS
1751
1752static struct fixup_replacement *
2740a678 1753find_fixup_replacement (replacements, x)
6f086dfc
RS
1754 struct fixup_replacement **replacements;
1755 rtx x;
1756{
1757 struct fixup_replacement *p;
1758
1759 /* See if we have already replaced this. */
1760 for (p = *replacements; p && p->old != x; p = p->next)
1761 ;
1762
1763 if (p == 0)
1764 {
1765 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1766 p->old = x;
1767 p->new = 0;
1768 p->next = *replacements;
1769 *replacements = p;
1770 }
1771
1772 return p;
1773}
1774
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.

   VAR is a MEM that replaced a pseudo register; PROMOTED_MODE was the
   mode of that pseudo, and UNSIGNEDP its signedness, used if a mode
   conversion is needed when copying into a replacement pseudo.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  /* Destination of the most recently seen CALL_INSN's SET, if any;
     only tracked when SMALL_REGISTER_CLASSES.  */
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      /* Only real insns (class 'i') need fixing; notes, barriers and
	 labels are left alone.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  /* Remember the destination of this call, if its pattern
		     is a SET (plain or first element of a PARALLEL).  */
		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      /* Copy VAR (or its fixed-up SUBREG/stack address) into each
		 replacement pseudo that fixup_var_refs_1 allocated.  */
	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}
      insn = next;
    }
}
1963\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements. If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement. If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = gen_sequence ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */

		  start_sequence ();
		  force_operand (sub, y);
		  seq = gen_sequence ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  /* Otherwise allocate a pseudo; fixup_var_refs_insns will emit
	     the copy from VAR into it.  */
	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* These codes have no subexpressions that could mention VAR.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  /* Widening SUBREG: substitute a pseudo for VAR inside it;
		     the copy into the pseudo is emitted by our caller.  */
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx_MEM (wanted_mode,
					plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_COPY_ATTRIBUTES (newmem, tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
#ifdef HAVE_insv
	rtx outerdest = dest;
#endif

	/* Strip wrappers to find the register or memory actually set.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
		if (wanted_mode == VOIDmode)
		  wanted_mode = word_mode;

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    /* If the bytes and bits are counted differently, we
		       must adjust the offset.  */
		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx_MEM (wanted_mode,
					  plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_COPY_ATTRIBUTES (newmem, tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* As above: emit the move as new insns if it takes more than
	       one, deleting INSN; otherwise replace INSN's pattern.  */
	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		promoted_mode = GET_MODE (fixeddest);
	      }
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (promoted_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      /* Fall through to scan the operands of the SET.  */

    default:
      break;
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
2544\f
2545/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2546 return an rtx (MEM:m1 newaddr) which is equivalent.
2547 If any insns must be emitted to compute NEWADDR, put them before INSN.
2548
2549 UNCRITICAL nonzero means accept paradoxical subregs.
0f41302f 2550 This is used for subregs found inside REG_NOTES. */
6f086dfc
RS
2551
2552static rtx
2553fixup_memory_subreg (x, insn, uncritical)
2554 rtx x;
2555 rtx insn;
2556 int uncritical;
2557{
2558 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2559 rtx addr = XEXP (SUBREG_REG (x), 0);
2560 enum machine_mode mode = GET_MODE (x);
29a82058 2561 rtx result;
6f086dfc
RS
2562
2563 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2564 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2565 && ! uncritical)
2566 abort ();
2567
f76b9db2
ILT
2568 if (BYTES_BIG_ENDIAN)
2569 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2570 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
6f086dfc
RS
2571 addr = plus_constant (addr, offset);
2572 if (!flag_force_addr && memory_address_p (mode, addr))
2573 /* Shortcut if no insns need be emitted. */
2574 return change_address (SUBREG_REG (x), mode, addr);
2575 start_sequence ();
2576 result = change_address (SUBREG_REG (x), mode, addr);
2577 emit_insn_before (gen_sequence (), insn);
2578 end_sequence ();
2579 return result;
2580}
2581
2582/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2583 Replace subexpressions of X in place.
2584 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2585 Otherwise return X, with its contents possibly altered.
2586
ab6155b7
RK
2587 If any insns must be emitted to compute NEWADDR, put them before INSN.
2588
2589 UNCRITICAL is as in fixup_memory_subreg. */
6f086dfc
RS
2590
2591static rtx
ab6155b7 2592walk_fixup_memory_subreg (x, insn, uncritical)
6f086dfc
RS
2593 register rtx x;
2594 rtx insn;
ab6155b7 2595 int uncritical;
6f086dfc
RS
2596{
2597 register enum rtx_code code;
2598 register char *fmt;
2599 register int i;
2600
2601 if (x == 0)
2602 return 0;
2603
2604 code = GET_CODE (x);
2605
2606 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
ab6155b7 2607 return fixup_memory_subreg (x, insn, uncritical);
6f086dfc
RS
2608
2609 /* Nothing special about this RTX; fix its operands. */
2610
2611 fmt = GET_RTX_FORMAT (code);
2612 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2613 {
2614 if (fmt[i] == 'e')
ab6155b7 2615 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
6f086dfc
RS
2616 if (fmt[i] == 'E')
2617 {
2618 register int j;
2619 for (j = 0; j < XVECLEN (x, i); j++)
2620 XVECEXP (x, i, j)
ab6155b7 2621 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
6f086dfc
RS
2622 }
2623 }
2624 return x;
2625}
2626\f
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.
	 Only (PLUS base const_int) forms whose base is a stack-related
	 register (virtual, frame, stack, or arg pointer) are handled.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  /* Address is already valid: nothing to do.  */
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  /* Compute the address into a register; emit those insns
	     before INSN, and return a fresh MEM using the register.  */
	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  /* Not a MEM: recurse into all rtx operands and vector elements.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
2688\f
2689/* Optimization: a bit-field instruction whose field
2690 happens to be a byte or halfword in memory
2691 can be changed to a move instruction.
2692
2693 We call here when INSN is an insn to examine or store into a bit-field.
2694 BODY is the SET-rtx to be altered.
2695
2696 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2697 (Currently this is called only from function.c, and EQUIV_MEM
2698 is always 0.) */
2699
2700static void
2701optimize_bit_field (body, insn, equiv_mem)
2702 rtx body;
2703 rtx insn;
2704 rtx *equiv_mem;
2705{
2706 register rtx bitfield;
2707 int destflag;
2708 rtx seq = 0;
2709 enum machine_mode mode;
2710
2711 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2712 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2713 bitfield = SET_DEST (body), destflag = 1;
2714 else
2715 bitfield = SET_SRC (body), destflag = 0;
2716
2717 /* First check that the field being stored has constant size and position
2718 and is in fact a byte or halfword suitably aligned. */
2719
2720 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2721 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2722 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2723 != BLKmode)
2724 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2725 {
2726 register rtx memref = 0;
2727
2728 /* Now check that the containing word is memory, not a register,
2729 and that it is safe to change the machine mode. */
2730
2731 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2732 memref = XEXP (bitfield, 0);
2733 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2734 && equiv_mem != 0)
2735 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2736 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2737 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2738 memref = SUBREG_REG (XEXP (bitfield, 0));
2739 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2740 && equiv_mem != 0
2741 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2742 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2743
2744 if (memref
2745 && ! mode_dependent_address_p (XEXP (memref, 0))
2746 && ! MEM_VOLATILE_P (memref))
2747 {
2748 /* Now adjust the address, first for any subreg'ing
2749 that we are now getting rid of,
2750 and then for which byte of the word is wanted. */
2751
e5e809f4 2752 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
b88a3142
RK
2753 rtx insns;
2754
6f086dfc 2755 /* Adjust OFFSET to count bits from low-address byte. */
f76b9db2
ILT
2756 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2757 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2758 - offset - INTVAL (XEXP (bitfield, 1)));
2759
6f086dfc
RS
2760 /* Adjust OFFSET to count bytes from low-address byte. */
2761 offset /= BITS_PER_UNIT;
2762 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2763 {
2764 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
f76b9db2
ILT
2765 if (BYTES_BIG_ENDIAN)
2766 offset -= (MIN (UNITS_PER_WORD,
2767 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2768 - MIN (UNITS_PER_WORD,
2769 GET_MODE_SIZE (GET_MODE (memref))));
6f086dfc
RS
2770 }
2771
b88a3142
RK
2772 start_sequence ();
2773 memref = change_address (memref, mode,
6f086dfc 2774 plus_constant (XEXP (memref, 0), offset));
b88a3142
RK
2775 insns = get_insns ();
2776 end_sequence ();
2777 emit_insns_before (insns, insn);
6f086dfc
RS
2778
2779 /* Store this memory reference where
2780 we found the bit field reference. */
2781
2782 if (destflag)
2783 {
2784 validate_change (insn, &SET_DEST (body), memref, 1);
2785 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2786 {
2787 rtx src = SET_SRC (body);
2788 while (GET_CODE (src) == SUBREG
2789 && SUBREG_WORD (src) == 0)
2790 src = SUBREG_REG (src);
2791 if (GET_MODE (src) != GET_MODE (memref))
2792 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2793 validate_change (insn, &SET_SRC (body), src, 1);
2794 }
2795 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2796 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2797 /* This shouldn't happen because anything that didn't have
2798 one of these modes should have got converted explicitly
2799 and then referenced through a subreg.
2800 This is so because the original bit-field was
2801 handled by agg_mode and so its tree structure had
2802 the same mode that memref now has. */
2803 abort ();
2804 }
2805 else
2806 {
2807 rtx dest = SET_DEST (body);
2808
2809 while (GET_CODE (dest) == SUBREG
4013a709
RK
2810 && SUBREG_WORD (dest) == 0
2811 && (GET_MODE_CLASS (GET_MODE (dest))
ab87f8c8
JL
2812 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2813 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2814 <= UNITS_PER_WORD))
6f086dfc
RS
2815 dest = SUBREG_REG (dest);
2816
2817 validate_change (insn, &SET_DEST (body), dest, 1);
2818
2819 if (GET_MODE (dest) == GET_MODE (memref))
2820 validate_change (insn, &SET_SRC (body), memref, 1);
2821 else
2822 {
2823 /* Convert the mem ref to the destination mode. */
2824 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2825
2826 start_sequence ();
2827 convert_move (newreg, memref,
2828 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2829 seq = get_insns ();
2830 end_sequence ();
2831
2832 validate_change (insn, &SET_SRC (body), newreg, 1);
2833 }
2834 }
2835
2836 /* See if we can convert this extraction or insertion into
2837 a simple move insn. We might not be able to do so if this
2838 was, for example, part of a PARALLEL.
2839
2840 If we succeed, write out any needed conversions. If we fail,
2841 it is hard to guess why we failed, so don't do anything
2842 special; just let the optimization be suppressed. */
2843
2844 if (apply_change_group () && seq)
2845 emit_insns_before (seq, insn);
2846 }
2847 }
2848}
2849\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
6f086dfc
RS
2862
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif

/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif
2905
2906
e9a25f70
JL
2907/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2908 its address taken. DECL is the decl for the object stored in the
2909 register, for later use if we do need to force REG into the stack.
2910 REG is overwritten by the MEM like in put_reg_into_stack. */
2911
2912rtx
2913gen_mem_addressof (reg, decl)
2914 rtx reg;
2915 tree decl;
2916{
2917 tree type = TREE_TYPE (decl);
38a448ca 2918 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
e9a25f70 2919 SET_ADDRESSOF_DECL (r, decl);
95ca22f4
MM
2920 /* If the original REG was a user-variable, then so is the REG whose
2921 address is being taken. */
2922 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
e9a25f70
JL
2923
2924 XEXP (reg, 0) = r;
2925 PUT_CODE (reg, MEM);
2926 PUT_MODE (reg, DECL_MODE (decl));
2927 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
c6df88cb 2928 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
41472af8 2929 MEM_ALIAS_SET (reg) = get_alias_set (decl);
e9a25f70 2930
e5e809f4
JL
2931 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2932 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2933
e9a25f70
JL
2934 return reg;
2935}
2936
2937/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2938
2939void
2940flush_addressof (decl)
2941 tree decl;
2942{
2943 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2944 && DECL_RTL (decl) != 0
2945 && GET_CODE (DECL_RTL (decl)) == MEM
2946 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2947 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2948 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2949}
2950
2951/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2952
2953static void
2954put_addressof_into_stack (r)
2955 rtx r;
2956{
2957 tree decl = ADDRESSOF_DECL (r);
2958 rtx reg = XEXP (r, 0);
2959
2960 if (GET_CODE (reg) != REG)
2961 abort ();
2962
2963 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2964 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
e5e809f4
JL
2965 ADDRESSOF_REGNO (r),
2966 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
e9a25f70
JL
2967}
2968
b5bd3b3c
AS
2969/* List of replacements made below in purge_addressof_1 when creating
2970 bitfield insertions. */
2971static rtx purge_addressof_replacements;
2972
e9a25f70
JL
2973/* Helper function for purge_addressof. See if the rtx expression at *LOC
2974 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2975 the stack. */
2976
2977static void
f7b6d104 2978purge_addressof_1 (loc, insn, force, store)
e9a25f70
JL
2979 rtx *loc;
2980 rtx insn;
f7b6d104 2981 int force, store;
e9a25f70
JL
2982{
2983 rtx x;
2984 RTX_CODE code;
2985 int i, j;
2986 char *fmt;
2987
2988 /* Re-start here to avoid recursion in common cases. */
2989 restart:
2990
2991 x = *loc;
2992 if (x == 0)
2993 return;
2994
2995 code = GET_CODE (x);
2996
2997 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2998 {
2999 rtx insns;
956d6950
JL
3000 /* We must create a copy of the rtx because it was created by
3001 overwriting a REG rtx which is always shared. */
3002 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
e9a25f70 3003
ab87f8c8
JL
3004 if (validate_change (insn, loc, sub, 0)
3005 || validate_replace_rtx (x, sub, insn))
e9a25f70 3006 return;
ab87f8c8 3007
e9a25f70 3008 start_sequence ();
ab87f8c8
JL
3009 sub = force_operand (sub, NULL_RTX);
3010 if (! validate_change (insn, loc, sub, 0)
3011 && ! validate_replace_rtx (x, sub, insn))
e9a25f70
JL
3012 abort ();
3013
f7b6d104 3014 insns = gen_sequence ();
e9a25f70 3015 end_sequence ();
18e765cb 3016 emit_insn_before (insns, insn);
e9a25f70
JL
3017 return;
3018 }
3019 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3020 {
3021 rtx sub = XEXP (XEXP (x, 0), 0);
ab87f8c8 3022 rtx sub2;
e5e809f4 3023
6d8ccdbb 3024 if (GET_CODE (sub) == MEM)
ab87f8c8
JL
3025 {
3026 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3027 MEM_COPY_ATTRIBUTES (sub2, sub);
3028 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3029 sub = sub2;
3030 }
e5e809f4 3031
f5963e61
JL
3032 if (GET_CODE (sub) == REG
3033 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
e5e809f4
JL
3034 {
3035 put_addressof_into_stack (XEXP (x, 0));
3036 return;
3037 }
3038 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
e9a25f70 3039 {
f7b6d104
RH
3040 int size_x, size_sub;
3041
b5bd3b3c
AS
3042 if (!insn)
3043 {
3044 /* When processing REG_NOTES look at the list of
3045 replacements done on the insn to find the register that X
3046 was replaced by. */
3047 rtx tem;
3048
3049 for (tem = purge_addressof_replacements; tem != NULL_RTX;
3050 tem = XEXP (XEXP (tem, 1), 1))
fbdfe39c
RH
3051 {
3052 rtx y = XEXP (tem, 0);
3053 if (GET_CODE (y) == MEM
3054 && rtx_equal_p (XEXP (x, 0), XEXP (y, 0)))
3055 {
3056 /* It can happen that the note may speak of things in
3057 a wider (or just different) mode than the code did.
3058 This is especially true of REG_RETVAL. */
3059
3060 rtx z = XEXP (XEXP (tem, 1), 0);
3061 if (GET_MODE (x) != GET_MODE (y))
3062 {
3063 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3064 z = SUBREG_REG (z);
3065
3066 /* ??? If we'd gotten into any of the really complex
3067 cases below, I'm not sure we can do a proper
3068 replacement. Might we be able to delete the
3069 note in some cases? */
3070 if (GET_MODE_SIZE (GET_MODE (x))
3071 < GET_MODE_SIZE (GET_MODE (y)))
3072 abort ();
3073
d91dfff4
R
3074 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3075 && (GET_MODE_SIZE (GET_MODE (x))
3076 > GET_MODE_SIZE (GET_MODE (z))))
3077 {
3078 /* This can occur as a result in invalid
3079 pointer casts, e.g. float f; ...
3080 *(long long int *)&f.
3081 ??? We could emit a warning here, but
3082 without a line number that wouldn't be
3083 very helpful. */
3084 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3085 }
3086 else
3087 z = gen_lowpart (GET_MODE (x), z);
fbdfe39c
RH
3088 }
3089
3090 *loc = z;
3091 return;
3092 }
3093 }
b5bd3b3c
AS
3094
3095 /* There should always be such a replacement. */
3096 abort ();
3097 }
3098
f7b6d104
RH
3099 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3100 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3101
3102 /* Don't even consider working with paradoxical subregs,
3103 or the moral equivalent seen here. */
470032d7 3104 if (size_x <= size_sub
d006aa54 3105 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
e9a25f70 3106 {
f7b6d104
RH
3107 /* Do a bitfield insertion to mirror what would happen
3108 in memory. */
3109
f7b6d104
RH
3110 rtx val, seq;
3111
f7b6d104
RH
3112 if (store)
3113 {
de0dd934
R
3114 rtx p;
3115
f7b6d104
RH
3116 start_sequence ();
3117 val = gen_reg_rtx (GET_MODE (x));
3118 if (! validate_change (insn, loc, val, 0))
b5bd3b3c
AS
3119 {
3120 /* Discard the current sequence and put the
3121 ADDRESSOF on stack. */
3122 end_sequence ();
3123 goto give_up;
3124 }
f7b6d104
RH
3125 seq = gen_sequence ();
3126 end_sequence ();
3127 emit_insn_before (seq, insn);
3128
3129 start_sequence ();
47401c4d 3130 store_bit_field (sub, size_x, 0, GET_MODE (x),
f7b6d104
RH
3131 val, GET_MODE_SIZE (GET_MODE (sub)),
3132 GET_MODE_SIZE (GET_MODE (sub)));
3133
de0dd934
R
3134 /* Make sure to unshare any shared rtl that store_bit_field
3135 might have created. */
3136 for (p = get_insns(); p; p = NEXT_INSN (p))
3137 {
3138 reset_used_flags (PATTERN (p));
3139 reset_used_flags (REG_NOTES (p));
3140 reset_used_flags (LOG_LINKS (p));
3141 }
3142 unshare_all_rtl (get_insns ());
3143
f7b6d104
RH
3144 seq = gen_sequence ();
3145 end_sequence ();
3146 emit_insn_after (seq, insn);
3147 }
3148 else
3149 {
3150 start_sequence ();
47401c4d 3151 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
f7b6d104
RH
3152 GET_MODE (x), GET_MODE (x),
3153 GET_MODE_SIZE (GET_MODE (sub)),
3154 GET_MODE_SIZE (GET_MODE (sub)));
3155
f7b6d104 3156 if (! validate_change (insn, loc, val, 0))
b5bd3b3c
AS
3157 {
3158 /* Discard the current sequence and put the
3159 ADDRESSOF on stack. */
3160 end_sequence ();
3161 goto give_up;
3162 }
f7b6d104
RH
3163
3164 seq = gen_sequence ();
3165 end_sequence ();
3166 emit_insn_before (seq, insn);
3167 }
3168
b5bd3b3c
AS
3169 /* Remember the replacement so that the same one can be done
3170 on the REG_NOTES. */
3171 purge_addressof_replacements
3172 = gen_rtx_EXPR_LIST (VOIDmode, x,
3173 gen_rtx_EXPR_LIST (VOIDmode, val,
3174 purge_addressof_replacements));
3175
f7b6d104
RH
3176 /* We replaced with a reg -- all done. */
3177 return;
e9a25f70
JL
3178 }
3179 }
3180 else if (validate_change (insn, loc, sub, 0))
fbdfe39c
RH
3181 {
3182 /* Remember the replacement so that the same one can be done
3183 on the REG_NOTES. */
3184 purge_addressof_replacements
3185 = gen_rtx_EXPR_LIST (VOIDmode, x,
3186 gen_rtx_EXPR_LIST (VOIDmode, sub,
3187 purge_addressof_replacements));
3188 goto restart;
3189 }
b5bd3b3c 3190 give_up:;
e9a25f70
JL
3191 /* else give up and put it into the stack */
3192 }
3193 else if (code == ADDRESSOF)
3194 {
3195 put_addressof_into_stack (x);
3196 return;
3197 }
f7b6d104
RH
3198 else if (code == SET)
3199 {
3200 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
3201 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
3202 return;
3203 }
e9a25f70
JL
3204
3205 /* Scan all subexpressions. */
3206 fmt = GET_RTX_FORMAT (code);
3207 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3208 {
3209 if (*fmt == 'e')
f7b6d104 3210 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
e9a25f70
JL
3211 else if (*fmt == 'E')
3212 for (j = 0; j < XVECLEN (x, i); j++)
f7b6d104 3213 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
e9a25f70
JL
3214 }
3215}
3216
3217/* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3218 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3219 stack. */
3220
3221void
3222purge_addressof (insns)
3223 rtx insns;
3224{
3225 rtx insn;
3226 for (insn = insns; insn; insn = NEXT_INSN (insn))
3227 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3228 || GET_CODE (insn) == CALL_INSN)
3229 {
3230 purge_addressof_1 (&PATTERN (insn), insn,
f7b6d104
RH
3231 asm_noperands (PATTERN (insn)) > 0, 0);
3232 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
e9a25f70 3233 }
da9b1f9c 3234 purge_addressof_replacements = 0;
e9a25f70
JL
3235}
3236\f
6f086dfc
RS
3237/* Pass through the INSNS of function FNDECL and convert virtual register
3238 references to hard register references. */
3239
3240void
3241instantiate_virtual_regs (fndecl, insns)
3242 tree fndecl;
3243 rtx insns;
3244{
3245 rtx insn;
e9a25f70 3246 int i;
6f086dfc
RS
3247
3248 /* Compute the offsets to use for this function. */
3249 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3250 var_offset = STARTING_FRAME_OFFSET;
3251 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3252 out_arg_offset = STACK_POINTER_OFFSET;
71038426 3253 cfa_offset = ARG_POINTER_CFA_OFFSET;
6f086dfc
RS
3254
3255 /* Scan all variables and parameters of this function. For each that is
3256 in memory, instantiate all virtual registers if the result is a valid
3257 address. If not, we do it later. That will handle most uses of virtual
3258 regs on many machines. */
3259 instantiate_decls (fndecl, 1);
3260
3261 /* Initialize recognition, indicating that volatile is OK. */
3262 init_recog ();
3263
3264 /* Scan through all the insns, instantiating every virtual register still
3265 present. */
3266 for (insn = insns; insn; insn = NEXT_INSN (insn))
3267 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3268 || GET_CODE (insn) == CALL_INSN)
3269 {
3270 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
5f4f0e22 3271 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
6f086dfc
RS
3272 }
3273
e9a25f70
JL
3274 /* Instantiate the stack slots for the parm registers, for later use in
3275 addressof elimination. */
3276 for (i = 0; i < max_parm_reg; ++i)
3277 if (parm_reg_stack_loc[i])
3278 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3279
6f086dfc
RS
3280 /* Now instantiate the remaining register equivalences for debugging info.
3281 These will not be valid addresses. */
3282 instantiate_decls (fndecl, 0);
3283
3284 /* Indicate that, from now on, assign_stack_local should use
3285 frame_pointer_rtx. */
3286 virtuals_instantiated = 1;
3287}
3288
3289/* Scan all decls in FNDECL (both variables and parameters) and instantiate
3290 all virtual registers in their DECL_RTL's.
3291
3292 If VALID_ONLY, do this only if the resulting address is still valid.
3293 Otherwise, always do it. */
3294
3295static void
3296instantiate_decls (fndecl, valid_only)
3297 tree fndecl;
3298 int valid_only;
3299{
3300 tree decl;
3301
e1686233 3302 if (DECL_SAVED_INSNS (fndecl))
6f086dfc
RS
3303 /* When compiling an inline function, the obstack used for
3304 rtl allocation is the maybepermanent_obstack. Calling
3305 `resume_temporary_allocation' switches us back to that
3306 obstack while we process this function's parameters. */
3307 resume_temporary_allocation ();
3308
3309 /* Process all parameters of the function. */
3310 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3311 {
e5e809f4
JL
3312 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3313
ce717ce4
JW
3314 instantiate_decl (DECL_RTL (decl), size, valid_only);
3315
3316 /* If the parameter was promoted, then the incoming RTL mode may be
3317 larger than the declared type size. We must use the larger of
3318 the two sizes. */
3319 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3320 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
6f086dfc
RS
3321 }
3322
0f41302f 3323 /* Now process all variables defined in the function or its subblocks. */
6f086dfc
RS
3324 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3325
79c0672e 3326 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
6f086dfc
RS
3327 {
3328 /* Save all rtl allocated for this function by raising the
3329 high-water mark on the maybepermanent_obstack. */
3330 preserve_data ();
3331 /* All further rtl allocation is now done in the current_obstack. */
3332 rtl_in_current_obstack ();
3333 }
3334}
3335
3336/* Subroutine of instantiate_decls: Process all decls in the given
3337 BLOCK node and all its subblocks. */
3338
3339static void
3340instantiate_decls_1 (let, valid_only)
3341 tree let;
3342 int valid_only;
3343{
3344 tree t;
3345
3346 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
5a73491b
RK
3347 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3348 valid_only);
6f086dfc
RS
3349
3350 /* Process all subblocks. */
3351 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3352 instantiate_decls_1 (t, valid_only);
3353}
5a73491b 3354
8008b228 3355/* Subroutine of the preceding procedures: Given RTL representing a
5a73491b
RK
3356 decl and the size of the object, do any instantiation required.
3357
3358 If VALID_ONLY is non-zero, it means that the RTL should only be
3359 changed if the new address is valid. */
3360
3361static void
3362instantiate_decl (x, size, valid_only)
3363 rtx x;
3364 int size;
3365 int valid_only;
3366{
3367 enum machine_mode mode;
3368 rtx addr;
3369
3370 /* If this is not a MEM, no need to do anything. Similarly if the
3371 address is a constant or a register that is not a virtual register. */
3372
3373 if (x == 0 || GET_CODE (x) != MEM)
3374 return;
3375
3376 addr = XEXP (x, 0);
3377 if (CONSTANT_P (addr)
956d6950 3378 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
5a73491b
RK
3379 || (GET_CODE (addr) == REG
3380 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3381 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3382 return;
3383
3384 /* If we should only do this if the address is valid, copy the address.
3385 We need to do this so we can undo any changes that might make the
3386 address invalid. This copy is unfortunate, but probably can't be
3387 avoided. */
3388
3389 if (valid_only)
3390 addr = copy_rtx (addr);
3391
3392 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3393
87ce34d6
JW
3394 if (valid_only)
3395 {
3396 /* Now verify that the resulting address is valid for every integer or
3397 floating-point mode up to and including SIZE bytes long. We do this
3398 since the object might be accessed in any mode and frame addresses
3399 are shared. */
3400
3401 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3402 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3403 mode = GET_MODE_WIDER_MODE (mode))
3404 if (! memory_address_p (mode, addr))
3405 return;
5a73491b 3406
87ce34d6
JW
3407 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3408 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3409 mode = GET_MODE_WIDER_MODE (mode))
3410 if (! memory_address_p (mode, addr))
3411 return;
3412 }
5a73491b 3413
87ce34d6
JW
3414 /* Put back the address now that we have updated it and we either know
3415 it is valid or we don't care whether it is valid. */
5a73491b
RK
3416
3417 XEXP (x, 0) = addr;
3418}
6f086dfc
RS
3419\f
3420/* Given a pointer to a piece of rtx and an optional pointer to the
3421 containing object, instantiate any virtual registers present in it.
3422
3423 If EXTRA_INSNS, we always do the replacement and generate
3424 any extra insns before OBJECT. If it zero, we do nothing if replacement
3425 is not valid.
3426
3427 Return 1 if we either had nothing to do or if we were able to do the
3428 needed replacement. Return 0 otherwise; we only return zero if
3429 EXTRA_INSNS is zero.
3430
3431 We first try some simple transformations to avoid the creation of extra
3432 pseudos. */
3433
3434static int
3435instantiate_virtual_regs_1 (loc, object, extra_insns)
3436 rtx *loc;
3437 rtx object;
3438 int extra_insns;
3439{
3440 rtx x;
3441 RTX_CODE code;
3442 rtx new = 0;
e5e809f4 3443 HOST_WIDE_INT offset;
6f086dfc
RS
3444 rtx temp;
3445 rtx seq;
3446 int i, j;
3447 char *fmt;
3448
3449 /* Re-start here to avoid recursion in common cases. */
3450 restart:
3451
3452 x = *loc;
3453 if (x == 0)
3454 return 1;
3455
3456 code = GET_CODE (x);
3457
3458 /* Check for some special cases. */
3459 switch (code)
3460 {
3461 case CONST_INT:
3462 case CONST_DOUBLE:
3463 case CONST:
3464 case SYMBOL_REF:
3465 case CODE_LABEL:
3466 case PC:
3467 case CC0:
3468 case ASM_INPUT:
3469 case ADDR_VEC:
3470 case ADDR_DIFF_VEC:
3471 case RETURN:
3472 return 1;
3473
3474 case SET:
3475 /* We are allowed to set the virtual registers. This means that
38e01259 3476 the actual register should receive the source minus the
6f086dfc
RS
3477 appropriate offset. This is used, for example, in the handling
3478 of non-local gotos. */
3479 if (SET_DEST (x) == virtual_incoming_args_rtx)
3480 new = arg_pointer_rtx, offset = - in_arg_offset;
3481 else if (SET_DEST (x) == virtual_stack_vars_rtx)
dfd3dae6 3482 new = frame_pointer_rtx, offset = - var_offset;
6f086dfc
RS
3483 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3484 new = stack_pointer_rtx, offset = - dynamic_offset;
3485 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3486 new = stack_pointer_rtx, offset = - out_arg_offset;
71038426
RH
3487 else if (SET_DEST (x) == virtual_cfa_rtx)
3488 new = arg_pointer_rtx, offset = - cfa_offset;
6f086dfc
RS
3489
3490 if (new)
3491 {
3492 /* The only valid sources here are PLUS or REG. Just do
3493 the simplest possible thing to handle them. */
3494 if (GET_CODE (SET_SRC (x)) != REG
3495 && GET_CODE (SET_SRC (x)) != PLUS)
3496 abort ();
3497
3498 start_sequence ();
3499 if (GET_CODE (SET_SRC (x)) != REG)
5f4f0e22 3500 temp = force_operand (SET_SRC (x), NULL_RTX);
6f086dfc
RS
3501 else
3502 temp = SET_SRC (x);
5f4f0e22 3503 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
6f086dfc
RS
3504 seq = get_insns ();
3505 end_sequence ();
3506
3507 emit_insns_before (seq, object);
3508 SET_DEST (x) = new;
3509
e9a25f70 3510 if (! validate_change (object, &SET_SRC (x), temp, 0)
6f086dfc
RS
3511 || ! extra_insns)
3512 abort ();
3513
3514 return 1;
3515 }
3516
3517 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3518 loc = &SET_SRC (x);
3519 goto restart;
3520
3521 case PLUS:
3522 /* Handle special case of virtual register plus constant. */
3523 if (CONSTANT_P (XEXP (x, 1)))
3524 {
b1f82ccf 3525 rtx old, new_offset;
6f086dfc
RS
3526
3527 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3528 if (GET_CODE (XEXP (x, 0)) == PLUS)
3529 {
3530 rtx inner = XEXP (XEXP (x, 0), 0);
3531
3532 if (inner == virtual_incoming_args_rtx)
3533 new = arg_pointer_rtx, offset = in_arg_offset;
3534 else if (inner == virtual_stack_vars_rtx)
3535 new = frame_pointer_rtx, offset = var_offset;
3536 else if (inner == virtual_stack_dynamic_rtx)
3537 new = stack_pointer_rtx, offset = dynamic_offset;
3538 else if (inner == virtual_outgoing_args_rtx)
3539 new = stack_pointer_rtx, offset = out_arg_offset;
71038426
RH
3540 else if (inner == virtual_cfa_rtx)
3541 new = arg_pointer_rtx, offset = cfa_offset;
6f086dfc
RS
3542 else
3543 {
3544 loc = &XEXP (x, 0);
3545 goto restart;
3546 }
3547
3548 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3549 extra_insns);
38a448ca 3550 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
6f086dfc
RS
3551 }
3552
3553 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3554 new = arg_pointer_rtx, offset = in_arg_offset;
3555 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3556 new = frame_pointer_rtx, offset = var_offset;
3557 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3558 new = stack_pointer_rtx, offset = dynamic_offset;
3559 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3560 new = stack_pointer_rtx, offset = out_arg_offset;
71038426
RH
3561 else if (XEXP (x, 0) == virtual_cfa_rtx)
3562 new = arg_pointer_rtx, offset = cfa_offset;
6f086dfc
RS
3563 else
3564 {
3565 /* We know the second operand is a constant. Unless the
3566 first operand is a REG (which has been already checked),
3567 it needs to be checked. */
3568 if (GET_CODE (XEXP (x, 0)) != REG)
3569 {
3570 loc = &XEXP (x, 0);
3571 goto restart;
3572 }
3573 return 1;
3574 }
3575
b1f82ccf 3576 new_offset = plus_constant (XEXP (x, 1), offset);
6f086dfc 3577
b1f82ccf
DE
3578 /* If the new constant is zero, try to replace the sum with just
3579 the register. */
3580 if (new_offset == const0_rtx
3581 && validate_change (object, loc, new, 0))
6f086dfc
RS
3582 return 1;
3583
b1f82ccf
DE
3584 /* Next try to replace the register and new offset.
3585 There are two changes to validate here and we can't assume that
3586 in the case of old offset equals new just changing the register
3587 will yield a valid insn. In the interests of a little efficiency,
3588 however, we only call validate change once (we don't queue up the
0f41302f 3589 changes and then call apply_change_group). */
b1f82ccf
DE
3590
3591 old = XEXP (x, 0);
3592 if (offset == 0
3593 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3594 : (XEXP (x, 0) = new,
3595 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
6f086dfc
RS
3596 {
3597 if (! extra_insns)
3598 {
3599 XEXP (x, 0) = old;
3600 return 0;
3601 }
3602
3603 /* Otherwise copy the new constant into a register and replace
3604 constant with that register. */
3605 temp = gen_reg_rtx (Pmode);
b1f82ccf 3606 XEXP (x, 0) = new;
6f086dfc 3607 if (validate_change (object, &XEXP (x, 1), temp, 0))
b1f82ccf 3608 emit_insn_before (gen_move_insn (temp, new_offset), object);
6f086dfc
RS
3609 else
3610 {
3611 /* If that didn't work, replace this expression with a
3612 register containing the sum. */
3613
6f086dfc 3614 XEXP (x, 0) = old;
38a448ca 3615 new = gen_rtx_PLUS (Pmode, new, new_offset);
6f086dfc
RS
3616
3617 start_sequence ();
5f4f0e22 3618 temp = force_operand (new, NULL_RTX);
6f086dfc
RS
3619 seq = get_insns ();
3620 end_sequence ();
3621
3622 emit_insns_before (seq, object);
3623 if (! validate_change (object, loc, temp, 0)
3624 && ! validate_replace_rtx (x, temp, object))
3625 abort ();
3626 }
3627 }
3628
3629 return 1;
3630 }
3631
3632 /* Fall through to generic two-operand expression case. */
3633 case EXPR_LIST:
3634 case CALL:
3635 case COMPARE:
3636 case MINUS:
3637 case MULT:
3638 case DIV: case UDIV:
3639 case MOD: case UMOD:
3640 case AND: case IOR: case XOR:
45620ed4
RK
3641 case ROTATERT: case ROTATE:
3642 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
6f086dfc
RS
3643 case NE: case EQ:
3644 case GE: case GT: case GEU: case GTU:
3645 case LE: case LT: case LEU: case LTU:
3646 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3647 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3648 loc = &XEXP (x, 0);
3649 goto restart;
3650
3651 case MEM:
3652 /* Most cases of MEM that convert to valid addresses have already been
4fd796bb 3653 handled by our scan of decls. The only special handling we
6f086dfc 3654 need here is to make a copy of the rtx to ensure it isn't being
b335c2cc 3655 shared if we have to change it to a pseudo.
6f086dfc
RS
3656
3657 If the rtx is a simple reference to an address via a virtual register,
3658 it can potentially be shared. In such cases, first try to make it
3659 a valid address, which can also be shared. Otherwise, copy it and
3660 proceed normally.
3661
3662 First check for common cases that need no processing. These are
3663 usually due to instantiation already being done on a previous instance
3664 of a shared rtx. */
3665
3666 temp = XEXP (x, 0);
3667 if (CONSTANT_ADDRESS_P (temp)
3668#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3669 || temp == arg_pointer_rtx
b37f453b
DE
3670#endif
3671#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3672 || temp == hard_frame_pointer_rtx
6f086dfc
RS
3673#endif
3674 || temp == frame_pointer_rtx)
3675 return 1;
3676
3677 if (GET_CODE (temp) == PLUS
3678 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3679 && (XEXP (temp, 0) == frame_pointer_rtx
b37f453b
DE
3680#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3681 || XEXP (temp, 0) == hard_frame_pointer_rtx
3682#endif
6f086dfc
RS
3683#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3684 || XEXP (temp, 0) == arg_pointer_rtx
3685#endif
3686 ))
3687 return 1;
3688
3689 if (temp == virtual_stack_vars_rtx
3690 || temp == virtual_incoming_args_rtx
3691 || (GET_CODE (temp) == PLUS
3692 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3693 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3694 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3695 {
3696 /* This MEM may be shared. If the substitution can be done without
3697 the need to generate new pseudos, we want to do it in place
3698 so all copies of the shared rtx benefit. The call below will
3699 only make substitutions if the resulting address is still
3700 valid.
3701
3702 Note that we cannot pass X as the object in the recursive call
3703 since the insn being processed may not allow all valid
6461be14
RS
 3704	 addresses.  However, if we were not passed an object, we can
3705 only modify X without copying it if X will have a valid
3706 address.
6f086dfc 3707
6461be14
RS
3708 ??? Also note that this can still lose if OBJECT is an insn that
 3709	 has fewer restrictions on an address than some other insn.
3710 In that case, we will modify the shared address. This case
4fd796bb
RK
3711 doesn't seem very likely, though. One case where this could
3712 happen is in the case of a USE or CLOBBER reference, but we
3713 take care of that below. */
6461be14
RS
3714
3715 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3716 object ? object : x, 0))
6f086dfc
RS
3717 return 1;
3718
3719 /* Otherwise make a copy and process that copy. We copy the entire
3720 RTL expression since it might be a PLUS which could also be
3721 shared. */
3722 *loc = x = copy_rtx (x);
3723 }
3724
3725 /* Fall through to generic unary operation case. */
6f086dfc
RS
3726 case SUBREG:
3727 case STRICT_LOW_PART:
3728 case NEG: case NOT:
3729 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3730 case SIGN_EXTEND: case ZERO_EXTEND:
3731 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3732 case FLOAT: case FIX:
3733 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3734 case ABS:
3735 case SQRT:
3736 case FFS:
 3737	    /* These cases either have just one operand or we know that we need not
3738 check the rest of the operands. */
3739 loc = &XEXP (x, 0);
3740 goto restart;
3741
4fd796bb
RK
3742 case USE:
3743 case CLOBBER:
3744 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3745 go ahead and make the invalid one, but do it to a copy. For a REG,
3746 just make the recursive call, since there's no chance of a problem. */
3747
3748 if ((GET_CODE (XEXP (x, 0)) == MEM
3749 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3750 0))
3751 || (GET_CODE (XEXP (x, 0)) == REG
7694ce35 3752 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4fd796bb
RK
3753 return 1;
3754
3755 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3756 loc = &XEXP (x, 0);
3757 goto restart;
3758
6f086dfc
RS
3759 case REG:
3760 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3761 in front of this insn and substitute the temporary. */
3762 if (x == virtual_incoming_args_rtx)
3763 new = arg_pointer_rtx, offset = in_arg_offset;
3764 else if (x == virtual_stack_vars_rtx)
3765 new = frame_pointer_rtx, offset = var_offset;
3766 else if (x == virtual_stack_dynamic_rtx)
3767 new = stack_pointer_rtx, offset = dynamic_offset;
3768 else if (x == virtual_outgoing_args_rtx)
3769 new = stack_pointer_rtx, offset = out_arg_offset;
71038426
RH
3770 else if (x == virtual_cfa_rtx)
3771 new = arg_pointer_rtx, offset = cfa_offset;
6f086dfc
RS
3772
3773 if (new)
3774 {
3775 temp = plus_constant (new, offset);
3776 if (!validate_change (object, loc, temp, 0))
3777 {
3778 if (! extra_insns)
3779 return 0;
3780
3781 start_sequence ();
5f4f0e22 3782 temp = force_operand (temp, NULL_RTX);
6f086dfc
RS
3783 seq = get_insns ();
3784 end_sequence ();
3785
3786 emit_insns_before (seq, object);
3787 if (! validate_change (object, loc, temp, 0)
3788 && ! validate_replace_rtx (x, temp, object))
3789 abort ();
3790 }
3791 }
3792
3793 return 1;
e9a25f70
JL
3794
3795 case ADDRESSOF:
3796 if (GET_CODE (XEXP (x, 0)) == REG)
3797 return 1;
3798
3799 else if (GET_CODE (XEXP (x, 0)) == MEM)
3800 {
3801 /* If we have a (addressof (mem ..)), do any instantiation inside
3802 since we know we'll be making the inside valid when we finally
3803 remove the ADDRESSOF. */
3804 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3805 return 1;
3806 }
3807 break;
3808
3809 default:
3810 break;
6f086dfc
RS
3811 }
3812
3813 /* Scan all subexpressions. */
3814 fmt = GET_RTX_FORMAT (code);
3815 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3816 if (*fmt == 'e')
3817 {
3818 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3819 return 0;
3820 }
3821 else if (*fmt == 'E')
3822 for (j = 0; j < XVECLEN (x, i); j++)
3823 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3824 extra_insns))
3825 return 0;
3826
3827 return 1;
3828}
3829\f
3830/* Optimization: assuming this function does not receive nonlocal gotos,
3831 delete the handlers for such, as well as the insns to establish
3832 and disestablish them. */
3833
3834static void
3835delete_handlers ()
3836{
3837 rtx insn;
3838 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3839 {
3840 /* Delete the handler by turning off the flag that would
3841 prevent jump_optimize from deleting it.
3842 Also permit deletion of the nonlocal labels themselves
3843 if nothing local refers to them. */
3844 if (GET_CODE (insn) == CODE_LABEL)
71cd4a8d
JW
3845 {
3846 tree t, last_t;
3847
3848 LABEL_PRESERVE_P (insn) = 0;
3849
3850 /* Remove it from the nonlocal_label list, to avoid confusing
3851 flow. */
3852 for (t = nonlocal_labels, last_t = 0; t;
3853 last_t = t, t = TREE_CHAIN (t))
3854 if (DECL_RTL (TREE_VALUE (t)) == insn)
3855 break;
3856 if (t)
3857 {
3858 if (! last_t)
3859 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3860 else
3861 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3862 }
3863 }
ba716ac9
BS
3864 if (GET_CODE (insn) == INSN)
3865 {
3866 int can_delete = 0;
3867 rtx t;
3868 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3869 if (reg_mentioned_p (t, PATTERN (insn)))
3870 {
3871 can_delete = 1;
3872 break;
3873 }
3874 if (can_delete
59257ff7
RK
3875 || (nonlocal_goto_stack_level != 0
3876 && reg_mentioned_p (nonlocal_goto_stack_level,
ba716ac9
BS
3877 PATTERN (insn))))
3878 delete_insn (insn);
3879 }
6f086dfc
RS
3880 }
3881}
6f086dfc
RS
3882\f
3883/* Output a USE for any register use in RTL.
3884 This is used with -noreg to mark the extent of lifespan
3885 of any registers used in a user-visible variable's DECL_RTL. */
3886
3887void
3888use_variable (rtl)
3889 rtx rtl;
3890{
3891 if (GET_CODE (rtl) == REG)
3892 /* This is a register variable. */
38a448ca 3893 emit_insn (gen_rtx_USE (VOIDmode, rtl));
6f086dfc
RS
3894 else if (GET_CODE (rtl) == MEM
3895 && GET_CODE (XEXP (rtl, 0)) == REG
3896 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3897 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3898 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3899 /* This is a variable-sized structure. */
38a448ca 3900 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
6f086dfc
RS
3901}
3902
3903/* Like use_variable except that it outputs the USEs after INSN
3904 instead of at the end of the insn-chain. */
3905
3906void
3907use_variable_after (rtl, insn)
3908 rtx rtl, insn;
3909{
3910 if (GET_CODE (rtl) == REG)
3911 /* This is a register variable. */
38a448ca 3912 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
6f086dfc
RS
3913 else if (GET_CODE (rtl) == MEM
3914 && GET_CODE (XEXP (rtl, 0)) == REG
3915 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3916 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3917 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3918 /* This is a variable-sized structure. */
38a448ca 3919 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
6f086dfc
RS
3920}
3921\f
3922int
3923max_parm_reg_num ()
3924{
3925 return max_parm_reg;
3926}
3927
3928/* Return the first insn following those generated by `assign_parms'. */
3929
3930rtx
3931get_first_nonparm_insn ()
3932{
3933 if (last_parm_insn)
3934 return NEXT_INSN (last_parm_insn);
3935 return get_insns ();
3936}
3937
5378192b
RS
3938/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3939 Crash if there is none. */
3940
3941rtx
3942get_first_block_beg ()
3943{
3944 register rtx searcher;
3945 register rtx insn = get_first_nonparm_insn ();
3946
3947 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3948 if (GET_CODE (searcher) == NOTE
3949 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3950 return searcher;
3951
3952 abort (); /* Invalid call to this function. (See comments above.) */
3953 return NULL_RTX;
3954}
3955
d181c154
RS
3956/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3957 This means a type for which function calls must pass an address to the
3958 function or get an address back from the function.
3959 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
3960
3961int
3962aggregate_value_p (exp)
3963 tree exp;
3964{
9d790a4f
RS
3965 int i, regno, nregs;
3966 rtx reg;
d181c154
RS
3967 tree type;
3968 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3969 type = exp;
3970 else
3971 type = TREE_TYPE (exp);
3972
3973 if (RETURN_IN_MEMORY (type))
6f086dfc 3974 return 1;
956d6950 3975 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
3976 and thus can't be returned in registers. */
3977 if (TREE_ADDRESSABLE (type))
3978 return 1;
05e3bdb9 3979 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 3980 return 1;
9d790a4f
RS
3981 /* Make sure we have suitable call-clobbered regs to return
3982 the value in; if not, we must return it in memory. */
d181c154 3983 reg = hard_function_value (type, 0);
e71f7aa5
JW
3984
3985 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3986 it is OK. */
3987 if (GET_CODE (reg) != REG)
3988 return 0;
3989
9d790a4f 3990 regno = REGNO (reg);
d181c154 3991 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
9d790a4f
RS
3992 for (i = 0; i < nregs; i++)
3993 if (! call_used_regs[regno + i])
3994 return 1;
6f086dfc
RS
3995 return 0;
3996}
3997\f
3998/* Assign RTL expressions to the function's parameters.
3999 This may involve copying them into registers and using
4000 those registers as the RTL for them.
4001
4002 If SECOND_TIME is non-zero it means that this function is being
4003 called a second time. This is done by integrate.c when a function's
4004 compilation is deferred. We need to come back here in case the
4005 FUNCTION_ARG macro computes items needed for the rest of the compilation
4006 (such as changing which registers are fixed or caller-saved). But suppress
4007 writing any insns or setting DECL_RTL of anything in this case. */
4008
4009void
4010assign_parms (fndecl, second_time)
4011 tree fndecl;
4012 int second_time;
4013{
4014 register tree parm;
4015 register rtx entry_parm = 0;
4016 register rtx stack_parm = 0;
4017 CUMULATIVE_ARGS args_so_far;
621061f4
RK
4018 enum machine_mode promoted_mode, passed_mode;
4019 enum machine_mode nominal_mode, promoted_nominal_mode;
00d8a4c1 4020 int unsignedp;
6f086dfc
RS
4021 /* Total space needed so far for args on the stack,
4022 given as a constant and a tree-expression. */
4023 struct args_size stack_args_size;
4024 tree fntype = TREE_TYPE (fndecl);
4025 tree fnargs = DECL_ARGUMENTS (fndecl);
4026 /* This is used for the arg pointer when referring to stack args. */
4027 rtx internal_arg_pointer;
4028 /* This is a dummy PARM_DECL that we used for the function result if
4029 the function returns a structure. */
4030 tree function_result_decl = 0;
6f086dfc 4031 int varargs_setup = 0;
3412b298 4032 rtx conversion_insns = 0;
6f086dfc
RS
4033
4034 /* Nonzero if the last arg is named `__builtin_va_alist',
4035 which is used on some machines for old-fashioned non-ANSI varargs.h;
4036 this should be stuck onto the stack as if it had arrived there. */
3b69d50e
RK
4037 int hide_last_arg
4038 = (current_function_varargs
4039 && fnargs
6f086dfc
RS
4040 && (parm = tree_last (fnargs)) != 0
4041 && DECL_NAME (parm)
4042 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4043 "__builtin_va_alist")));
4044
4045 /* Nonzero if function takes extra anonymous args.
4046 This means the last named arg must be on the stack
0f41302f 4047 right before the anonymous ones. */
6f086dfc
RS
4048 int stdarg
4049 = (TYPE_ARG_TYPES (fntype) != 0
4050 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4051 != void_type_node));
4052
ebb904cb
RK
4053 current_function_stdarg = stdarg;
4054
6f086dfc
RS
4055 /* If the reg that the virtual arg pointer will be translated into is
4056 not a fixed reg or is the stack pointer, make a copy of the virtual
4057 arg pointer, and address parms via the copy. The frame pointer is
4058 considered fixed even though it is not marked as such.
4059
4060 The second time through, simply use ap to avoid generating rtx. */
4061
4062 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4063 || ! (fixed_regs[ARG_POINTER_REGNUM]
4064 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4065 && ! second_time)
4066 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4067 else
4068 internal_arg_pointer = virtual_incoming_args_rtx;
4069 current_function_internal_arg_pointer = internal_arg_pointer;
4070
4071 stack_args_size.constant = 0;
4072 stack_args_size.var = 0;
4073
4074 /* If struct value address is treated as the first argument, make it so. */
4075 if (aggregate_value_p (DECL_RESULT (fndecl))
4076 && ! current_function_returns_pcc_struct
4077 && struct_value_incoming_rtx == 0)
4078 {
f9f29478 4079 tree type = build_pointer_type (TREE_TYPE (fntype));
6f086dfc 4080
5f4f0e22 4081 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
6f086dfc
RS
4082
4083 DECL_ARG_TYPE (function_result_decl) = type;
4084 TREE_CHAIN (function_result_decl) = fnargs;
4085 fnargs = function_result_decl;
4086 }
4087
e9a25f70
JL
4088 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4089 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4090 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
6f086dfc
RS
4091
4092#ifdef INIT_CUMULATIVE_INCOMING_ARGS
ea0d4c4b 4093 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
6f086dfc 4094#else
2c7ee1a6 4095 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
6f086dfc
RS
4096#endif
4097
4098 /* We haven't yet found an argument that we must push and pretend the
4099 caller did. */
4100 current_function_pretend_args_size = 0;
4101
4102 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4103 {
05e3bdb9 4104 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
6f086dfc
RS
4105 struct args_size stack_offset;
4106 struct args_size arg_size;
4107 int passed_pointer = 0;
621061f4 4108 int did_conversion = 0;
6f086dfc 4109 tree passed_type = DECL_ARG_TYPE (parm);
621061f4 4110 tree nominal_type = TREE_TYPE (parm);
9ab70a9b 4111 int pretend_named;
6f086dfc
RS
4112
4113 /* Set LAST_NAMED if this is last named arg before some
bf9c83fe 4114 anonymous args. */
6f086dfc
RS
4115 int last_named = ((TREE_CHAIN (parm) == 0
4116 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3b69d50e 4117 && (stdarg || current_function_varargs));
bf9c83fe
JW
4118 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4119 most machines, if this is a varargs/stdarg function, then we treat
4120 the last named arg as if it were anonymous too. */
e5e809f4 4121 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
6f086dfc
RS
4122
4123 if (TREE_TYPE (parm) == error_mark_node
4124 /* This can happen after weird syntax errors
4125 or if an enum type is defined among the parms. */
4126 || TREE_CODE (parm) != PARM_DECL
4127 || passed_type == NULL)
4128 {
38a448ca
RH
4129 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4130 = gen_rtx_MEM (BLKmode, const0_rtx);
6f086dfc
RS
4131 TREE_USED (parm) = 1;
4132 continue;
4133 }
4134
4135 /* For varargs.h function, save info about regs and stack space
4136 used by the individual args, not including the va_alist arg. */
3b69d50e 4137 if (hide_last_arg && last_named)
6f086dfc
RS
4138 current_function_args_info = args_so_far;
4139
4140 /* Find mode of arg as it is passed, and mode of arg
4141 as it should be during execution of this function. */
4142 passed_mode = TYPE_MODE (passed_type);
621061f4 4143 nominal_mode = TYPE_MODE (nominal_type);
6f086dfc 4144
16bae307
RS
4145 /* If the parm's mode is VOID, its value doesn't matter,
4146 and avoid the usual things like emit_move_insn that could crash. */
4147 if (nominal_mode == VOIDmode)
4148 {
4149 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4150 continue;
4151 }
4152
3f46679a
RK
4153 /* If the parm is to be passed as a transparent union, use the
4154 type of the first field for the tests below. We have already
4155 verified that the modes are the same. */
4156 if (DECL_TRANSPARENT_UNION (parm)
4157 || TYPE_TRANSPARENT_UNION (passed_type))
4158 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4159
a14ae508
RK
4160 /* See if this arg was passed by invisible reference. It is if
4161 it is an object whose size depends on the contents of the
4162 object itself or if the machine requires these objects be passed
4163 that way. */
4164
4165 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4166 && contains_placeholder_p (TYPE_SIZE (passed_type)))
657bb6dc 4167 || TREE_ADDRESSABLE (passed_type)
6f086dfc 4168#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
a14ae508 4169 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
bf9c83fe 4170 passed_type, named_arg)
a14ae508
RK
4171#endif
4172 )
6f086dfc 4173 {
621061f4 4174 passed_type = nominal_type = build_pointer_type (passed_type);
6f086dfc
RS
4175 passed_pointer = 1;
4176 passed_mode = nominal_mode = Pmode;
4177 }
6f086dfc 4178
a53e14c0
RK
4179 promoted_mode = passed_mode;
4180
4181#ifdef PROMOTE_FUNCTION_ARGS
4182 /* Compute the mode in which the arg is actually extended to. */
7940255d 4183 unsignedp = TREE_UNSIGNED (passed_type);
a5a52dbc 4184 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
a53e14c0
RK
4185#endif
4186
6f086dfc
RS
4187 /* Let machine desc say which reg (if any) the parm arrives in.
4188 0 means it arrives on the stack. */
4189#ifdef FUNCTION_INCOMING_ARG
a53e14c0 4190 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
bf9c83fe 4191 passed_type, named_arg);
6f086dfc 4192#else
a53e14c0 4193 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
bf9c83fe 4194 passed_type, named_arg);
6f086dfc
RS
4195#endif
4196
621061f4
RK
4197 if (entry_parm == 0)
4198 promoted_mode = passed_mode;
a53e14c0 4199
6f086dfc
RS
4200#ifdef SETUP_INCOMING_VARARGS
4201 /* If this is the last named parameter, do any required setup for
4202 varargs or stdargs. We need to know about the case of this being an
4203 addressable type, in which case we skip the registers it
4204 would have arrived in.
4205
4206 For stdargs, LAST_NAMED will be set for two parameters, the one that
4207 is actually the last named, and the dummy parameter. We only
4208 want to do this action once.
4209
4210 Also, indicate when RTL generation is to be suppressed. */
4211 if (last_named && !varargs_setup)
4212 {
621061f4 4213 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
6f086dfc
RS
4214 current_function_pretend_args_size,
4215 second_time);
4216 varargs_setup = 1;
4217 }
4218#endif
4219
4220 /* Determine parm's home in the stack,
4221 in case it arrives in the stack or we should pretend it did.
4222
4223 Compute the stack position and rtx where the argument arrives
4224 and its size.
4225
4226 There is one complexity here: If this was a parameter that would
4227 have been passed in registers, but wasn't only because it is
4228 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4229 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4230 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4231 0 as it was the previous time. */
4232
9ab70a9b 4233 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
ab87f8c8 4234 locate_and_pad_parm (nominal_mode, passed_type,
6f086dfc
RS
4235#ifdef STACK_PARMS_IN_REG_PARM_AREA
4236 1,
4237#else
4238#ifdef FUNCTION_INCOMING_ARG
621061f4 4239 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
6f086dfc 4240 passed_type,
9ab70a9b 4241 pretend_named) != 0,
6f086dfc 4242#else
621061f4 4243 FUNCTION_ARG (args_so_far, promoted_mode,
6f086dfc 4244 passed_type,
9ab70a9b 4245 pretend_named) != 0,
6f086dfc
RS
4246#endif
4247#endif
4248 fndecl, &stack_args_size, &stack_offset, &arg_size);
4249
4250 if (! second_time)
4251 {
4252 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4253
4254 if (offset_rtx == const0_rtx)
ab87f8c8 4255 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
6f086dfc 4256 else
ab87f8c8 4257 stack_parm = gen_rtx_MEM (nominal_mode,
38a448ca
RH
4258 gen_rtx_PLUS (Pmode,
4259 internal_arg_pointer,
4260 offset_rtx));
6f086dfc
RS
4261
4262 /* If this is a memory ref that contains aggregate components,
a00285d0
RK
4263 mark it as such for cse and loop optimize. Likewise if it
4264 is readonly. */
c6df88cb 4265 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
a00285d0 4266 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
41472af8 4267 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
6f086dfc
RS
4268 }
4269
4270 /* If this parameter was passed both in registers and in the stack,
4271 use the copy on the stack. */
621061f4 4272 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
6f086dfc
RS
4273 entry_parm = 0;
4274
461beb10 4275#ifdef FUNCTION_ARG_PARTIAL_NREGS
6f086dfc
RS
4276 /* If this parm was passed part in regs and part in memory,
4277 pretend it arrived entirely in memory
4278 by pushing the register-part onto the stack.
4279
4280 In the special case of a DImode or DFmode that is split,
4281 we could put it together in a pseudoreg directly,
4282 but for now that's not worth bothering with. */
4283
4284 if (entry_parm)
4285 {
621061f4 4286 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
bf9c83fe 4287 passed_type, named_arg);
6f086dfc
RS
4288
4289 if (nregs > 0)
4290 {
4291 current_function_pretend_args_size
4292 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4293 / (PARM_BOUNDARY / BITS_PER_UNIT)
4294 * (PARM_BOUNDARY / BITS_PER_UNIT));
4295
4296 if (! second_time)
5c4cdc9f
JW
4297 {
4298 /* Handle calls that pass values in multiple non-contiguous
4299 locations. The Irix 6 ABI has examples of this. */
4300 if (GET_CODE (entry_parm) == PARALLEL)
aac5cc16
RH
4301 emit_group_store (validize_mem (stack_parm), entry_parm,
4302 int_size_in_bytes (TREE_TYPE (parm)),
4303 (TYPE_ALIGN (TREE_TYPE (parm))
4304 / BITS_PER_UNIT));
5c4cdc9f
JW
4305 else
4306 move_block_from_reg (REGNO (entry_parm),
4307 validize_mem (stack_parm), nregs,
4308 int_size_in_bytes (TREE_TYPE (parm)));
4309 }
6f086dfc
RS
4310 entry_parm = stack_parm;
4311 }
4312 }
461beb10 4313#endif
6f086dfc
RS
4314
4315 /* If we didn't decide this parm came in a register,
4316 by default it came on the stack. */
4317 if (entry_parm == 0)
4318 entry_parm = stack_parm;
4319
4320 /* Record permanently how this parm was passed. */
4321 if (! second_time)
4322 DECL_INCOMING_RTL (parm) = entry_parm;
4323
4324 /* If there is actually space on the stack for this parm,
4325 count it in stack_args_size; otherwise set stack_parm to 0
4326 to indicate there is no preallocated stack slot for the parm. */
4327
4328 if (entry_parm == stack_parm
ab87f8c8
JL
4329 || (GET_CODE (entry_parm) == PARALLEL
4330 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
d9ca49d5 4331#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 4332 /* On some machines, even if a parm value arrives in a register
d9ca49d5
JW
4333 there is still an (uninitialized) stack slot allocated for it.
4334
4335 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4336 whether this parameter already has a stack slot allocated,
4337 because an arg block exists only if current_function_args_size
abc95ed3 4338 is larger than some threshold, and we haven't calculated that
d9ca49d5
JW
4339 yet. So, for now, we just assume that stack slots never exist
4340 in this case. */
6f086dfc
RS
4341 || REG_PARM_STACK_SPACE (fndecl) > 0
4342#endif
4343 )
4344 {
4345 stack_args_size.constant += arg_size.constant;
4346 if (arg_size.var)
4347 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4348 }
4349 else
4350 /* No stack slot was pushed for this parm. */
4351 stack_parm = 0;
4352
4353 /* Update info on where next arg arrives in registers. */
4354
621061f4 4355 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
bf9c83fe 4356 passed_type, named_arg);
6f086dfc 4357
0f41302f 4358 /* If this is our second time through, we are done with this parm. */
6f086dfc
RS
4359 if (second_time)
4360 continue;
4361
e16c591a
RS
4362 /* If we can't trust the parm stack slot to be aligned enough
4363 for its ultimate type, don't use that slot after entry.
4364 We'll make another stack slot, if we need one. */
4365 {
e16c591a 4366 int thisparm_boundary
621061f4 4367 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
e16c591a
RS
4368
4369 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4370 stack_parm = 0;
4371 }
4372
cb61f66f
RS
4373 /* If parm was passed in memory, and we need to convert it on entry,
4374 don't store it back in that same slot. */
4375 if (entry_parm != 0
4376 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4377 stack_parm = 0;
4378
4379#if 0
6f086dfc
RS
4380 /* Now adjust STACK_PARM to the mode and precise location
4381 where this parameter should live during execution,
4382 if we discover that it must live in the stack during execution.
4383 To make debuggers happier on big-endian machines, we store
4384 the value in the last bytes of the space available. */
4385
4386 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4387 && stack_parm != 0)
4388 {
4389 rtx offset_rtx;
4390
f76b9db2
ILT
4391 if (BYTES_BIG_ENDIAN
4392 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
6f086dfc
RS
4393 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4394 - GET_MODE_SIZE (nominal_mode));
6f086dfc
RS
4395
4396 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4397 if (offset_rtx == const0_rtx)
38a448ca 4398 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
6f086dfc 4399 else
38a448ca
RH
4400 stack_parm = gen_rtx_MEM (nominal_mode,
4401 gen_rtx_PLUS (Pmode,
4402 internal_arg_pointer,
4403 offset_rtx));
6f086dfc
RS
4404
4405 /* If this is a memory ref that contains aggregate components,
4406 mark it as such for cse and loop optimize. */
c6df88cb 4407 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
6f086dfc 4408 }
cb61f66f 4409#endif /* 0 */
6f086dfc 4410
9dc0f531
RK
4411#ifdef STACK_REGS
4412 /* We need this "use" info, because the gcc-register->stack-register
4413 converter in reg-stack.c needs to know which registers are active
4414 at the start of the function call. The actual parameter loading
4415 instructions are not always available then anymore, since they might
4416 have been optimised away. */
4417
4418 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
38a448ca 4419 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
9dc0f531
RK
4420#endif
4421
6f086dfc
RS
4422 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4423 in the mode in which it arrives.
4424 STACK_PARM is an RTX for a stack slot where the parameter can live
4425 during the function (in case we want to put it there).
4426 STACK_PARM is 0 if no stack slot was pushed for it.
4427
4428 Now output code if necessary to convert ENTRY_PARM to
4429 the type in which this function declares it,
4430 and store that result in an appropriate place,
4431 which may be a pseudo reg, may be STACK_PARM,
4432 or may be a local stack slot if STACK_PARM is 0.
4433
4434 Set DECL_RTL to that place. */
4435
5c4cdc9f 4436 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4437 {
5c4cdc9f
JW
4438 /* If a BLKmode arrives in registers, copy it to a stack slot.
4439 Handle calls that pass values in multiple non-contiguous
4440 locations. The Irix 6 ABI has examples of this. */
4441 if (GET_CODE (entry_parm) == REG
4442 || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4443 {
621061f4
RK
4444 int size_stored
4445 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4446 UNITS_PER_WORD);
6f086dfc
RS
4447
4448 /* Note that we will be storing an integral number of words.
4449 So we have to be careful to ensure that we allocate an
4450 integral number of words. We do this below in the
4451 assign_stack_local if space was not allocated in the argument
4452 list. If it was, this will not work if PARM_BOUNDARY is not
4453 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4454 if it becomes a problem. */
4455
4456 if (stack_parm == 0)
7e41ffa2
RS
4457 {
4458 stack_parm
621061f4
RK
4459 = assign_stack_local (GET_MODE (entry_parm),
4460 size_stored, 0);
4461
4462 /* If this is a memory ref that contains aggregate
4463 components, mark it as such for cse and loop optimize. */
c6df88cb 4464 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
7e41ffa2
RS
4465 }
4466
6f086dfc
RS
4467 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4468 abort ();
4469
7a30f0c4
JW
4470 if (TREE_READONLY (parm))
4471 RTX_UNCHANGING_P (stack_parm) = 1;
4472
5c4cdc9f
JW
4473 /* Handle calls that pass values in multiple non-contiguous
4474 locations. The Irix 6 ABI has examples of this. */
4475 if (GET_CODE (entry_parm) == PARALLEL)
aac5cc16
RH
4476 emit_group_store (validize_mem (stack_parm), entry_parm,
4477 int_size_in_bytes (TREE_TYPE (parm)),
4478 (TYPE_ALIGN (TREE_TYPE (parm))
4479 / BITS_PER_UNIT));
5c4cdc9f
JW
4480 else
4481 move_block_from_reg (REGNO (entry_parm),
4482 validize_mem (stack_parm),
4483 size_stored / UNITS_PER_WORD,
4484 int_size_in_bytes (TREE_TYPE (parm)));
6f086dfc
RS
4485 }
4486 DECL_RTL (parm) = stack_parm;
4487 }
74bd77a8 4488 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 4489 && ! DECL_INLINE (fndecl))
6f086dfc
RS
4490 /* layout_decl may set this. */
4491 || TREE_ADDRESSABLE (parm)
4492 || TREE_SIDE_EFFECTS (parm)
4493 /* If -ffloat-store specified, don't put explicit
4494 float variables into registers. */
4495 || (flag_float_store
4496 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4497 /* Always assign pseudo to structure return or item passed
4498 by invisible reference. */
4499 || passed_pointer || parm == function_result_decl)
4500 {
00d8a4c1
RK
4501 /* Store the parm in a pseudoregister during the function, but we
4502 may need to do it in a wider mode. */
4503
4504 register rtx parmreg;
4e86caed 4505 int regno, regnoi = 0, regnor = 0;
00d8a4c1
RK
4506
4507 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
cd5b3469 4508
621061f4
RK
4509 promoted_nominal_mode
4510 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
6f086dfc 4511
621061f4 4512 parmreg = gen_reg_rtx (promoted_nominal_mode);
ddb7361a 4513 mark_user_reg (parmreg);
6f086dfc
RS
4514
4515 /* If this was an item that we received a pointer to, set DECL_RTL
4516 appropriately. */
4517 if (passed_pointer)
4518 {
621061f4 4519 DECL_RTL (parm)
38a448ca 4520 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
c6df88cb 4521 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
6f086dfc
RS
4522 }
4523 else
4524 DECL_RTL (parm) = parmreg;
4525
4526 /* Copy the value into the register. */
621061f4
RK
4527 if (nominal_mode != passed_mode
4528 || promoted_nominal_mode != promoted_mode)
86f8eff3 4529 {
efd8cba0 4530 int save_tree_used;
621061f4
RK
4531 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4532 mode, by the caller. We now have to convert it to
4533 NOMINAL_MODE, if different. However, PARMREG may be in
956d6950 4534 a different mode than NOMINAL_MODE if it is being stored
621061f4
RK
4535 promoted.
4536
4537 If ENTRY_PARM is a hard register, it might be in a register
86f8eff3
RK
4538 not valid for operating in its mode (e.g., an odd-numbered
4539 register for a DFmode). In that case, moves are the only
4540 thing valid, so we can't do a convert from there. This
4541 occurs when the calling sequence allow such misaligned
3412b298
JW
4542 usages.
4543
4544 In addition, the conversion may involve a call, which could
4545 clobber parameters which haven't been copied to pseudo
4546 registers yet. Therefore, we must first copy the parm to
4547 a pseudo reg here, and save the conversion until after all
4548 parameters have been moved. */
4549
4550 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4551
4552 emit_move_insn (tempreg, validize_mem (entry_parm));
4553
4554 push_to_sequence (conversion_insns);
ad241351
RK
4555 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4556
efd8cba0
DB
4557 /* TREE_USED gets set erroneously during expand_assignment. */
4558 save_tree_used = TREE_USED (parm);
621061f4
RK
4559 expand_assignment (parm,
4560 make_tree (nominal_type, tempreg), 0, 0);
efd8cba0 4561 TREE_USED (parm) = save_tree_used;
3412b298 4562 conversion_insns = get_insns ();
621061f4 4563 did_conversion = 1;
3412b298 4564 end_sequence ();
86f8eff3 4565 }
6f086dfc
RS
4566 else
4567 emit_move_insn (parmreg, validize_mem (entry_parm));
4568
74bd77a8
RS
4569 /* If we were passed a pointer but the actual value
4570 can safely live in a register, put it in one. */
16bae307 4571 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
74bd77a8
RS
4572 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4573 && ! DECL_INLINE (fndecl))
4574 /* layout_decl may set this. */
4575 || TREE_ADDRESSABLE (parm)
4576 || TREE_SIDE_EFFECTS (parm)
4577 /* If -ffloat-store specified, don't put explicit
4578 float variables into registers. */
4579 || (flag_float_store
4580 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4581 {
2654605a
JW
4582 /* We can't use nominal_mode, because it will have been set to
4583 Pmode above. We must use the actual mode of the parm. */
4584 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
ddb7361a 4585 mark_user_reg (parmreg);
74bd77a8
RS
4586 emit_move_insn (parmreg, DECL_RTL (parm));
4587 DECL_RTL (parm) = parmreg;
c110c53d
RS
4588 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4589 now the parm. */
4590 stack_parm = 0;
74bd77a8 4591 }
137a2a7b
DE
4592#ifdef FUNCTION_ARG_CALLEE_COPIES
4593 /* If we are passed an arg by reference and it is our responsibility
4594 to make a copy, do it now.
4595 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4596 original argument, so we must recreate them in the call to
4597 FUNCTION_ARG_CALLEE_COPIES. */
4598 /* ??? Later add code to handle the case that if the argument isn't
4599 modified, don't do the copy. */
4600
4601 else if (passed_pointer
4602 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4603 TYPE_MODE (DECL_ARG_TYPE (parm)),
4604 DECL_ARG_TYPE (parm),
bf9c83fe 4605 named_arg)
926b1b99 4606 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
137a2a7b
DE
4607 {
4608 rtx copy;
4609 tree type = DECL_ARG_TYPE (parm);
4610
4611 /* This sequence may involve a library call perhaps clobbering
4612 registers that haven't been copied to pseudos yet. */
4613
4614 push_to_sequence (conversion_insns);
4615
4616 if (TYPE_SIZE (type) == 0
4617 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1fd3ef7f 4618 /* This is a variable sized object. */
38a448ca
RH
4619 copy = gen_rtx_MEM (BLKmode,
4620 allocate_dynamic_stack_space
4621 (expr_size (parm), NULL_RTX,
4622 TYPE_ALIGN (type)));
137a2a7b 4623 else
1fd3ef7f
RK
4624 copy = assign_stack_temp (TYPE_MODE (type),
4625 int_size_in_bytes (type), 1);
c6df88cb 4626 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
e9a25f70 4627 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
137a2a7b
DE
4628
4629 store_expr (parm, copy, 0);
4630 emit_move_insn (parmreg, XEXP (copy, 0));
7d384cc0 4631 if (current_function_check_memory_usage)
86fa911a
RK
4632 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4633 XEXP (copy, 0), ptr_mode,
4634 GEN_INT (int_size_in_bytes (type)),
4635 TYPE_MODE (sizetype),
956d6950
JL
4636 GEN_INT (MEMORY_USE_RW),
4637 TYPE_MODE (integer_type_node));
137a2a7b 4638 conversion_insns = get_insns ();
621061f4 4639 did_conversion = 1;
137a2a7b
DE
4640 end_sequence ();
4641 }
4642#endif /* FUNCTION_ARG_CALLEE_COPIES */
74bd77a8 4643
6f086dfc 4644 /* In any case, record the parm's desired stack location
14aceb29
RS
4645 in case we later discover it must live in the stack.
4646
4647 If it is a COMPLEX value, store the stack location for both
4648 halves. */
4649
4650 if (GET_CODE (parmreg) == CONCAT)
4651 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4652 else
4653 regno = REGNO (parmreg);
4654
e9a25f70 4655 if (regno >= max_parm_reg)
6f086dfc
RS
4656 {
4657 rtx *new;
e9a25f70 4658 int old_max_parm_reg = max_parm_reg;
14aceb29 4659
e9a25f70
JL
4660 /* It's slow to expand this one register at a time,
4661 but it's also rare and we need max_parm_reg to be
4662 precisely correct. */
4663 max_parm_reg = regno + 1;
4664 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4c9a05bc 4665 bcopy ((char *) parm_reg_stack_loc, (char *) new,
e9a25f70
JL
4666 old_max_parm_reg * sizeof (rtx));
4667 bzero ((char *) (new + old_max_parm_reg),
4668 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
6f086dfc
RS
4669 parm_reg_stack_loc = new;
4670 }
14aceb29
RS
4671
4672 if (GET_CODE (parmreg) == CONCAT)
4673 {
4674 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4675
a03caf76
RK
4676 regnor = REGNO (gen_realpart (submode, parmreg));
4677 regnoi = REGNO (gen_imagpart (submode, parmreg));
4678
7b1a0c14
RS
4679 if (stack_parm != 0)
4680 {
a03caf76 4681 parm_reg_stack_loc[regnor]
3d329b07 4682 = gen_realpart (submode, stack_parm);
a03caf76 4683 parm_reg_stack_loc[regnoi]
3d329b07 4684 = gen_imagpart (submode, stack_parm);
7b1a0c14
RS
4685 }
4686 else
4687 {
a03caf76
RK
4688 parm_reg_stack_loc[regnor] = 0;
4689 parm_reg_stack_loc[regnoi] = 0;
7b1a0c14 4690 }
14aceb29
RS
4691 }
4692 else
4693 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
6f086dfc
RS
4694
4695 /* Mark the register as eliminable if we did no conversion
4696 and it was copied from memory at a fixed offset,
4697 and the arg pointer was not copied to a pseudo-reg.
4698 If the arg pointer is a pseudo reg or the offset formed
4699 an invalid address, such memory-equivalences
4700 as we make here would screw up life analysis for it. */
4701 if (nominal_mode == passed_mode
621061f4 4702 && ! did_conversion
38b610ed
ILT
4703 && stack_parm != 0
4704 && GET_CODE (stack_parm) == MEM
6f086dfc
RS
4705 && stack_offset.var == 0
4706 && reg_mentioned_p (virtual_incoming_args_rtx,
38b610ed 4707 XEXP (stack_parm, 0)))
a03caf76
RK
4708 {
4709 rtx linsn = get_last_insn ();
69685820 4710 rtx sinsn, set;
a03caf76
RK
4711
4712 /* Mark complex types separately. */
4713 if (GET_CODE (parmreg) == CONCAT)
69685820
RK
4714 /* Scan backwards for the set of the real and
4715 imaginary parts. */
4716 for (sinsn = linsn; sinsn != 0;
4717 sinsn = prev_nonnote_insn (sinsn))
4718 {
4719 set = single_set (sinsn);
4720 if (set != 0
4721 && SET_DEST (set) == regno_reg_rtx [regnoi])
4722 REG_NOTES (sinsn)
38a448ca
RH
4723 = gen_rtx_EXPR_LIST (REG_EQUIV,
4724 parm_reg_stack_loc[regnoi],
4725 REG_NOTES (sinsn));
69685820
RK
4726 else if (set != 0
4727 && SET_DEST (set) == regno_reg_rtx [regnor])
4728 REG_NOTES (sinsn)
38a448ca
RH
4729 = gen_rtx_EXPR_LIST (REG_EQUIV,
4730 parm_reg_stack_loc[regnor],
4731 REG_NOTES (sinsn));
69685820
RK
4732 }
4733 else if ((set = single_set (linsn)) != 0
4734 && SET_DEST (set) == parmreg)
a03caf76 4735 REG_NOTES (linsn)
38a448ca
RH
4736 = gen_rtx_EXPR_LIST (REG_EQUIV,
4737 stack_parm, REG_NOTES (linsn));
a03caf76 4738 }
6f086dfc
RS
4739
4740 /* For pointer data type, suggest pointer register. */
e5e809f4 4741 if (POINTER_TYPE_P (TREE_TYPE (parm)))
6c6166bd
RK
4742 mark_reg_pointer (parmreg,
4743 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4744 / BITS_PER_UNIT));
6f086dfc
RS
4745 }
4746 else
4747 {
4748 /* Value must be stored in the stack slot STACK_PARM
4749 during function execution. */
4750
621061f4 4751 if (promoted_mode != nominal_mode)
86f8eff3
RK
4752 {
4753 /* Conversion is required. */
3412b298
JW
4754 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4755
4756 emit_move_insn (tempreg, validize_mem (entry_parm));
86f8eff3 4757
3412b298
JW
4758 push_to_sequence (conversion_insns);
4759 entry_parm = convert_to_mode (nominal_mode, tempreg,
a53e14c0 4760 TREE_UNSIGNED (TREE_TYPE (parm)));
de957303
DE
4761 if (stack_parm)
4762 {
4763 /* ??? This may need a big-endian conversion on sparc64. */
4764 stack_parm = change_address (stack_parm, nominal_mode,
4765 NULL_RTX);
4766 }
3412b298 4767 conversion_insns = get_insns ();
621061f4 4768 did_conversion = 1;
3412b298 4769 end_sequence ();
86f8eff3 4770 }
6f086dfc
RS
4771
4772 if (entry_parm != stack_parm)
4773 {
4774 if (stack_parm == 0)
7e41ffa2
RS
4775 {
4776 stack_parm
4777 = assign_stack_local (GET_MODE (entry_parm),
4778 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4779 /* If this is a memory ref that contains aggregate components,
4780 mark it as such for cse and loop optimize. */
c6df88cb 4781 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
7e41ffa2
RS
4782 }
4783
621061f4 4784 if (promoted_mode != nominal_mode)
3412b298
JW
4785 {
4786 push_to_sequence (conversion_insns);
4787 emit_move_insn (validize_mem (stack_parm),
4788 validize_mem (entry_parm));
4789 conversion_insns = get_insns ();
4790 end_sequence ();
4791 }
4792 else
4793 emit_move_insn (validize_mem (stack_parm),
4794 validize_mem (entry_parm));
6f086dfc 4795 }
7d384cc0 4796 if (current_function_check_memory_usage)
86fa911a
RK
4797 {
4798 push_to_sequence (conversion_insns);
4799 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4800 XEXP (stack_parm, 0), ptr_mode,
4801 GEN_INT (GET_MODE_SIZE (GET_MODE
4802 (entry_parm))),
4803 TYPE_MODE (sizetype),
956d6950
JL
4804 GEN_INT (MEMORY_USE_RW),
4805 TYPE_MODE (integer_type_node));
6f086dfc 4806
86fa911a
RK
4807 conversion_insns = get_insns ();
4808 end_sequence ();
4809 }
6f086dfc
RS
4810 DECL_RTL (parm) = stack_parm;
4811 }
4812
4813 /* If this "parameter" was the place where we are receiving the
4814 function's incoming structure pointer, set up the result. */
4815 if (parm == function_result_decl)
ccdecf58
RK
4816 {
4817 tree result = DECL_RESULT (fndecl);
4818 tree restype = TREE_TYPE (result);
4819
4820 DECL_RTL (result)
38a448ca 4821 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
ccdecf58 4822
c6df88cb
MM
4823 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4824 AGGREGATE_TYPE_P (restype));
ccdecf58 4825 }
6f086dfc
RS
4826
4827 if (TREE_THIS_VOLATILE (parm))
4828 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4829 if (TREE_READONLY (parm))
4830 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4831 }
4832
3412b298
JW
4833 /* Output all parameter conversion instructions (possibly including calls)
4834 now that all parameters have been copied out of hard registers. */
4835 emit_insns (conversion_insns);
4836
6f086dfc
RS
4837 last_parm_insn = get_last_insn ();
4838
4839 current_function_args_size = stack_args_size.constant;
4840
4841 /* Adjust function incoming argument size for alignment and
4842 minimum length. */
4843
4844#ifdef REG_PARM_STACK_SPACE
6f90e075 4845#ifndef MAYBE_REG_PARM_STACK_SPACE
6f086dfc
RS
4846 current_function_args_size = MAX (current_function_args_size,
4847 REG_PARM_STACK_SPACE (fndecl));
4848#endif
6f90e075 4849#endif
6f086dfc 4850
c795bca9
BS
4851#ifdef PREFERRED_STACK_BOUNDARY
4852#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
6f086dfc
RS
4853
4854 current_function_args_size
4855 = ((current_function_args_size + STACK_BYTES - 1)
4856 / STACK_BYTES) * STACK_BYTES;
4857#endif
4858
4859#ifdef ARGS_GROW_DOWNWARD
4860 current_function_arg_offset_rtx
5f4f0e22 4861 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
6f086dfc
RS
4862 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4863 size_int (-stack_args_size.constant)),
86fa911a 4864 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
6f086dfc
RS
4865#else
4866 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4867#endif
4868
4869 /* See how many bytes, if any, of its args a function should try to pop
4870 on return. */
4871
64e6d9cc 4872 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
4873 current_function_args_size);
4874
3b69d50e
RK
4875 /* For stdarg.h function, save info about
4876 regs and stack space used by the named args. */
6f086dfc 4877
3b69d50e 4878 if (!hide_last_arg)
6f086dfc
RS
4879 current_function_args_info = args_so_far;
4880
4881 /* Set the rtx used for the function return value. Put this in its
4882 own variable so any optimizers that need this information don't have
4883 to include tree.h. Do this here so it gets done when an inlined
4884 function gets output. */
4885
4886 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4887}
4888\f
75dc3319
RK
4889/* Indicate whether REGNO is an incoming argument to the current function
4890 that was promoted to a wider mode. If so, return the RTX for the
4891 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4892 that REGNO is promoted from and whether the promotion was signed or
4893 unsigned. */
4894
4895#ifdef PROMOTE_FUNCTION_ARGS
4896
4897rtx
4898promoted_input_arg (regno, pmode, punsignedp)
4899 int regno;
4900 enum machine_mode *pmode;
4901 int *punsignedp;
4902{
4903 tree arg;
4904
4905 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4906 arg = TREE_CHAIN (arg))
4907 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
621061f4
RK
4908 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4909 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
75dc3319
RK
4910 {
4911 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4912 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4913
a5a52dbc 4914 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
75dc3319
RK
4915 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4916 && mode != DECL_MODE (arg))
4917 {
4918 *pmode = DECL_MODE (arg);
4919 *punsignedp = unsignedp;
4920 return DECL_INCOMING_RTL (arg);
4921 }
4922 }
4923
4924 return 0;
4925}
4926
4927#endif
4928\f
6f086dfc
RS
4929/* Compute the size and offset from the start of the stacked arguments for a
4930 parm passed in mode PASSED_MODE and with type TYPE.
4931
4932 INITIAL_OFFSET_PTR points to the current offset into the stacked
4933 arguments.
4934
4935 The starting offset and size for this parm are returned in *OFFSET_PTR
4936 and *ARG_SIZE_PTR, respectively.
4937
4938 IN_REGS is non-zero if the argument will be passed in registers. It will
4939 never be set if REG_PARM_STACK_SPACE is not defined.
4940
4941 FNDECL is the function in which the argument was defined.
4942
4943 There are two types of rounding that are done. The first, controlled by
4944 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4945 list to be aligned to the specific boundary (in bits). This rounding
4946 affects the initial and starting offsets, but not the argument size.
4947
4948 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4949 optionally rounds the size of the parm to PARM_BOUNDARY. The
4950 initial offset is not affected by this rounding, while the size always
4951 is and the starting offset may be. */
4952
4953/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4954 initial_offset_ptr is positive because locate_and_pad_parm's
4955 callers pass in the total size of args so far as
4956 initial_offset_ptr. arg_size_ptr is always positive.*/
4957
6f086dfc
RS
void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr)
     enum machine_mode passed_mode;
     tree type;
     int in_regs;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
{
  /* Size of the parm in bytes: from its type if we have one, else from
     the mode it is passed in.  */
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;

#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      /* Variable offset: clamp it to at least the reserved
		 register area via a MAX_EXPR tree.  */
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      size_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  /* Offsets are negative in this case (see the comment preceding this
     function): start at minus the accumulated args size.  */
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  /* Round the size up to a whole number of PARM_BOUNDARY units unless
     it is already a compile-time multiple of that boundary.  */
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  /* Move the offset down past this parm's (rounded) size.  */
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  if (where_pad != downward)
    pad_to_arg_alignment (offset_ptr, boundary);
  /* The size actually consumed is the distance from the initial offset
     to the final (aligned) offset.  */
  if (initial_offset_ptr->var)
    {
      arg_size_ptr->var = size_binop (MINUS_EXPR,
				      size_binop (MINUS_EXPR,
						  integer_zero_node,
						  initial_offset_ptr->var),
				      offset_ptr->var);
    }
  else
    {
      arg_size_ptr->constant = (- initial_offset_ptr->constant
				- offset_ptr->constant);
    }
#else /* !ARGS_GROW_DOWNWARD */
  /* Align the starting offset first; the parm begins right there.  */
  pad_to_arg_alignment (initial_offset_ptr, boundary);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
      /* However, BLKmode args passed in regs have their padding done elsewhere.
	 The stack slot must be able to hold the entire register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  /* Round the size up to a multiple of PARM_BOUNDARY unless it is
     already known at compile time to be a multiple.  */
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
5061
e16c591a
RS
5062/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5063 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5064
6f086dfc
RS
static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  /* A boundary of one storage unit (or less) never needs padding.  */
  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  /* Variable offset: build a tree that rounds the combined
	     offset.  When args grow downward the offset is negative,
	     so round toward more-negative values (round_down);
	     otherwise round up.  */
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  /* The constant part is presumably folded into the tree by
	     ARGS_SIZE_TREE above, hence cleared here -- the original
	     author's "?" suggests some doubt too.  */
	  offset_ptr->constant = 0; /*?*/
	}
      else
	/* Purely constant offset: round arithmetically, in the
	   direction the argument area grows.  */
	offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
5095
51723711 5096#ifndef ARGS_GROW_DOWNWARD
6f086dfc
RS
5097static void
5098pad_below (offset_ptr, passed_mode, sizetree)
5099 struct args_size *offset_ptr;
5100 enum machine_mode passed_mode;
5101 tree sizetree;
5102{
5103 if (passed_mode != BLKmode)
5104 {
5105 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5106 offset_ptr->constant
5107 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5108 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5109 - GET_MODE_SIZE (passed_mode));
5110 }
5111 else
5112 {
5113 if (TREE_CODE (sizetree) != INTEGER_CST
5114 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5115 {
5116 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5117 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5118 /* Add it in. */
5119 ADD_PARM_SIZE (*offset_ptr, s2);
5120 SUB_PARM_SIZE (*offset_ptr, sizetree);
5121 }
5122 }
5123}
51723711 5124#endif
6f086dfc 5125
487a6e06 5126#ifdef ARGS_GROW_DOWNWARD
6f086dfc
RS
5127static tree
5128round_down (value, divisor)
5129 tree value;
5130 int divisor;
5131{
5132 return size_binop (MULT_EXPR,
5133 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5134 size_int (divisor));
5135}
487a6e06 5136#endif
6f086dfc
RS
5137\f
5138/* Walk the tree of blocks describing the binding levels within a function
5139 and warn about uninitialized variables.
5140 This is done after calling flow_analysis and before global_alloc
5141 clobbers the pseudo-regs to hard regs. */
5142
5143void
5144uninitialized_vars_warning (block)
5145 tree block;
5146{
5147 register tree decl, sub;
5148 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5149 {
5150 if (TREE_CODE (decl) == VAR_DECL
5151 /* These warnings are unreliable for and aggregates
5152 because assigning the fields one by one can fail to convince
5153 flow.c that the entire aggregate was initialized.
5154 Unions are troublesome because members may be shorter. */
05e3bdb9 5155 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
6f086dfc
RS
5156 && DECL_RTL (decl) != 0
5157 && GET_CODE (DECL_RTL (decl)) == REG
6acdd0fd
JL
5158 /* Global optimizations can make it difficult to determine if a
5159 particular variable has been initialized. However, a VAR_DECL
5160 with a nonzero DECL_INITIAL had an initializer, so do not
5161 claim it is potentially uninitialized.
5162
5163 We do not care about the actual value in DECL_INITIAL, so we do
5164 not worry that it may be a dangling pointer. */
5165 && DECL_INITIAL (decl) == NULL_TREE
6f086dfc
RS
5166 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5167 warning_with_decl (decl,
3c8cd8bd 5168 "`%s' might be used uninitialized in this function");
6f086dfc
RS
5169 if (TREE_CODE (decl) == VAR_DECL
5170 && DECL_RTL (decl) != 0
5171 && GET_CODE (DECL_RTL (decl)) == REG
5172 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5173 warning_with_decl (decl,
3c8cd8bd 5174 "variable `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
5175 }
5176 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5177 uninitialized_vars_warning (sub);
5178}
5179
5180/* Do the appropriate part of uninitialized_vars_warning
5181 but for arguments instead of local variables. */
5182
5183void
0cd6ef35 5184setjmp_args_warning ()
6f086dfc
RS
5185{
5186 register tree decl;
5187 for (decl = DECL_ARGUMENTS (current_function_decl);
5188 decl; decl = TREE_CHAIN (decl))
5189 if (DECL_RTL (decl) != 0
5190 && GET_CODE (DECL_RTL (decl)) == REG
5191 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3c8cd8bd 5192 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
5193}
5194
5195/* If this function call setjmp, put all vars into the stack
5196 unless they were declared `register'. */
5197
5198void
5199setjmp_protect (block)
5200 tree block;
5201{
5202 register tree decl, sub;
5203 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5204 if ((TREE_CODE (decl) == VAR_DECL
5205 || TREE_CODE (decl) == PARM_DECL)
5206 && DECL_RTL (decl) != 0
e9a25f70
JL
5207 && (GET_CODE (DECL_RTL (decl)) == REG
5208 || (GET_CODE (DECL_RTL (decl)) == MEM
5209 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
b335c2cc 5210 /* If this variable came from an inline function, it must be
9ec36da5 5211 that its life doesn't overlap the setjmp. If there was a
b335c2cc
TW
5212 setjmp in the function, it would already be in memory. We
5213 must exclude such variable because their DECL_RTL might be
5214 set to strange things such as virtual_stack_vars_rtx. */
5215 && ! DECL_FROM_INLINE (decl)
6f086dfc
RS
5216 && (
5217#ifdef NON_SAVING_SETJMP
5218 /* If longjmp doesn't restore the registers,
5219 don't put anything in them. */
5220 NON_SAVING_SETJMP
5221 ||
5222#endif
a82ad570 5223 ! DECL_REGISTER (decl)))
6f086dfc
RS
5224 put_var_into_stack (decl);
5225 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5226 setjmp_protect (sub);
5227}
5228\f
5229/* Like the previous function, but for args instead of local variables. */
5230
5231void
5232setjmp_protect_args ()
5233{
29a82058 5234 register tree decl;
6f086dfc
RS
5235 for (decl = DECL_ARGUMENTS (current_function_decl);
5236 decl; decl = TREE_CHAIN (decl))
5237 if ((TREE_CODE (decl) == VAR_DECL
5238 || TREE_CODE (decl) == PARM_DECL)
5239 && DECL_RTL (decl) != 0
e9a25f70
JL
5240 && (GET_CODE (DECL_RTL (decl)) == REG
5241 || (GET_CODE (DECL_RTL (decl)) == MEM
5242 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
6f086dfc
RS
5243 && (
5244 /* If longjmp doesn't restore the registers,
5245 don't put anything in them. */
5246#ifdef NON_SAVING_SETJMP
5247 NON_SAVING_SETJMP
5248 ||
5249#endif
a82ad570 5250 ! DECL_REGISTER (decl)))
6f086dfc
RS
5251 put_var_into_stack (decl);
5252}
5253\f
5254/* Return the context-pointer register corresponding to DECL,
5255 or 0 if it does not need one. */
5256
5257rtx
5258lookup_static_chain (decl)
5259 tree decl;
5260{
b001a02f
PB
5261 tree context = decl_function_context (decl);
5262 tree link;
7ad8c4bf 5263
38ee6ed9
JM
5264 if (context == 0
5265 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
7ad8c4bf 5266 return 0;
38ee6ed9 5267
6f086dfc
RS
5268 /* We treat inline_function_decl as an alias for the current function
5269 because that is the inline function whose vars, types, etc.
5270 are being merged into the current function.
5271 See expand_inline_function. */
5272 if (context == current_function_decl || context == inline_function_decl)
5273 return virtual_stack_vars_rtx;
5274
5275 for (link = context_display; link; link = TREE_CHAIN (link))
5276 if (TREE_PURPOSE (link) == context)
5277 return RTL_EXPR_RTL (TREE_VALUE (link));
5278
5279 abort ();
5280}
5281\f
5282/* Convert a stack slot address ADDR for variable VAR
5283 (from a containing function)
5284 into an address valid in this function (using a static chain). */
5285
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the saved compilation state of the containing function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* An ADDRESSOF of a MEM stands for that memory's address; strip it
     so the base+displacement decoding below sees the real address.  */
  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      /* The save slot itself lives in the outer frame, so its address
	 must be fixed up recursively before we can load through it.  */
      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
5370\f
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).

   The returned address is rounded to TRAMPOLINE_ALIGNMENT.  Trampolines
   are recorded on trampoline_list (or on the defining function's list)
   so a given nested function gets only one trampoline; the actual
   initialization insns are emitted later by expand_function_end.  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  /* Also search the trampoline lists of all containing functions; such a
     trampoline's address must be fixed up for use in this function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.
     fp stays 0 when FUNCTION is nested directly in the current function.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      /* Allocate the record on the defining function's obstack so it
	 lives as long as that function's other data.  */
      push_obstacks (fp->function_maybepermanent_obstack,
		     fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
5456
5457/* Given a trampoline address,
5458 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5459
5460static rtx
5461round_trampoline_addr (tramp)
5462 rtx tramp;
5463{
5464#ifdef TRAMPOLINE_ALIGNMENT
5465 /* Round address up to desired boundary. */
5466 rtx temp = gen_reg_rtx (Pmode);
5467 temp = expand_binop (Pmode, add_optab, tramp,
b02ab63a 5468 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
6f086dfc
RS
5469 temp, 0, OPTAB_LIB_WIDEN);
5470 tramp = expand_binop (Pmode, and_optab, temp,
b02ab63a 5471 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
6f086dfc
RS
5472 temp, 0, OPTAB_LIB_WIDEN);
5473#endif
5474 return tramp;
5475}
5476\f
467456d0
RS
5477/* The functions identify_blocks and reorder_blocks provide a way to
5478 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5479 duplicate portions of the RTL code. Call identify_blocks before
5480 changing the RTL, and call reorder_blocks after. */
5481
b2a59b15
MS
5482/* Put all this function's BLOCK nodes including those that are chained
5483 onto the first block into a vector, and return it.
467456d0
RS
5484 Also store in each NOTE for the beginning or end of a block
5485 the index of that block in the vector.
b2a59b15 5486 The arguments are BLOCK, the chain of top-level blocks of the function,
467456d0
RS
5487 and INSNS, the insn chain of the function. */
5488
5489tree *
b2a59b15
MS
5490identify_blocks (block, insns)
5491 tree block;
467456d0
RS
5492 rtx insns;
5493{
fc289cd1
JW
5494 int n_blocks;
5495 tree *block_vector;
5496 int *block_stack;
467456d0 5497 int depth = 0;
b2a59b15
MS
5498 int next_block_number = 1;
5499 int current_block_number = 1;
467456d0
RS
5500 rtx insn;
5501
b2a59b15 5502 if (block == 0)
fc289cd1
JW
5503 return 0;
5504
b2a59b15 5505 n_blocks = all_blocks (block, 0);
fc289cd1
JW
5506 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5507 block_stack = (int *) alloca (n_blocks * sizeof (int));
5508
b2a59b15 5509 all_blocks (block, block_vector);
467456d0
RS
5510
5511 for (insn = insns; insn; insn = NEXT_INSN (insn))
5512 if (GET_CODE (insn) == NOTE)
5513 {
5514 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5515 {
5516 block_stack[depth++] = current_block_number;
5517 current_block_number = next_block_number;
1b2ac438 5518 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
467456d0
RS
5519 }
5520 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5521 {
1b2ac438 5522 NOTE_BLOCK_NUMBER (insn) = current_block_number;
c7fdfd49 5523 current_block_number = block_stack[--depth];
467456d0
RS
5524 }
5525 }
5526
b2a59b15
MS
5527 if (n_blocks != next_block_number)
5528 abort ();
5529
467456d0
RS
5530 return block_vector;
5531}
5532
/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The rebuilt tree is hung below BLOCK, the chain of top-level
   blocks of the function.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, block, insns)
     tree *block_vector;
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  /* With no vector there is nothing to reorder.  */
  if (block_vector == 0)
    return block;

  /* Prune the old trees away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    /* Link the block in (subblocks are built in reverse and
	       fixed up by blocks_nreverse at the matching end-note).  */
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    /* Clearing the source file marks the note as processed.  */
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  /* Fix up the ordering of the top level's own subblocks.  */
  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
  return current_block;
}
5585
5586/* Reverse the order of elements in the chain T of blocks,
5587 and return the new head of the chain (old last element). */
5588
5589static tree
5590blocks_nreverse (t)
5591 tree t;
5592{
5593 register tree prev = 0, decl, next;
5594 for (decl = t; decl; decl = next)
5595 {
5596 next = BLOCK_CHAIN (decl);
5597 BLOCK_CHAIN (decl) = prev;
5598 prev = decl;
5599 }
5600 return prev;
5601}
5602
b2a59b15
MS
5603/* Count the subblocks of the list starting with BLOCK, and list them
5604 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5605 blocks. */
467456d0
RS
5606
5607static int
5608all_blocks (block, vector)
5609 tree block;
5610 tree *vector;
5611{
b2a59b15
MS
5612 int n_blocks = 0;
5613
5614 while (block)
5615 {
5616 TREE_ASM_WRITTEN (block) = 0;
5617
5618 /* Record this block. */
5619 if (vector)
5620 vector[n_blocks] = block;
5621
5622 ++n_blocks;
5623
5624 /* Record the subblocks, and their subblocks... */
5625 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5626 vector ? vector + n_blocks : 0);
5627 block = BLOCK_CHAIN (block);
5628 }
467456d0
RS
5629
5630 return n_blocks;
5631}
5632\f
6f086dfc
RS
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.

   FILENAME and LINE give the source position of the function's start;
   a line note is emitted only when LINE is positive.  This resets all
   of the per-function global state used by RTL generation.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */
  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  /* Clear the remaining per-function flags; they are set lazily as the
     corresponding constructs are encountered during expansion.  */
  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;
  current_function_sp_is_unchanging = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file. */
  if (line > 0)
    emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}
5782
/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.
   Sets the per-function flag tested elsewhere in RTL generation.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
5791
/* Expand a call to __main at the beginning of a possible main function.

   On targets with an init section (and no explicit INVOKE__main),
   constructors are run from that section instead, so no call to
   __main is emitted.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
5807\f
c20bf1f3
JB
extern struct obstack permanent_obstack;

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.

   This emits the function's entry setup: static chain handling, the
   return-value rtx, parameter assignment, the context display for
   nested functions, and optional entry instrumentation.  The order of
   these steps is significant.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
			       AGGREGATE_TYPE_P (TREE_TYPE
						 (DECL_RESULT
						  (subr))));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      /* Build the context display: one RTL_EXPR per containing
	 function, each holding that frame's static chain value.  */
      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode,
							       last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }

  /* Emit the entry-profiling call when instrumenting; the function's
     address must be a MEM whose address we can pass.  */
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
6066\f
6067/* Generate RTL for the end of the current function.
980697fd 6068 FILENAME and LINE are the current position in the source file.
6f086dfc 6069
980697fd 6070 It is up to language-specific callers to do cleanups for parameters--
1be07046 6071 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6f086dfc
RS
6072
6073void
1be07046 6074expand_function_end (filename, line, end_bindings)
6f086dfc
RS
6075 char *filename;
6076 int line;
1be07046 6077 int end_bindings;
6f086dfc
RS
6078{
6079 register int i;
6080 tree link;
6081
1e2414db 6082#ifdef TRAMPOLINE_TEMPLATE
6f086dfc 6083 static rtx initial_trampoline;
1e2414db 6084#endif
6f086dfc
RS
6085
6086#ifdef NON_SAVING_SETJMP
6087 /* Don't put any variables in registers if we call setjmp
6088 on a machine that fails to restore the registers. */
6089 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6090 {
b88a3142
RK
6091 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6092 setjmp_protect (DECL_INITIAL (current_function_decl));
6093
6f086dfc
RS
6094 setjmp_protect_args ();
6095 }
6096#endif
6097
6098 /* Save the argument pointer if a save area was made for it. */
6099 if (arg_pointer_save_area)
6100 {
ea0f9a85
JW
6101 /* arg_pointer_save_area may not be a valid memory address, so we
6102 have to check it and fix it if necessary. */
6103 rtx seq;
6104 start_sequence ();
6105 emit_move_insn (validize_mem (arg_pointer_save_area),
6106 virtual_incoming_args_rtx);
6107 seq = gen_sequence ();
6108 end_sequence ();
6109 emit_insn_before (seq, tail_recursion_reentry);
6f086dfc
RS
6110 }
6111
6112 /* Initialize any trampolines required by this function. */
6113 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6114 {
6115 tree function = TREE_PURPOSE (link);
6116 rtx context = lookup_static_chain (function);
6117 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
7a87758d 6118#ifdef TRAMPOLINE_TEMPLATE
1e2414db 6119 rtx blktramp;
7a87758d 6120#endif
6f086dfc
RS
6121 rtx seq;
6122
1e2414db 6123#ifdef TRAMPOLINE_TEMPLATE
6f086dfc
RS
6124 /* First make sure this compilation has a template for
6125 initializing trampolines. */
6126 if (initial_trampoline == 0)
86f8eff3
RK
6127 {
6128 end_temporary_allocation ();
6129 initial_trampoline
38a448ca 6130 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
86f8eff3
RK
6131 resume_temporary_allocation ();
6132 }
1e2414db 6133#endif
6f086dfc
RS
6134
6135 /* Generate insns to initialize the trampoline. */
6136 start_sequence ();
1e2414db
RK
6137 tramp = round_trampoline_addr (XEXP (tramp, 0));
6138#ifdef TRAMPOLINE_TEMPLATE
6139 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6140 emit_block_move (blktramp, initial_trampoline,
6141 GEN_INT (TRAMPOLINE_SIZE),
189cc377 6142 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
1e2414db
RK
6143#endif
6144 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6f086dfc
RS
6145 seq = get_insns ();
6146 end_sequence ();
6147
6148 /* Put those insns at entry to the containing function (this one). */
6149 emit_insns_before (seq, tail_recursion_reentry);
6150 }
6f086dfc 6151
11044f66
RK
6152 /* If we are doing stack checking and this function makes calls,
6153 do a stack probe at the start of the function to ensure we have enough
6154 space for another stack frame. */
6155 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6156 {
6157 rtx insn, seq;
6158
6159 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6160 if (GET_CODE (insn) == CALL_INSN)
6161 {
6162 start_sequence ();
6163 probe_stack_range (STACK_CHECK_PROTECT,
6164 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6165 seq = get_insns ();
6166 end_sequence ();
6167 emit_insns_before (seq, tail_recursion_reentry);
6168 break;
6169 }
6170 }
6171
db8717d9
RK
6172 /* Warn about unused parms if extra warnings were specified. */
6173 if (warn_unused && extra_warnings)
6f086dfc 6174 {
db8717d9 6175 tree decl;
6f086dfc
RS
6176
6177 for (decl = DECL_ARGUMENTS (current_function_decl);
6178 decl; decl = TREE_CHAIN (decl))
497dc802
JM
6179 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6180 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6f086dfc
RS
6181 warning_with_decl (decl, "unused parameter `%s'");
6182 }
6f086dfc
RS
6183
6184 /* Delete handlers for nonlocal gotos if nothing uses them. */
ba716ac9
BS
6185 if (nonlocal_goto_handler_slots != 0
6186 && ! current_function_has_nonlocal_label)
6f086dfc
RS
6187 delete_handlers ();
6188
6189 /* End any sequences that failed to be closed due to syntax errors. */
6190 while (in_sequence_p ())
5f4f0e22 6191 end_sequence ();
6f086dfc
RS
6192
6193 /* Outside function body, can't compute type's actual size
6194 until next function's body starts. */
6195 immediate_size_expand--;
6196
6197 /* If doing stupid register allocation,
6198 mark register parms as dying here. */
6199
6200 if (obey_regdecls)
6201 {
6202 rtx tem;
6203 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6204 use_variable (regno_reg_rtx[i]);
6205
6206 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6207
6208 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6209 {
6210 use_variable (XEXP (tem, 0));
6211 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6212 }
6213
6214 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6215 use_variable (current_function_internal_arg_pointer);
6216 }
6217
6218 clear_pending_stack_adjust ();
6219 do_pending_stack_adjust ();
6220
6221 /* Mark the end of the function body.
6222 If control reaches this insn, the function can drop through
6223 without returning a value. */
5f4f0e22 6224 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6f086dfc 6225
82e415a3
DE
6226 /* Must mark the last line number note in the function, so that the test
6227 coverage code can avoid counting the last line twice. This just tells
6228 the code to ignore the immediately following line note, since there
6229 already exists a copy of this note somewhere above. This line number
6230 note is still needed for debugging though, so we can't delete it. */
6231 if (flag_test_coverage)
6232 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6233
6f086dfc
RS
6234 /* Output a linenumber for the end of the function.
6235 SDB depends on this. */
6236 emit_line_note_force (filename, line);
6237
6238 /* Output the label for the actual return from the function,
6239 if one is expected. This happens either because a function epilogue
6240 is used instead of a return instruction, or because a return was done
6241 with a goto in order to run local cleanups, or because of pcc-style
6242 structure returning. */
6243
6244 if (return_label)
6245 emit_label (return_label);
6246
1be07046
RS
6247 /* C++ uses this. */
6248 if (end_bindings)
6249 expand_end_bindings (0, 0, 0);
6250
e5a1e0e8
MS
6251 /* Now handle any leftover exception regions that may have been
6252 created for the parameters. */
6253 {
6254 rtx last = get_last_insn ();
6255 rtx label;
6256
6257 expand_leftover_cleanups ();
6258
6259 /* If the above emitted any code, may sure we jump around it. */
6260 if (last != get_last_insn ())
6261 {
6262 label = gen_label_rtx ();
6263 last = emit_jump_insn_after (gen_jump (label), last);
6264 last = emit_barrier_after (last);
6265 emit_label (label);
6266 }
6267 }
6268
07417085
KR
6269 if (current_function_instrument_entry_exit)
6270 {
6271 rtx fun = DECL_RTL (current_function_decl);
6272 if (GET_CODE (fun) == MEM)
6273 fun = XEXP (fun, 0);
6274 else
6275 abort ();
6276 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6277 fun, Pmode,
6278 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6279 0,
6280 hard_frame_pointer_rtx),
6281 Pmode);
6282 }
6283
6f086dfc
RS
6284 /* If we had calls to alloca, and this machine needs
6285 an accurate stack pointer to exit the function,
6286 insert some code to save and restore the stack pointer. */
6287#ifdef EXIT_IGNORE_STACK
6288 if (! EXIT_IGNORE_STACK)
6289#endif
6290 if (current_function_calls_alloca)
6291 {
59257ff7
RK
6292 rtx tem = 0;
6293
6294 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5f4f0e22 6295 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6f086dfc
RS
6296 }
6297
6298 /* If scalar return value was computed in a pseudo-reg,
6299 copy that to the hard return register. */
6300 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6301 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6302 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6303 >= FIRST_PSEUDO_REGISTER))
6304 {
6305 rtx real_decl_result;
6306
6307#ifdef FUNCTION_OUTGOING_VALUE
6308 real_decl_result
6309 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6310 current_function_decl);
6311#else
6312 real_decl_result
6313 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6314 current_function_decl);
6315#endif
6316 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
ecec4441
JW
6317 /* If this is a BLKmode structure being returned in registers, then use
6318 the mode computed in expand_return. */
6319 if (GET_MODE (real_decl_result) == BLKmode)
6320 PUT_MODE (real_decl_result,
6321 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6f086dfc
RS
6322 emit_move_insn (real_decl_result,
6323 DECL_RTL (DECL_RESULT (current_function_decl)));
38a448ca 6324 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
f345de42
JL
6325
6326 /* The delay slot scheduler assumes that current_function_return_rtx
6327 holds the hard register containing the return value, not a temporary
6328 pseudo. */
6329 current_function_return_rtx = real_decl_result;
6f086dfc
RS
6330 }
6331
6332 /* If returning a structure, arrange to return the address of the value
6333 in a place where debuggers expect to find it.
6334
6335 If returning a structure PCC style,
6336 the caller also depends on this value.
6337 And current_function_returns_pcc_struct is not necessarily set. */
6338 if (current_function_returns_struct
6339 || current_function_returns_pcc_struct)
6340 {
6341 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6342 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6343#ifdef FUNCTION_OUTGOING_VALUE
6344 rtx outgoing
6345 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6346 current_function_decl);
6347#else
6348 rtx outgoing
6349 = FUNCTION_VALUE (build_pointer_type (type),
6350 current_function_decl);
6351#endif
6352
6353 /* Mark this as a function return value so integrate will delete the
6354 assignment and USE below when inlining this function. */
6355 REG_FUNCTION_VALUE_P (outgoing) = 1;
6356
6357 emit_move_insn (outgoing, value_address);
6358 use_variable (outgoing);
6359 }
6360
71038426
RH
6361 /* If this is an implementation of __throw, do what's necessary to
6362 communicate between __builtin_eh_return and the epilogue. */
6363 expand_eh_return ();
6364
6f086dfc
RS
6365 /* Output a return insn if we are using one.
6366 Otherwise, let the rtl chain end here, to drop through
6367 into the epilogue. */
6368
6369#ifdef HAVE_return
6370 if (HAVE_return)
6371 {
6372 emit_jump_insn (gen_return ());
6373 emit_barrier ();
6374 }
6375#endif
6376
6377 /* Fix up any gotos that jumped out to the outermost
6378 binding level of the function.
6379 Must follow emitting RETURN_LABEL. */
6380
6381 /* If you have any cleanups to do at this point,
6382 and they need to create temporary variables,
6383 then you will lose. */
e15679f8 6384 expand_fixups (get_insns ());
6f086dfc 6385}
bdac5f58
TW
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.
   Each is a zero-terminated vector allocated by record_insns (a UID of 0
   marks the end), or 0 when no prologue/epilogue has been generated.  */

static int *prologue;
static int *epilogue;
6391
6392/* Create an array that records the INSN_UIDs of INSNS (either a sequence
6393 or a single insn). */
6394
487a6e06 6395#if defined (HAVE_prologue) || defined (HAVE_epilogue)
bdac5f58
TW
6396static int *
6397record_insns (insns)
6398 rtx insns;
6399{
6400 int *vec;
6401
6402 if (GET_CODE (insns) == SEQUENCE)
6403 {
6404 int len = XVECLEN (insns, 0);
6405 vec = (int *) oballoc ((len + 1) * sizeof (int));
6406 vec[len] = 0;
6407 while (--len >= 0)
6408 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6409 }
6410 else
6411 {
6412 vec = (int *) oballoc (2 * sizeof (int));
6413 vec[0] = INSN_UID (insns);
6414 vec[1] = 0;
6415 }
6416 return vec;
6417}
6418
10914065 6419/* Determine how many INSN_UIDs in VEC are part of INSN. */
bdac5f58 6420
10914065 6421static int
bdac5f58
TW
6422contains (insn, vec)
6423 rtx insn;
6424 int *vec;
6425{
6426 register int i, j;
6427
6428 if (GET_CODE (insn) == INSN
6429 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6430 {
10914065 6431 int count = 0;
bdac5f58
TW
6432 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6433 for (j = 0; vec[j]; j++)
6434 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
10914065
TW
6435 count++;
6436 return count;
bdac5f58
TW
6437 }
6438 else
6439 {
6440 for (j = 0; vec[j]; j++)
6441 if (INSN_UID (insn) == vec[j])
10914065 6442 return 1;
bdac5f58
TW
6443 }
6444 return 0;
6445}
081f5e7e 6446#endif /* HAVE_prologue || HAVE_epilogue */
bdac5f58 6447
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   F is the first insn of the function; it is used as the fallback insertion
   point when the CFG provides no entry edge.  As side effects, this fills
   in the file-scope `prologue' and `epilogue' UID vectors (via record_insns)
   so that reposition_prologue_and_epilogue_notes can find these insns
   later.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
  /* Nonzero if the prologue was placed on an edge and we must commit the
     pending edge insertions before returning.  */
  int insertted = 0;

  prologue = 0;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  insertted = 1;
	}
      else
	/* No entry edge available; place the prologue right after the
	   first insn of the function.  */
	emit_insn_after (seq, f);
    }
#endif

  epilogue = 0;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      switch (optimize)
	{
	default:
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */

	  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    break;

	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we've exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;

	  /* FALLTHRU */
	case 0:
	  {
	    rtx prev, seq, first_use;

	    /* Move the USE insns at the end of a function onto a list.
	       They are detached here and re-emitted just before the
	       return insn below so the return value registers stay
	       marked live through the epilogue.  */
	    prev = tail;
	    if (GET_CODE (prev) == BARRIER
		|| GET_CODE (prev) == NOTE)
	      prev = prev_nonnote_insn (prev);

	    first_use = 0;
	    if (prev
		&& GET_CODE (prev) == INSN
		&& GET_CODE (PATTERN (prev)) == USE)
	      {
		/* If the end of the block is the use, grab hold of something
		   else so that we emit barriers etc in the right place.  */
		if (prev == tail)
		  {
		    do
		      tail = PREV_INSN (tail);
		    while (GET_CODE (tail) == INSN
			   && GET_CODE (PATTERN (tail)) == USE);
		  }

		/* Unlink each trailing USE insn and push it onto the
		   first_use list, preserving their original order.  */
		do
		  {
		    rtx use = prev;
		    prev = prev_nonnote_insn (prev);

		    remove_insn (use);
		    if (first_use)
		      {
			NEXT_INSN (use) = first_use;
			PREV_INSN (first_use) = use;
		      }
		    else
		      NEXT_INSN (use) = NULL_RTX;
		    first_use = use;
		  }
		while (prev
		       && GET_CODE (prev) == INSN
		       && GET_CODE (PATTERN (prev)) == USE);
	      }

	    /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	       epilogue insns, the USE insns at the end of a function,
	       the jump insn that returns, and then a BARRIER.  */

	    if (GET_CODE (tail) != BARRIER)
	      {
		prev = next_nonnote_insn (tail);
		if (!prev || GET_CODE (prev) != BARRIER)
		  emit_barrier_after (tail);
	      }

	    seq = gen_epilogue ();
	    prev = tail;
	    tail = emit_jump_insn_after (seq, tail);

	    /* Insert the USE insns immediately before the return insn, which
	       must be the last instruction emitted in the sequence.  */
	    if (first_use)
	      emit_insns_before (first_use, tail);
	    emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	    /* Update the tail of the basic block.  */
	    if (bb)
	      bb->end = tail;

	    /* Retain a map of the epilogue insns.  */
	    epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  }
	}
    }
#endif

  if (insertted)
    commit_edge_insertions ();
}
6621
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.

   F is the first insn of the function.  Scheduling may have moved insns
   across the NOTE_INSN_PROLOGUE_END / NOTE_INSN_EPILOGUE_BEG notes, so we
   re-locate each note next to the last prologue insn (resp. first epilogue
   insn), identified via the `prologue'/`epilogue' UID vectors recorded by
   thread_prologue_and_epilogue_insns.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  /* Count the recorded prologue insns; LEN reaches 0 below once
	     all of them have been seen.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
This page took 1.43619 seconds and 5 git commands to generate.