/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
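
/* For illustration only (worked example, not in the original file):
   assuming two's complement arithmetic and a power-of-two ALIGN,
     CEIL_ROUND (37, 8)   == (37 + 7) & ~7 == 40
     FLOOR_ROUND (-37, 8) == -37 & ~7      == -40
   i.e. the macros round toward plus and minus infinity respectively,
   using only a mask that clears the low bits.  */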

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
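
/* For illustration only (hypothetical code, not from this file): in
     foo (bar (x), bar (y));
   each call to bar returning a structure in memory gets its own
   temporary at the statement's nesting level, and free_temp_slots
   releases both once the statement has been expanded.  */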

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
                                              int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int,
                                      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
                                  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
                                     struct hash_table *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
                                                       struct hash_table *,
                                                       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->decl = current_function_decl;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p, context);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  current_function = p;
  outer_function_chain = p->next;

  current_function_contains_functions
    |= p->inline_obstacks || context == current_function_decl;
  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_tree_status (p, context);
  restore_emit_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
                    queue->unsignedp, 0);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}
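
/* For illustration only (not in the original file): on a target where
   FRAME_GROWS_DOWNWARD is defined, allocating a 16-byte slot moves
   frame_offset to -16, and get_frame_size then reports 16.  */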

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
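
/* For illustration only (hypothetical caller, not from this file):
     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
   yields a MEM addressed off virtual_stack_vars_rtx during rtl
   generation, or off frame_pointer_rtx once virtual registers have
   been instantiated.  */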

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  addr = plus_constant (virtual_stack_vars_rtx,
                        function->x_frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  pop_obstacks ();

  return x;
}
551\f
552/* Allocate a temporary stack slot and record it for possible later
553 reuse.
554
555 MODE is the machine mode to be given to the returned rtx.
556
557 SIZE is the size in units of the space required. We do no rounding here
558 since assign_stack_local will do any required rounding.
559
d93d4205
MS
560 KEEP is 1 if this slot is to be retained after a call to
561 free_temp_slots. Automatic variables for a block are allocated
e5e809f4
JL
562 with this flag. KEEP is 2 if we allocate a longer term temporary,
563 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
564 if we are to allocate something at an inner level to be treated as
a4c6502a
MM
565 a variable in the block (e.g., a SAVE_EXPR).
566
567 TYPE is the type that will be used for the stack slot. */
6f086dfc 568
d16790f2
JW
569static rtx
570assign_stack_temp_for_type (mode, size, keep, type)
6f086dfc 571 enum machine_mode mode;
e5e809f4 572 HOST_WIDE_INT size;
6f086dfc 573 int keep;
d16790f2 574 tree type;
6f086dfc 575{
d16790f2 576 int align;
a4c6502a 577 int alias_set;
6f086dfc
RS
578 struct temp_slot *p, *best_p = 0;
579
303ec2aa
RK
580 /* If SIZE is -1 it means that somebody tried to allocate a temporary
581 of a variable size. */
582 if (size == -1)
583 abort ();
584
a4c6502a
MM
585 /* If we know the alias set for the memory that will be used, use
586 it. If there's no TYPE, then we don't know anything about the
587 alias set for the memory. */
588 if (type)
589 alias_set = get_alias_set (type);
590 else
591 alias_set = 0;
592
d16790f2
JW
593 align = GET_MODE_ALIGNMENT (mode);
594 if (mode == BLKmode)
595 align = BIGGEST_ALIGNMENT;
6f086dfc 596
d16790f2
JW
597 if (! type)
598 type = type_for_mode (mode, 0);
599 if (type)
600 align = LOCAL_ALIGNMENT (type, align);
601
602 /* Try to find an available, already-allocated temporary of the proper
603 mode which meets the size and alignment requirements. Choose the
604 smallest one with the closest alignment. */
605 for (p = temp_slots; p; p = p->next)
606 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
607 && ! p->in_use
a4c6502a
MM
608 && (!flag_strict_aliasing
609 || (alias_set && p->alias_set == alias_set))
d16790f2
JW
610 && (best_p == 0 || best_p->size > p->size
611 || (best_p->size == p->size && best_p->align > p->align)))
612 {
613 if (p->align == align && p->size == size)
614 {
615 best_p = 0;
616 break;
617 }
6f086dfc 618 best_p = p;
d16790f2 619 }
6f086dfc
RS
620
621 /* Make our best, if any, the one to use. */
622 if (best_p)
a45035b6
JW
623 {
624 /* If there are enough aligned bytes left over, make them into a new
625 temp_slot so that the extra bytes don't get wasted. Do this only
626 for BLKmode slots, so that we can be sure of the alignment. */
a4c6502a
MM
627 if (GET_MODE (best_p->slot) == BLKmode
628 /* We can't split slots if -fstrict-aliasing because the
629 information about the alias set for the new slot will be
630 lost. */
631 && !flag_strict_aliasing)
a45035b6 632 {
d16790f2 633 int alignment = best_p->align / BITS_PER_UNIT;
e5e809f4 634 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
635
636 if (best_p->size - rounded_size >= alignment)
637 {
638 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
a25d4ba2 639 p->in_use = p->addr_taken = 0;
a45035b6 640 p->size = best_p->size - rounded_size;
307d8cd6
RK
641 p->base_offset = best_p->base_offset + rounded_size;
642 p->full_size = best_p->full_size - rounded_size;
38a448ca
RH
643 p->slot = gen_rtx_MEM (BLKmode,
644 plus_constant (XEXP (best_p->slot, 0),
645 rounded_size));
d16790f2 646 p->align = best_p->align;
e5e76139 647 p->address = 0;
84e24c03 648 p->rtl_expr = 0;
a45035b6
JW
649 p->next = temp_slots;
650 temp_slots = p;
651
38a448ca
RH
652 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
653 stack_slot_list);
a45035b6
JW
654
655 best_p->size = rounded_size;
291dde90 656 best_p->full_size = rounded_size;
a45035b6
JW
657 }
658 }
659
660 p = best_p;
661 }
662
6f086dfc
RS
663 /* If we still didn't find one, make a new temporary. */
664 if (p == 0)
665 {
e5e809f4
JL
666 HOST_WIDE_INT frame_offset_old = frame_offset;
667
6f086dfc 668 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
e5e809f4 669
c87a0a39
JL
670 /* We are passing an explicit alignment request to assign_stack_local.
671 One side effect of that is assign_stack_local will not round SIZE
672 to ensure the frame offset remains suitably aligned.
673
674 So for requests which depended on the rounding of SIZE, we go ahead
675 and round it now. We also make sure ALIGNMENT is at least
676 BIGGEST_ALIGNMENT. */
010529e5 677 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
6f67a30d
JW
678 abort();
679 p->slot = assign_stack_local (mode,
010529e5
AS
680 (mode == BLKmode
681 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
682 : size),
6f67a30d 683 align);
d16790f2
JW
684
685 p->align = align;
a4c6502a 686 p->alias_set = alias_set;
e5e809f4 687
b2a80c0d
DE
688 /* The following slot size computation is necessary because we don't
689 know the actual size of the temporary slot until assign_stack_local
690 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
691 requested temporary. Note that extra space added for alignment
692 can be either above or below this stack slot depending on which
693 way the frame grows. We include the extra space if and only if it
694 is above this slot. */
b2a80c0d
DE
695#ifdef FRAME_GROWS_DOWNWARD
696 p->size = frame_offset_old - frame_offset;
697#else
fc91b0d0
RK
698 p->size = size;
699#endif
e5e809f4 700
fc91b0d0
RK
701 /* Now define the fields used by combine_temp_slots. */
702#ifdef FRAME_GROWS_DOWNWARD
703 p->base_offset = frame_offset;
704 p->full_size = frame_offset_old - frame_offset;
705#else
706 p->base_offset = frame_offset_old;
707 p->full_size = frame_offset - frame_offset_old;
b2a80c0d 708#endif
e5e76139 709 p->address = 0;
6f086dfc
RS
710 p->next = temp_slots;
711 temp_slots = p;
712 }
713
714 p->in_use = 1;
a25d4ba2 715 p->addr_taken = 0;
49ad7cfa 716 p->rtl_expr = seq_rtl_expr;
a25d4ba2 717
d93d4205
MS
718 if (keep == 2)
719 {
720 p->level = target_temp_slot_level;
721 p->keep = 0;
722 }
e5e809f4
JL
723 else if (keep == 3)
724 {
725 p->level = var_temp_slot_level;
726 p->keep = 0;
727 }
d93d4205
MS
728 else
729 {
730 p->level = temp_slot_level;
731 p->keep = keep;
732 }
1995f267
RK
733
734 /* We may be reusing an old slot, so clear any MEM flags that may have been
735 set from before. */
736 RTX_UNCHANGING_P (p->slot) = 0;
737 MEM_IN_STRUCT_P (p->slot) = 0;
c6df88cb
MM
738 MEM_SCALAR_P (p->slot) = 0;
739 MEM_ALIAS_SET (p->slot) = 0;
6f086dfc
RS
740 return p->slot;
741}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
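
/* For illustration only (hypothetical call, not from this file):
     rtx t = assign_temp (type, 1, 1, 0);
   forces an addressable stack temporary for TYPE whose slot survives
   free_temp_slots (KEEP == 1) until its nesting level is popped.  */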

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
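
/* For illustration only (offsets are made up): a freed BLKmode slot
   with base_offset 0 and full_size 16 merges with a freed neighbor at
   base_offset 16, producing one reusable 32-byte slot.  */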

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
422c8f63
RK
1019/* X is the result of an RTL_EXPR. If it is a temporary slot associated
1020 with that RTL_EXPR, promote it into a temporary slot at the present
1021 level so it will not be freed when we free slots made in the
1022 RTL_EXPR. */
1023
1024void
1025preserve_rtl_expr_result (x)
1026 rtx x;
1027{
1028 struct temp_slot *p;
1029
1030 /* If X is not in memory or is at a constant address, it cannot be in
1031 a temporary slot. */
1032 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1033 return;
1034
199b61d8
RK
1035 /* If we can find a match, move it to our level unless it is already at
1036 an upper level. */
1037 p = find_temp_slot_from_address (XEXP (x, 0));
1038 if (p != 0)
1039 {
1040 p->level = MIN (p->level, temp_slot_level);
1041 p->rtl_expr = 0;
1042 }
422c8f63
RK
1043
1044 return;
1045}
1046
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                            0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
1303/* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1304 into the stack frame of FUNCTION (0 means the current function).
1305 DECL_MODE is the machine mode of the user-level data type.
0006e95b 1306 PROMOTED_MODE is the machine mode of the register.
e5e809f4
JL
1307 VOLATILE_P is nonzero if this is for a "volatile" decl.
1308 USED_P is nonzero if this reg might have already been used in an insn. */
293e3de4
RS
1309
1310static void
e9a25f70 1311put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
fe9b4957 1312 original_regno, used_p, ht)
293e3de4
RS
1313 struct function *function;
1314 rtx reg;
1315 tree type;
1316 enum machine_mode promoted_mode, decl_mode;
0006e95b 1317 int volatile_p;
e9a25f70 1318 int original_regno;
e5e809f4 1319 int used_p;
fe9b4957 1320 struct hash_table *ht;
293e3de4
RS
1321{
1322 rtx new = 0;
e9a25f70
JL
1323 int regno = original_regno;
1324
1325 if (regno == 0)
1326 regno = REGNO (reg);
6f086dfc
RS
1327
1328 if (function)
1329 {
49ad7cfa
BS
1330 if (regno < function->x_max_parm_reg)
1331 new = function->x_parm_reg_stack_loc[regno];
6f086dfc 1332 if (new == 0)
e15762df 1333 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
6f086dfc
RS
1334 0, function);
1335 }
1336 else
1337 {
e9a25f70
JL
1338 if (regno < max_parm_reg)
1339 new = parm_reg_stack_loc[regno];
6f086dfc 1340 if (new == 0)
e15762df 1341 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
6f086dfc
RS
1342 }
1343
0006e95b 1344 PUT_MODE (reg, decl_mode);
6f086dfc
RS
1345 XEXP (reg, 0) = XEXP (new, 0);
1346 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
0006e95b 1347 MEM_VOLATILE_P (reg) = volatile_p;
6f086dfc
RS
1348 PUT_CODE (reg, MEM);
1349
1350 /* If this is a memory ref that contains aggregate components,
bdd3e6ab
JW
1351 mark it as such for cse and loop optimize. If we are reusing a
1352 previously generated stack slot, then we need to copy the bit in
1353 case it was set for other reasons. For instance, it is set for
1354 __builtin_va_alist. */
c6df88cb
MM
1355 MEM_SET_IN_STRUCT_P (reg,
1356 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
41472af8 1357 MEM_ALIAS_SET (reg) = get_alias_set (type);
6f086dfc
RS
1358
1359 /* Now make sure that all refs to the variable, previously made
1360 when it was a register, are fixed up to be valid again. */
e5e809f4
JL
1361
1362 if (used_p && function != 0)
6f086dfc
RS
1363 {
1364 struct var_refs_queue *temp;
1365
1366 /* Variable is inherited; fix it up when we get back to its function. */
1367 push_obstacks (function->function_obstack,
1368 function->function_maybepermanent_obstack);
4da73fa0
RK
1369
1370 /* See comment in restore_tree_status in tree.c for why this needs to be
1371 on saveable obstack. */
6f086dfc 1372 temp
4da73fa0 1373 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
6f086dfc 1374 temp->modified = reg;
00d8a4c1 1375 temp->promoted_mode = promoted_mode;
293e3de4 1376 temp->unsignedp = TREE_UNSIGNED (type);
6f086dfc
RS
1377 temp->next = function->fixup_var_refs_queue;
1378 function->fixup_var_refs_queue = temp;
1379 pop_obstacks ();
1380 }
e5e809f4 1381 else if (used_p)
6f086dfc 1382 /* Variable is local; fix it up now. */
fe9b4957 1383 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
6f086dfc
RS
1384}

static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
                        stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
                                0);
          end_sequence ();
        }
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
                        0, 0);
  end_sequence ();
}
e15679f8 1437/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
6f086dfc 1438 some part of an insn. Return a struct fixup_replacement whose OLD
0f41302f 1439 value is equal to X. Allocate a new structure if no such entry exists. */
6f086dfc
RS
1440
1441static struct fixup_replacement *
2740a678 1442find_fixup_replacement (replacements, x)
6f086dfc
RS
1443 struct fixup_replacement **replacements;
1444 rtx x;
1445{
1446 struct fixup_replacement *p;
1447
1448 /* See if we have already replaced this. */
1449 for (p = *replacements; p && p->old != x; p = p->next)
1450 ;
1451
1452 if (p == 0)
1453 {
1454 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1455 p->old = x;
1456 p->new = 0;
1457 p->next = *replacements;
1458 *replacements = p;
1459 }
1460
1461 return p;
1462}
1463
1464/* Scan the insn-chain starting with INSN for refs to VAR
1465 and fix them up. TOPLEVEL is nonzero if this chain is the
1466 main chain of insns for the current function. */
1467
1468static void
fe9b4957 1469fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
6f086dfc 1470 rtx var;
00d8a4c1
RK
1471 enum machine_mode promoted_mode;
1472 int unsignedp;
6f086dfc
RS
1473 rtx insn;
1474 int toplevel;
fe9b4957 1475 struct hash_table *ht;
6f086dfc 1476{
02a10449 1477 rtx call_dest = 0;
07444f1d 1478 rtx insn_list = NULL_RTX;
fe9b4957
MM
1479
1480 /* If we already know which INSNs reference VAR there's no need
1481 to walk the entire instruction chain. */
1482 if (ht)
1483 {
1484 insn_list = ((struct insns_for_mem_entry *)
1485 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1486 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1487 insn_list = XEXP (insn_list, 1);
1488 }
02a10449 1489
6f086dfc
RS
1490 while (insn)
1491 {
1492 rtx next = NEXT_INSN (insn);
e5e809f4 1493 rtx set, prev, prev_set;
6f086dfc 1494 rtx note;
e5e809f4 1495
e15762df 1496 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6f086dfc 1497 {
63770d6a
RK
1498 /* If this is a CLOBBER of VAR, delete it.
1499
1500 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1501 and REG_RETVAL notes too. */
926d1ca5 1502 if (GET_CODE (PATTERN (insn)) == CLOBBER
07362cb3
JW
1503 && (XEXP (PATTERN (insn), 0) == var
1504 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1505 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1506 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
63770d6a
RK
1507 {
1508 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1509 /* The REG_LIBCALL note will go away since we are going to
1510 turn INSN into a NOTE, so just delete the
1511 corresponding REG_RETVAL note. */
1512 remove_note (XEXP (note, 0),
1513 find_reg_note (XEXP (note, 0), REG_RETVAL,
1514 NULL_RTX));
1515
1516 /* In unoptimized compilation, we shouldn't call delete_insn
1517 except in jump.c doing warnings. */
1518 PUT_CODE (insn, NOTE);
1519 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1520 NOTE_SOURCE_FILE (insn) = 0;
1521 }
1522
6f086dfc 1523 /* The insn to load VAR from a home in the arglist
e5e809f4
JL
1524 is now a no-op. When we see it, just delete it.
1525 Similarly if this is storing VAR from a register from which
1526 it was loaded in the previous insn. This will occur
1527 when an ADDRESSOF was made for an arglist slot. */
63770d6a 1528 else if (toplevel
e5e809f4
JL
1529 && (set = single_set (insn)) != 0
1530 && SET_DEST (set) == var
63770d6a
RK
1531 /* If this represents the result of an insn group,
1532 don't delete the insn. */
1533 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
e5e809f4
JL
1534 && (rtx_equal_p (SET_SRC (set), var)
1535 || (GET_CODE (SET_SRC (set)) == REG
1536 && (prev = prev_nonnote_insn (insn)) != 0
1537 && (prev_set = single_set (prev)) != 0
1538 && SET_DEST (prev_set) == SET_SRC (set)
1539 && rtx_equal_p (SET_SRC (prev_set), var))))
6f086dfc 1540 {
b4ff474c
RS
1541 /* In unoptimized compilation, we shouldn't call delete_insn
1542 except in jump.c doing warnings. */
1543 PUT_CODE (insn, NOTE);
1544 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1545 NOTE_SOURCE_FILE (insn) = 0;
6f086dfc
RS
1546 if (insn == last_parm_insn)
1547 last_parm_insn = PREV_INSN (next);
1548 }
1549 else
1550 {
02a10449
RK
1551 struct fixup_replacement *replacements = 0;
1552 rtx next_insn = NEXT_INSN (insn);
1553
e9a25f70
JL
1554 if (SMALL_REGISTER_CLASSES)
1555 {
1556 /* If the insn that copies the results of a CALL_INSN
1557 into a pseudo now references VAR, we have to use an
1558 intermediate pseudo since we want the life of the
1559 return value register to be only a single insn.
02a10449 1560
e9a25f70
JL
1561 If we don't use an intermediate pseudo, such things as
1562 address computations to make the address of VAR valid
1563 if it is not can be placed between the CALL_INSN and INSN.
02a10449 1564
e9a25f70
JL
1565 To make sure this doesn't happen, we record the destination
1566 of the CALL_INSN and see if the next insn uses both that
1567 and VAR. */
02a10449 1568
f95182a4
ILT
1569 if (call_dest != 0 && GET_CODE (insn) == INSN
1570 && reg_mentioned_p (var, PATTERN (insn))
1571 && reg_mentioned_p (call_dest, PATTERN (insn)))
1572 {
1573 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
02a10449 1574
f95182a4 1575 emit_insn_before (gen_move_insn (temp, call_dest), insn);
02a10449 1576
f95182a4
ILT
1577 PATTERN (insn) = replace_rtx (PATTERN (insn),
1578 call_dest, temp);
1579 }
02a10449 1580
f95182a4
ILT
1581 if (GET_CODE (insn) == CALL_INSN
1582 && GET_CODE (PATTERN (insn)) == SET)
1583 call_dest = SET_DEST (PATTERN (insn));
1584 else if (GET_CODE (insn) == CALL_INSN
1585 && GET_CODE (PATTERN (insn)) == PARALLEL
1586 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1587 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1588 else
1589 call_dest = 0;
1590 }
02a10449 1591
6f086dfc
RS
1592 /* See if we have to do anything to INSN now that VAR is in
1593 memory. If it needs to be loaded into a pseudo, use a single
1594 pseudo for the entire insn in case there is a MATCH_DUP
1595 between two operands. We pass a pointer to the head of
1596 a list of struct fixup_replacements. If fixup_var_refs_1
1597 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1598 it will record them in this list.
1599
1600 If it allocated a pseudo for any replacement, we copy into
1601 it here. */
1602
00d8a4c1
RK
1603 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1604 &replacements);
6f086dfc 1605
77121fee
JW
1606 /* If this is last_parm_insn, and any instructions were output
1607 after it to fix it up, then we must set last_parm_insn to
1608 the last such instruction emitted. */
1609 if (insn == last_parm_insn)
1610 last_parm_insn = PREV_INSN (next_insn);
1611
6f086dfc
RS
1612 while (replacements)
1613 {
1614 if (GET_CODE (replacements->new) == REG)
1615 {
1616 rtx insert_before;
00d8a4c1 1617 rtx seq;
6f086dfc
RS
1618
1619 /* OLD might be a (subreg (mem)). */
1620 if (GET_CODE (replacements->old) == SUBREG)
1621 replacements->old
1622 = fixup_memory_subreg (replacements->old, insn, 0);
1623 else
1624 replacements->old
1625 = fixup_stack_1 (replacements->old, insn);
1626
5fa7422b 1627 insert_before = insn;
6f086dfc 1628
00d8a4c1
RK
1629 /* If we are changing the mode, do a conversion.
1630 This might be wasteful, but combine.c will
1631 eliminate much of the waste. */
1632
1633 if (GET_MODE (replacements->new)
1634 != GET_MODE (replacements->old))
1635 {
1636 start_sequence ();
1637 convert_move (replacements->new,
1638 replacements->old, unsignedp);
1639 seq = gen_sequence ();
1640 end_sequence ();
1641 }
1642 else
1643 seq = gen_move_insn (replacements->new,
1644 replacements->old);
1645
1646 emit_insn_before (seq, insert_before);
6f086dfc
RS
1647 }
1648
1649 replacements = replacements->next;
1650 }
1651 }
1652
1653 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1654 But don't touch other insns referred to by reg-notes;
1655 we will get them elsewhere. */
1656 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1657 if (GET_CODE (note) != INSN_LIST)
ab6155b7
RK
1658 XEXP (note, 0)
1659 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
6f086dfc 1660 }
fe9b4957
MM
1661
1662 if (!ht)
1663 insn = next;
1664 else if (insn_list)
1665 {
1666 insn = XEXP (insn_list, 0);
1667 insn_list = XEXP (insn_list, 1);
1668 }
1669 else
1670 insn = NULL_RTX;
6f086dfc
RS
1671 }
1672}
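
/* Illustrative example (not part of the original source): if a parm
   was loaded by (set VAR (mem:SI arg-slot)) and VAR has since been
   given that very arg-slot MEM as its home, the insn has become
   (set (mem:SI slot) (mem:SI slot)) -- a no-op -- which is why the
   loop above turns such insns into NOTE_INSN_DELETED notes rather
   than leaving them in the stream.  */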
1673\f
00d8a4c1
RK
1674/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1675 See if the rtx expression at *LOC in INSN needs to be changed.
6f086dfc
RS
1676
1677 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1678 contain a list of original rtx's and replacements. If we find that we need
1679 to modify this insn by replacing a memory reference with a pseudo or by
1680 making a new MEM to implement a SUBREG, we consult that list to see if
1681 we have already chosen a replacement. If none has already been allocated,
1682 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1683 or the SUBREG, as appropriate, to the pseudo. */
1684
1685static void
00d8a4c1 1686fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
6f086dfc 1687 register rtx var;
00d8a4c1 1688 enum machine_mode promoted_mode;
6f086dfc
RS
1689 register rtx *loc;
1690 rtx insn;
1691 struct fixup_replacement **replacements;
1692{
1693 register int i;
1694 register rtx x = *loc;
1695 RTX_CODE code = GET_CODE (x);
6f7d635c 1696 register const char *fmt;
6f086dfc
RS
1697 register rtx tem, tem1;
1698 struct fixup_replacement *replacement;
1699
1700 switch (code)
1701 {
e9a25f70
JL
1702 case ADDRESSOF:
1703 if (XEXP (x, 0) == var)
1704 {
956d6950
JL
1705 /* Prevent sharing of rtl that might lose. */
1706 rtx sub = copy_rtx (XEXP (var, 0));
1707
956d6950
JL
1708 if (! validate_change (insn, loc, sub, 0))
1709 {
5f98f7c4
RH
1710 rtx y = gen_reg_rtx (GET_MODE (sub));
1711 rtx seq, new_insn;
1712
1713 /* We should be able to replace with a register or all is lost.
1714 Note that we can't use validate_change to verify this, since
 1715 we are not concerned with replacing all dups simultaneously. */
1716 if (! validate_replace_rtx (*loc, y, insn))
1717 abort ();
1718
1719 /* Careful! First try to recognize a direct move of the
1720 value, mimicking how things are done in gen_reload wrt
1721 PLUS. Consider what happens when insn is a conditional
1722 move instruction and addsi3 clobbers flags. */
1723
1724 start_sequence ();
1725 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1726 seq = gen_sequence ();
1727 end_sequence ();
1728
1729 if (recog_memoized (new_insn) < 0)
1730 {
1731 /* That failed. Fall back on force_operand and hope. */
956d6950 1732
5f98f7c4
RH
1733 start_sequence ();
1734 force_operand (sub, y);
1735 seq = gen_sequence ();
1736 end_sequence ();
1737 }
956d6950 1738
5f98f7c4
RH
1739#ifdef HAVE_cc0
1740 /* Don't separate setter from user. */
1741 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1742 insn = PREV_INSN (insn);
1743#endif
1744
1745 emit_insn_before (seq, insn);
1746 }
e9a25f70
JL
1747 }
1748 return;
1749
6f086dfc
RS
1750 case MEM:
1751 if (var == x)
1752 {
1753 /* If we already have a replacement, use it. Otherwise,
1754 try to fix up this address in case it is invalid. */
1755
2740a678 1756 replacement = find_fixup_replacement (replacements, var);
6f086dfc
RS
1757 if (replacement->new)
1758 {
1759 *loc = replacement->new;
1760 return;
1761 }
1762
1763 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1764
00d8a4c1
RK
1765 /* Unless we are forcing memory to register or we changed the mode,
1766 we can leave things the way they are if the insn is valid. */
6f086dfc
RS
1767
1768 INSN_CODE (insn) = -1;
00d8a4c1
RK
1769 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1770 && recog_memoized (insn) >= 0)
6f086dfc
RS
1771 return;
1772
00d8a4c1 1773 *loc = replacement->new = gen_reg_rtx (promoted_mode);
6f086dfc
RS
1774 return;
1775 }
1776
1777 /* If X contains VAR, we need to unshare it here so that we update
1778 each occurrence separately. But all identical MEMs in one insn
1779 must be replaced with the same rtx because of the possibility of
1780 MATCH_DUPs. */
1781
1782 if (reg_mentioned_p (var, x))
1783 {
2740a678 1784 replacement = find_fixup_replacement (replacements, x);
6f086dfc
RS
1785 if (replacement->new == 0)
1786 replacement->new = copy_most_rtx (x, var);
1787
1788 *loc = x = replacement->new;
1789 }
1790 break;
1791
1792 case REG:
1793 case CC0:
1794 case PC:
1795 case CONST_INT:
1796 case CONST:
1797 case SYMBOL_REF:
1798 case LABEL_REF:
1799 case CONST_DOUBLE:
1800 return;
1801
1802 case SIGN_EXTRACT:
1803 case ZERO_EXTRACT:
1804 /* Note that in some cases those types of expressions are altered
1805 by optimize_bit_field, and do not survive to get here. */
1806 if (XEXP (x, 0) == var
1807 || (GET_CODE (XEXP (x, 0)) == SUBREG
1808 && SUBREG_REG (XEXP (x, 0)) == var))
1809 {
1810 /* Get TEM as a valid MEM in the mode presently in the insn.
1811
1812 We don't worry about the possibility of MATCH_DUP here; it
1813 is highly unlikely and would be tricky to handle. */
1814
1815 tem = XEXP (x, 0);
1816 if (GET_CODE (tem) == SUBREG)
0e09cc26
RK
1817 {
1818 if (GET_MODE_BITSIZE (GET_MODE (tem))
1819 > GET_MODE_BITSIZE (GET_MODE (var)))
1820 {
1821 replacement = find_fixup_replacement (replacements, var);
1822 if (replacement->new == 0)
1823 replacement->new = gen_reg_rtx (GET_MODE (var));
1824 SUBREG_REG (tem) = replacement->new;
1825 }
ef933d26
RK
1826 else
1827 tem = fixup_memory_subreg (tem, insn, 0);
0e09cc26
RK
1828 }
1829 else
1830 tem = fixup_stack_1 (tem, insn);
6f086dfc
RS
1831
1832 /* Unless we want to load from memory, get TEM into the proper mode
1833 for an extract from memory. This can only be done if the
1834 extract is at a constant position and length. */
1835
1836 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1837 && GET_CODE (XEXP (x, 2)) == CONST_INT
1838 && ! mode_dependent_address_p (XEXP (tem, 0))
1839 && ! MEM_VOLATILE_P (tem))
1840 {
1841 enum machine_mode wanted_mode = VOIDmode;
1842 enum machine_mode is_mode = GET_MODE (tem);
e5e809f4 1843 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
6f086dfc
RS
1844
1845#ifdef HAVE_extzv
1846 if (GET_CODE (x) == ZERO_EXTRACT)
0d8e55d8
JL
1847 {
1848 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1849 if (wanted_mode == VOIDmode)
1850 wanted_mode = word_mode;
1851 }
6f086dfc
RS
1852#endif
1853#ifdef HAVE_extv
1854 if (GET_CODE (x) == SIGN_EXTRACT)
0d8e55d8
JL
1855 {
1856 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1857 if (wanted_mode == VOIDmode)
1858 wanted_mode = word_mode;
1859 }
6f086dfc 1860#endif
6dc42e49 1861 /* If we have a narrower mode, we can do something. */
6f086dfc
RS
1862 if (wanted_mode != VOIDmode
1863 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1864 {
e5e809f4 1865 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
6f086dfc
RS
1866 rtx old_pos = XEXP (x, 2);
1867 rtx newmem;
1868
1869 /* If the bytes and bits are counted differently, we
1870 must adjust the offset. */
f76b9db2
ILT
1871 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1872 offset = (GET_MODE_SIZE (is_mode)
1873 - GET_MODE_SIZE (wanted_mode) - offset);
6f086dfc
RS
1874
1875 pos %= GET_MODE_BITSIZE (wanted_mode);
1876
38a448ca
RH
1877 newmem = gen_rtx_MEM (wanted_mode,
1878 plus_constant (XEXP (tem, 0), offset));
6f086dfc 1879 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
c6df88cb 1880 MEM_COPY_ATTRIBUTES (newmem, tem);
6f086dfc
RS
1881
1882 /* Make the change and see if the insn remains valid. */
1883 INSN_CODE (insn) = -1;
1884 XEXP (x, 0) = newmem;
5f4f0e22 1885 XEXP (x, 2) = GEN_INT (pos);
6f086dfc
RS
1886
1887 if (recog_memoized (insn) >= 0)
1888 return;
1889
1890 /* Otherwise, restore old position. XEXP (x, 0) will be
1891 restored later. */
1892 XEXP (x, 2) = old_pos;
1893 }
1894 }
1895
1896 /* If we get here, the bitfield extract insn can't accept a memory
1897 reference. Copy the input into a register. */
1898
1899 tem1 = gen_reg_rtx (GET_MODE (tem));
1900 emit_insn_before (gen_move_insn (tem1, tem), insn);
1901 XEXP (x, 0) = tem1;
1902 return;
1903 }
1904 break;
1905
1906 case SUBREG:
1907 if (SUBREG_REG (x) == var)
1908 {
00d8a4c1
RK
1909 /* If this is a special SUBREG made because VAR was promoted
1910 from a wider mode, replace it with VAR and call ourself
1911 recursively, this time saying that the object previously
1912 had its current mode (by virtue of the SUBREG). */
1913
1914 if (SUBREG_PROMOTED_VAR_P (x))
1915 {
1916 *loc = var;
1917 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1918 return;
1919 }
1920
6f086dfc
RS
1921 /* If this SUBREG makes VAR wider, it has become a paradoxical
1922 SUBREG with VAR in memory, but these aren't allowed at this
1923 stage of the compilation. So load VAR into a pseudo and take
1924 a SUBREG of that pseudo. */
1925 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1926 {
2740a678 1927 replacement = find_fixup_replacement (replacements, var);
6f086dfc
RS
1928 if (replacement->new == 0)
1929 replacement->new = gen_reg_rtx (GET_MODE (var));
1930 SUBREG_REG (x) = replacement->new;
1931 return;
1932 }
1933
1934 /* See if we have already found a replacement for this SUBREG.
1935 If so, use it. Otherwise, make a MEM and see if the insn
1936 is recognized. If not, or if we should force MEM into a register,
1937 make a pseudo for this SUBREG. */
2740a678 1938 replacement = find_fixup_replacement (replacements, x);
6f086dfc
RS
1939 if (replacement->new)
1940 {
1941 *loc = replacement->new;
1942 return;
1943 }
1944
1945 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1946
f898f031 1947 INSN_CODE (insn) = -1;
6f086dfc
RS
1948 if (! flag_force_mem && recog_memoized (insn) >= 0)
1949 return;
1950
1951 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1952 return;
1953 }
1954 break;
1955
1956 case SET:
1957 /* First do special simplification of bit-field references. */
1958 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1959 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1960 optimize_bit_field (x, insn, 0);
1961 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1962 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
5f4f0e22 1963 optimize_bit_field (x, insn, NULL_PTR);
6f086dfc 1964
0e09cc26
RK
1965 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1966 into a register and then store it back out. */
1967 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1968 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1969 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1970 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1971 > GET_MODE_SIZE (GET_MODE (var))))
1972 {
1973 replacement = find_fixup_replacement (replacements, var);
1974 if (replacement->new == 0)
1975 replacement->new = gen_reg_rtx (GET_MODE (var));
1976
1977 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1978 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1979 }
1980
6f086dfc 1981 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
0f41302f 1982 insn into a pseudo and store the low part of the pseudo into VAR. */
6f086dfc
RS
1983 if (GET_CODE (SET_DEST (x)) == SUBREG
1984 && SUBREG_REG (SET_DEST (x)) == var
1985 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1986 > GET_MODE_SIZE (GET_MODE (var))))
1987 {
1988 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1989 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1990 tem)),
1991 insn);
1992 break;
1993 }
1994
1995 {
1996 rtx dest = SET_DEST (x);
1997 rtx src = SET_SRC (x);
29a82058 1998#ifdef HAVE_insv
6f086dfc 1999 rtx outerdest = dest;
29a82058 2000#endif
6f086dfc
RS
2001
2002 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2003 || GET_CODE (dest) == SIGN_EXTRACT
2004 || GET_CODE (dest) == ZERO_EXTRACT)
2005 dest = XEXP (dest, 0);
2006
2007 if (GET_CODE (src) == SUBREG)
2008 src = XEXP (src, 0);
2009
2010 /* If VAR does not appear at the top level of the SET
2011 just scan the lower levels of the tree. */
2012
2013 if (src != var && dest != var)
2014 break;
2015
2016 /* We will need to rerecognize this insn. */
2017 INSN_CODE (insn) = -1;
2018
2019#ifdef HAVE_insv
2020 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2021 {
2022 /* Since this case will return, ensure we fixup all the
2023 operands here. */
00d8a4c1
RK
2024 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2025 insn, replacements);
2026 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2027 insn, replacements);
2028 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2029 insn, replacements);
6f086dfc
RS
2030
2031 tem = XEXP (outerdest, 0);
2032
2033 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2034 that may appear inside a ZERO_EXTRACT.
2035 This was legitimate when the MEM was a REG. */
2036 if (GET_CODE (tem) == SUBREG
2037 && SUBREG_REG (tem) == var)
0e09cc26 2038 tem = fixup_memory_subreg (tem, insn, 0);
6f086dfc
RS
2039 else
2040 tem = fixup_stack_1 (tem, insn);
2041
2042 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2043 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2044 && ! mode_dependent_address_p (XEXP (tem, 0))
2045 && ! MEM_VOLATILE_P (tem))
2046 {
0d8e55d8 2047 enum machine_mode wanted_mode;
6f086dfc 2048 enum machine_mode is_mode = GET_MODE (tem);
e5e809f4 2049 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
6f086dfc 2050
0d8e55d8
JL
2051 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2052 if (wanted_mode == VOIDmode)
2053 wanted_mode = word_mode;
2054
6dc42e49 2055 /* If we have a narrower mode, we can do something. */
6f086dfc
RS
2056 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2057 {
e5e809f4 2058 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
6f086dfc
RS
2059 rtx old_pos = XEXP (outerdest, 2);
2060 rtx newmem;
2061
f76b9db2
ILT
2062 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2063 offset = (GET_MODE_SIZE (is_mode)
2064 - GET_MODE_SIZE (wanted_mode) - offset);
6f086dfc
RS
2065
2066 pos %= GET_MODE_BITSIZE (wanted_mode);
2067
38a448ca
RH
2068 newmem = gen_rtx_MEM (wanted_mode,
2069 plus_constant (XEXP (tem, 0), offset));
6f086dfc 2070 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
c6df88cb 2071 MEM_COPY_ATTRIBUTES (newmem, tem);
6f086dfc
RS
2072
2073 /* Make the change and see if the insn remains valid. */
2074 INSN_CODE (insn) = -1;
2075 XEXP (outerdest, 0) = newmem;
5f4f0e22 2076 XEXP (outerdest, 2) = GEN_INT (pos);
6f086dfc
RS
2077
2078 if (recog_memoized (insn) >= 0)
2079 return;
2080
2081 /* Otherwise, restore old position. XEXP (x, 0) will be
2082 restored later. */
2083 XEXP (outerdest, 2) = old_pos;
2084 }
2085 }
2086
2087 /* If we get here, the bit-field store doesn't allow memory
2088 or isn't located at a constant position. Load the value into
2089 a register, do the store, and put it back into memory. */
2090
2091 tem1 = gen_reg_rtx (GET_MODE (tem));
2092 emit_insn_before (gen_move_insn (tem1, tem), insn);
2093 emit_insn_after (gen_move_insn (tem, tem1), insn);
2094 XEXP (outerdest, 0) = tem1;
2095 return;
2096 }
2097#endif
2098
2099 /* STRICT_LOW_PART is a no-op on memory references
2100 and it can cause combinations to be unrecognizable,
2101 so eliminate it. */
2102
2103 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2104 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2105
2106 /* A valid insn to copy VAR into or out of a register
2107 must be left alone, to avoid an infinite loop here.
2108 If the reference to VAR is by a subreg, fix that up,
2109 since SUBREG is not valid for a memref.
e15762df
RK
2110 Also fix up the address of the stack slot.
2111
2112 Note that we must not try to recognize the insn until
2113 after we know that we have valid addresses and no
2114 (subreg (mem ...) ...) constructs, since these interfere
2115 with determining the validity of the insn. */
6f086dfc
RS
2116
2117 if ((SET_SRC (x) == var
2118 || (GET_CODE (SET_SRC (x)) == SUBREG
2119 && SUBREG_REG (SET_SRC (x)) == var))
2120 && (GET_CODE (SET_DEST (x)) == REG
2121 || (GET_CODE (SET_DEST (x)) == SUBREG
2122 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1d273bf5 2123 && GET_MODE (var) == promoted_mode
c46722a7 2124 && x == single_set (insn))
6f086dfc 2125 {
e15762df
RK
2126 rtx pat;
2127
2740a678 2128 replacement = find_fixup_replacement (replacements, SET_SRC (x));
6f086dfc 2129 if (replacement->new)
6f086dfc 2130 SET_SRC (x) = replacement->new;
6f086dfc
RS
2131 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2132 SET_SRC (x) = replacement->new
2133 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2134 else
2135 SET_SRC (x) = replacement->new
2136 = fixup_stack_1 (SET_SRC (x), insn);
e15762df
RK
2137
2138 if (recog_memoized (insn) >= 0)
2139 return;
2140
2141 /* INSN is not valid, but we know that we want to
2142 copy SET_SRC (x) to SET_DEST (x) in some way. So
2143 we generate the move and see whether it requires more
2144 than one insn. If it does, we emit those insns and
 2145 delete INSN. Otherwise, we can just replace the pattern
 2146 of INSN; we have already verified above that INSN has
 2147 no other function than to do X. */
2148
2149 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2150 if (GET_CODE (pat) == SEQUENCE)
2151 {
2152 emit_insn_after (pat, insn);
2153 PUT_CODE (insn, NOTE);
2154 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2155 NOTE_SOURCE_FILE (insn) = 0;
2156 }
2157 else
2158 PATTERN (insn) = pat;
2159
6f086dfc
RS
2160 return;
2161 }
2162
2163 if ((SET_DEST (x) == var
2164 || (GET_CODE (SET_DEST (x)) == SUBREG
2165 && SUBREG_REG (SET_DEST (x)) == var))
2166 && (GET_CODE (SET_SRC (x)) == REG
2167 || (GET_CODE (SET_SRC (x)) == SUBREG
2168 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1d273bf5 2169 && GET_MODE (var) == promoted_mode
c46722a7 2170 && x == single_set (insn))
6f086dfc 2171 {
e15762df
RK
2172 rtx pat;
2173
6f086dfc
RS
2174 if (GET_CODE (SET_DEST (x)) == SUBREG)
2175 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2176 else
2177 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
e15762df
RK
2178
2179 if (recog_memoized (insn) >= 0)
2180 return;
2181
2182 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2183 if (GET_CODE (pat) == SEQUENCE)
2184 {
2185 emit_insn_after (pat, insn);
2186 PUT_CODE (insn, NOTE);
2187 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2188 NOTE_SOURCE_FILE (insn) = 0;
2189 }
2190 else
2191 PATTERN (insn) = pat;
2192
6f086dfc
RS
2193 return;
2194 }
2195
2196 /* Otherwise, storing into VAR must be handled specially
2197 by storing into a temporary and copying that into VAR
00d8a4c1
RK
2198 with a new insn after this one. Note that this case
2199 will be used when storing into a promoted scalar since
2200 the insn will now have different modes on the input
2201 and output and hence will be invalid (except for the case
2202 of setting it to a constant, which does not need any
2203 change if it is valid). We generate extra code in that case,
2204 but combine.c will eliminate it. */
6f086dfc
RS
2205
2206 if (dest == var)
2207 {
2208 rtx temp;
00d8a4c1
RK
2209 rtx fixeddest = SET_DEST (x);
2210
6f086dfc 2211 /* STRICT_LOW_PART can be discarded, around a MEM. */
00d8a4c1
RK
2212 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2213 fixeddest = XEXP (fixeddest, 0);
6f086dfc 2214 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
00d8a4c1 2215 if (GET_CODE (fixeddest) == SUBREG)
926d1ca5
RK
2216 {
2217 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2218 promoted_mode = GET_MODE (fixeddest);
2219 }
6f086dfc 2220 else
00d8a4c1
RK
2221 fixeddest = fixup_stack_1 (fixeddest, insn);
2222
926d1ca5 2223 temp = gen_reg_rtx (promoted_mode);
00d8a4c1
RK
2224
2225 emit_insn_after (gen_move_insn (fixeddest,
2226 gen_lowpart (GET_MODE (fixeddest),
2227 temp)),
2228 insn);
6f086dfc 2229
6f086dfc
RS
2230 SET_DEST (x) = temp;
2231 }
2232 }
e9a25f70
JL
2233
2234 default:
2235 break;
6f086dfc
RS
2236 }
2237
2238 /* Nothing special about this RTX; fix its operands. */
2239
2240 fmt = GET_RTX_FORMAT (code);
2241 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2242 {
2243 if (fmt[i] == 'e')
00d8a4c1 2244 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
6f086dfc
RS
2245 if (fmt[i] == 'E')
2246 {
2247 register int j;
2248 for (j = 0; j < XVECLEN (x, i); j++)
00d8a4c1
RK
2249 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2250 insn, replacements);
6f086dfc
RS
2251 }
2252 }
2253}
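
/* Illustrative sketch of the replacement-list mechanism: suppose VAR
   is (mem:SI (plus (reg fp) (const_int -8))) and INSN's pattern uses
   VAR in two operands tied by a MATCH_DUP.  find_fixup_replacement
   returns the same entry for both occurrences, so both are rewritten
   to a single hypothetical pseudo, say (reg:SI 101), and
   fixup_var_refs_insns emits one load of VAR into that pseudo just
   before INSN.  */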
2254\f
2255/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2256 return an rtx (MEM:m1 newaddr) which is equivalent.
2257 If any insns must be emitted to compute NEWADDR, put them before INSN.
2258
2259 UNCRITICAL nonzero means accept paradoxical subregs.
0f41302f 2260 This is used for subregs found inside REG_NOTES. */
6f086dfc
RS
2261
2262static rtx
2263fixup_memory_subreg (x, insn, uncritical)
2264 rtx x;
2265 rtx insn;
2266 int uncritical;
2267{
2268 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2269 rtx addr = XEXP (SUBREG_REG (x), 0);
2270 enum machine_mode mode = GET_MODE (x);
29a82058 2271 rtx result;
6f086dfc
RS
2272
2273 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2274 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2275 && ! uncritical)
2276 abort ();
2277
f76b9db2
ILT
2278 if (BYTES_BIG_ENDIAN)
2279 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2280 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
6f086dfc
RS
2281 addr = plus_constant (addr, offset);
2282 if (!flag_force_addr && memory_address_p (mode, addr))
2283 /* Shortcut if no insns need be emitted. */
2284 return change_address (SUBREG_REG (x), mode, addr);
2285 start_sequence ();
2286 result = change_address (SUBREG_REG (x), mode, addr);
2287 emit_insn_before (gen_sequence (), insn);
2288 end_sequence ();
2289 return result;
2290}
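
/* Worked example (illustrative): with UNITS_PER_WORD == 4 on a
   big-endian target, (subreg:QI (mem:SI addr) 0) denotes the
   low-order byte of the word, which big-endian targets store at the
   highest address, so OFFSET becomes 0*4 + (MIN (4, 4) - MIN (4, 1))
   = 3 and the result is (mem:QI (plus addr 3)).  A little-endian
   target makes no adjustment and yields (mem:QI addr).  */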
2291
2292/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2293 Replace subexpressions of X in place.
2294 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2295 Otherwise return X, with its contents possibly altered.
2296
ab6155b7
RK
2297 If any insns must be emitted to compute NEWADDR, put them before INSN.
2298
2299 UNCRITICAL is as in fixup_memory_subreg. */
6f086dfc
RS
2300
2301static rtx
ab6155b7 2302walk_fixup_memory_subreg (x, insn, uncritical)
6f086dfc
RS
2303 register rtx x;
2304 rtx insn;
ab6155b7 2305 int uncritical;
6f086dfc
RS
2306{
2307 register enum rtx_code code;
6f7d635c 2308 register const char *fmt;
6f086dfc
RS
2309 register int i;
2310
2311 if (x == 0)
2312 return 0;
2313
2314 code = GET_CODE (x);
2315
2316 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
ab6155b7 2317 return fixup_memory_subreg (x, insn, uncritical);
6f086dfc
RS
2318
2319 /* Nothing special about this RTX; fix its operands. */
2320
2321 fmt = GET_RTX_FORMAT (code);
2322 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2323 {
2324 if (fmt[i] == 'e')
ab6155b7 2325 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
6f086dfc
RS
2326 if (fmt[i] == 'E')
2327 {
2328 register int j;
2329 for (j = 0; j < XVECLEN (x, i); j++)
2330 XVECEXP (x, i, j)
ab6155b7 2331 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
6f086dfc
RS
2332 }
2333 }
2334 return x;
2335}
2336\f
6f086dfc
RS
2337/* For each memory ref within X, if it refers to a stack slot
2338 with an out of range displacement, put the address in a temp register
2339 (emitting new insns before INSN to load these registers)
2340 and alter the memory ref to use that register.
2341 Replace each such MEM rtx with a copy, to avoid clobberage. */
2342
2343static rtx
2344fixup_stack_1 (x, insn)
2345 rtx x;
2346 rtx insn;
2347{
2348 register int i;
2349 register RTX_CODE code = GET_CODE (x);
6f7d635c 2350 register const char *fmt;
6f086dfc
RS
2351
2352 if (code == MEM)
2353 {
2354 register rtx ad = XEXP (x, 0);
2355 /* If we have address of a stack slot but it's not valid
2356 (displacement is too large), compute the sum in a register. */
2357 if (GET_CODE (ad) == PLUS
2358 && GET_CODE (XEXP (ad, 0)) == REG
40d05551
RK
2359 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2360 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
e9a25f70
JL
2361 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2362#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2363 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2364#endif
2365 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
956d6950 2366 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
40d05551 2367 || XEXP (ad, 0) == current_function_internal_arg_pointer)
6f086dfc
RS
2368 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2369 {
2370 rtx temp, seq;
2371 if (memory_address_p (GET_MODE (x), ad))
2372 return x;
2373
2374 start_sequence ();
2375 temp = copy_to_reg (ad);
2376 seq = gen_sequence ();
2377 end_sequence ();
2378 emit_insn_before (seq, insn);
2379 return change_address (x, VOIDmode, temp);
2380 }
2381 return x;
2382 }
2383
2384 fmt = GET_RTX_FORMAT (code);
2385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2386 {
2387 if (fmt[i] == 'e')
2388 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2389 if (fmt[i] == 'E')
2390 {
2391 register int j;
2392 for (j = 0; j < XVECLEN (x, i); j++)
2393 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2394 }
2395 }
2396 return x;
2397}
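
/* Illustrative case: given
       (mem:SI (plus (reg fp) (const_int 40000)))
   on a target whose displacements reach only +-32K, the PLUS is
   copied into a fresh pseudo before INSN and the reference becomes
   (mem:SI (reg:SI N)) for some hypothetical pseudo N.  */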
2398\f
2399/* Optimization: a bit-field instruction whose field
2400 happens to be a byte or halfword in memory
2401 can be changed to a move instruction.
2402
2403 We call here when INSN is an insn to examine or store into a bit-field.
2404 BODY is the SET-rtx to be altered.
2405
2406 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2407 (Currently this is called only from function.c, and EQUIV_MEM
2408 is always 0.) */
2409
2410static void
2411optimize_bit_field (body, insn, equiv_mem)
2412 rtx body;
2413 rtx insn;
2414 rtx *equiv_mem;
2415{
2416 register rtx bitfield;
2417 int destflag;
2418 rtx seq = 0;
2419 enum machine_mode mode;
2420
2421 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2422 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2423 bitfield = SET_DEST (body), destflag = 1;
2424 else
2425 bitfield = SET_SRC (body), destflag = 0;
2426
2427 /* First check that the field being stored has constant size and position
2428 and is in fact a byte or halfword suitably aligned. */
2429
2430 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2431 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2432 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2433 != BLKmode)
2434 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2435 {
2436 register rtx memref = 0;
2437
2438 /* Now check that the containing word is memory, not a register,
2439 and that it is safe to change the machine mode. */
2440
2441 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2442 memref = XEXP (bitfield, 0);
2443 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2444 && equiv_mem != 0)
2445 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2446 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2447 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2448 memref = SUBREG_REG (XEXP (bitfield, 0));
2449 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2450 && equiv_mem != 0
2451 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2452 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2453
2454 if (memref
2455 && ! mode_dependent_address_p (XEXP (memref, 0))
2456 && ! MEM_VOLATILE_P (memref))
2457 {
2458 /* Now adjust the address, first for any subreg'ing
2459 that we are now getting rid of,
2460 and then for which byte of the word is wanted. */
2461
e5e809f4 2462 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
b88a3142
RK
2463 rtx insns;
2464
6f086dfc 2465 /* Adjust OFFSET to count bits from low-address byte. */
f76b9db2
ILT
2466 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2467 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2468 - offset - INTVAL (XEXP (bitfield, 1)));
2469
6f086dfc
RS
2470 /* Adjust OFFSET to count bytes from low-address byte. */
2471 offset /= BITS_PER_UNIT;
2472 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2473 {
2474 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
f76b9db2
ILT
2475 if (BYTES_BIG_ENDIAN)
2476 offset -= (MIN (UNITS_PER_WORD,
2477 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2478 - MIN (UNITS_PER_WORD,
2479 GET_MODE_SIZE (GET_MODE (memref))));
6f086dfc
RS
2480 }
2481
b88a3142
RK
2482 start_sequence ();
2483 memref = change_address (memref, mode,
6f086dfc 2484 plus_constant (XEXP (memref, 0), offset));
b88a3142
RK
2485 insns = get_insns ();
2486 end_sequence ();
2487 emit_insns_before (insns, insn);
6f086dfc
RS
2488
2489 /* Store this memory reference where
2490 we found the bit field reference. */
2491
2492 if (destflag)
2493 {
2494 validate_change (insn, &SET_DEST (body), memref, 1);
2495 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2496 {
2497 rtx src = SET_SRC (body);
2498 while (GET_CODE (src) == SUBREG
2499 && SUBREG_WORD (src) == 0)
2500 src = SUBREG_REG (src);
2501 if (GET_MODE (src) != GET_MODE (memref))
2502 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2503 validate_change (insn, &SET_SRC (body), src, 1);
2504 }
2505 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2506 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2507 /* This shouldn't happen because anything that didn't have
2508 one of these modes should have got converted explicitly
2509 and then referenced through a subreg.
2510 This is so because the original bit-field was
2511 handled by agg_mode and so its tree structure had
2512 the same mode that memref now has. */
2513 abort ();
2514 }
2515 else
2516 {
2517 rtx dest = SET_DEST (body);
2518
2519 while (GET_CODE (dest) == SUBREG
4013a709
RK
2520 && SUBREG_WORD (dest) == 0
2521 && (GET_MODE_CLASS (GET_MODE (dest))
ab87f8c8
JL
2522 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2524 <= UNITS_PER_WORD))
6f086dfc
RS
2525 dest = SUBREG_REG (dest);
2526
2527 validate_change (insn, &SET_DEST (body), dest, 1);
2528
2529 if (GET_MODE (dest) == GET_MODE (memref))
2530 validate_change (insn, &SET_SRC (body), memref, 1);
2531 else
2532 {
2533 /* Convert the mem ref to the destination mode. */
2534 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2535
2536 start_sequence ();
2537 convert_move (newreg, memref,
2538 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2539 seq = get_insns ();
2540 end_sequence ();
2541
2542 validate_change (insn, &SET_SRC (body), newreg, 1);
2543 }
2544 }
2545
2546 /* See if we can convert this extraction or insertion into
2547 a simple move insn. We might not be able to do so if this
2548 was, for example, part of a PARALLEL.
2549
2550 If we succeed, write out any needed conversions. If we fail,
2551 it is hard to guess why we failed, so don't do anything
2552 special; just let the optimization be suppressed. */
2553
2554 if (apply_change_group () && seq)
2555 emit_insns_before (seq, insn);
2556 }
2557 }
2558}
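
/* Illustrative conversion: the store
       (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
            (reg:SI 100))
   passes the checks above (constant size and position, 8 bits ->
   QImode, position a multiple of the size), so on a little-endian
   target it becomes the plain byte move
       (set (mem:QI (plus addr 1)) (lowpart QI of (reg:SI 100))).
   Register 100 and the offsets are hypothetical.  */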
2559\f
2560/* These routines are responsible for converting virtual register references
2561 to the actual hard register references once RTL generation is complete.
2562
2563 The following four variables are used for communication between the
2564 routines. They contain the offsets of the virtual registers from their
2565 respective hard registers. */
2566
2567static int in_arg_offset;
2568static int var_offset;
2569static int dynamic_offset;
2570static int out_arg_offset;
71038426 2571static int cfa_offset;
6f086dfc
RS
2572
2573/* In most machines, the stack pointer register is equivalent to the bottom
2574 of the stack. */
2575
2576#ifndef STACK_POINTER_OFFSET
2577#define STACK_POINTER_OFFSET 0
2578#endif
2579
2580/* If not defined, pick an appropriate default for the offset of dynamically
2581 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2582 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2583
2584#ifndef STACK_DYNAMIC_OFFSET
2585
2586#ifdef ACCUMULATE_OUTGOING_ARGS
2587/* The bottom of the stack points to the actual arguments. If
2588 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2589 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2590 stack space for register parameters is not pushed by the caller, but
2591 rather part of the fixed stack areas and hence not included in
2592 `current_function_outgoing_args_size'. Nevertheless, we must allow
2593 for it when allocating stack dynamic objects. */
2594
2595#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2596#define STACK_DYNAMIC_OFFSET(FNDECL) \
2597(current_function_outgoing_args_size \
2598 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2599
2600#else
2601#define STACK_DYNAMIC_OFFSET(FNDECL) \
2602(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2603#endif
2604
2605#else
2606#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2607#endif
2608#endif
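
/* For instance (hypothetical numbers): on a target that defines
   ACCUMULATE_OUTGOING_ARGS and REG_PARM_STACK_SPACE but not
   OUTGOING_REG_PARM_STACK_SPACE, a function with 16 bytes of
   outgoing arguments, an 8-byte register-parameter area, and a zero
   STACK_POINTER_OFFSET gets STACK_DYNAMIC_OFFSET = 16 + 8 + 0 = 24,
   so dynamically allocated objects start 24 bytes above the stack
   pointer.  */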
2609
71038426
RH
2610/* On a few machines, the CFA coincides with the arg pointer. */
2611
2612#ifndef ARG_POINTER_CFA_OFFSET
2613#define ARG_POINTER_CFA_OFFSET 0
2614#endif
2615
2616
e9a25f70
JL
2617/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2618 its address taken. DECL is the decl for the object stored in the
2619 register, for later use if we do need to force REG into the stack.
2620 REG is overwritten by the MEM like in put_reg_into_stack. */
2621
2622rtx
2623gen_mem_addressof (reg, decl)
2624 rtx reg;
2625 tree decl;
2626{
2627 tree type = TREE_TYPE (decl);
8f985ec4
ZW
2628 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2629 REGNO (reg), decl);
95ca22f4
MM
2630 /* If the original REG was a user-variable, then so is the REG whose
2631 address is being taken. */
2632 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
e9a25f70
JL
2633
2634 XEXP (reg, 0) = r;
2635 PUT_CODE (reg, MEM);
2636 PUT_MODE (reg, DECL_MODE (decl));
2637 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
c6df88cb 2638 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
41472af8 2639 MEM_ALIAS_SET (reg) = get_alias_set (decl);
e9a25f70 2640
e5e809f4 2641 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
fe9b4957 2642 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
e5e809f4 2643
e9a25f70
JL
2644 return reg;
2645}
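
/* Illustrative before/after: for `int i' held in (reg:SI 100),
   gen_mem_addressof rewrites that rtx in place into
       (mem:SI (addressof:Pmode (reg:SI 101) 100 <decl for i>))
   where 101 is the fresh pseudo allocated above (both register
   numbers are hypothetical).  If purge_addressof can later eliminate
   the ADDRESSOF, `i' never needs a stack slot; otherwise
   put_addressof_into_stack gives it one.  */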
2646
2647/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2648
2649void
2650flush_addressof (decl)
2651 tree decl;
2652{
2653 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2654 && DECL_RTL (decl) != 0
2655 && GET_CODE (DECL_RTL (decl)) == MEM
2656 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2657 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
fe9b4957 2658 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
e9a25f70
JL
2659}
2660
2661/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2662
2663static void
fe9b4957 2664put_addressof_into_stack (r, ht)
e9a25f70 2665 rtx r;
fe9b4957 2666 struct hash_table *ht;
e9a25f70
JL
2667{
2668 tree decl = ADDRESSOF_DECL (r);
2669 rtx reg = XEXP (r, 0);
2670
2671 if (GET_CODE (reg) != REG)
2672 abort ();
2673
2674 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2675 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
e5e809f4 2676 ADDRESSOF_REGNO (r),
fe9b4957 2677 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
e9a25f70
JL
2678}
2679
b5bd3b3c
AS
2680/* List of replacements made below in purge_addressof_1 when creating
2681 bitfield insertions. */
8b04083b
VM
2682static rtx purge_bitfield_addressof_replacements;
2683
 2684/* List of replacements made below in purge_addressof_1 for patterns
 2685 (MEM (ADDRESSOF (REG ...))). The key of each list entry is the
 2686 corresponding (ADDRESSOF (REG ...)) and the value is a substitution
 2687 for the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS
 2688 is not enough in complex cases, e.g. when some field values can be
 2689 extracted by using a MEM with a narrower mode. */
b5bd3b3c
AS
2690static rtx purge_addressof_replacements;
2691
e9a25f70
JL
2692/* Helper function for purge_addressof. See if the rtx expression at *LOC
2693 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2694 the stack. */
2695
2696static void
fe9b4957 2697purge_addressof_1 (loc, insn, force, store, ht)
e9a25f70
JL
2698 rtx *loc;
2699 rtx insn;
f7b6d104 2700 int force, store;
fe9b4957 2701 struct hash_table *ht;
e9a25f70
JL
2702{
2703 rtx x;
2704 RTX_CODE code;
2705 int i, j;
6f7d635c 2706 const char *fmt;
e9a25f70
JL
2707
2708 /* Re-start here to avoid recursion in common cases. */
2709 restart:
2710
2711 x = *loc;
2712 if (x == 0)
2713 return;
2714
2715 code = GET_CODE (x);
2716
2717 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2718 {
2719 rtx insns;
956d6950
JL
2720 /* We must create a copy of the rtx because it was created by
2721 overwriting a REG rtx which is always shared. */
2722 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
e9a25f70 2723
ab87f8c8
JL
2724 if (validate_change (insn, loc, sub, 0)
2725 || validate_replace_rtx (x, sub, insn))
e9a25f70 2726 return;
ab87f8c8 2727
e9a25f70 2728 start_sequence ();
ab87f8c8
JL
2729 sub = force_operand (sub, NULL_RTX);
2730 if (! validate_change (insn, loc, sub, 0)
2731 && ! validate_replace_rtx (x, sub, insn))
e9a25f70
JL
2732 abort ();
2733
f7b6d104 2734 insns = gen_sequence ();
e9a25f70 2735 end_sequence ();
18e765cb 2736 emit_insn_before (insns, insn);
e9a25f70
JL
2737 return;
2738 }
2739 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2740 {
2741 rtx sub = XEXP (XEXP (x, 0), 0);
ab87f8c8 2742 rtx sub2;
e5e809f4 2743
6d8ccdbb 2744 if (GET_CODE (sub) == MEM)
ab87f8c8
JL
2745 {
2746 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2747 MEM_COPY_ATTRIBUTES (sub2, sub);
2748 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2749 sub = sub2;
2750 }
e5e809f4 2751
f5963e61
JL
2752 if (GET_CODE (sub) == REG
2753 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
e5e809f4 2754 {
fe9b4957 2755 put_addressof_into_stack (XEXP (x, 0), ht);
e5e809f4
JL
2756 return;
2757 }
2758 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
e9a25f70 2759 {
f7b6d104
RH
2760 int size_x, size_sub;
2761
b5bd3b3c
AS
2762 if (!insn)
2763 {
2764 /* When processing REG_NOTES look at the list of
2765 replacements done on the insn to find the register that X
2766 was replaced by. */
2767 rtx tem;
2768
8b04083b
VM
2769 for (tem = purge_bitfield_addressof_replacements;
2770 tem != NULL_RTX;
b5bd3b3c 2771 tem = XEXP (XEXP (tem, 1), 1))
8b04083b
VM
2772 if (rtx_equal_p (x, XEXP (tem, 0)))
2773 {
2774 *loc = XEXP (XEXP (tem, 1), 0);
2775 return;
2776 }
fbdfe39c 2777
8b04083b
VM
2778 /* See comment for purge_addressof_replacements. */
2779 for (tem = purge_addressof_replacements;
2780 tem != NULL_RTX;
2781 tem = XEXP (XEXP (tem, 1), 1))
2782 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2783 {
2784 rtx z = XEXP (XEXP (tem, 1), 0);
fbdfe39c 2785
8b04083b
VM
2786 if (GET_MODE (x) == GET_MODE (z)
2787 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2788 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2789 abort ();
2790
2791 /* It can happen that the note may speak of things
2792 in a wider (or just different) mode than the
2793 code did. This is especially true of
2794 REG_RETVAL. */
2795
2796 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2797 z = SUBREG_REG (z);
2798
2799 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2800 && (GET_MODE_SIZE (GET_MODE (x))
2801 > GET_MODE_SIZE (GET_MODE (z))))
2802 {
 2803 /* This can occur as a result of invalid
2804 pointer casts, e.g. float f; ...
2805 *(long long int *)&f.
2806 ??? We could emit a warning here, but
2807 without a line number that wouldn't be
2808 very helpful. */
2809 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2810 }
2811 else
2812 z = gen_lowpart (GET_MODE (x), z);
2813
2814 *loc = z;
2815 return;
2816 }
b5bd3b3c
AS
2817
2818 /* There should always be such a replacement. */
2819 abort ();
2820 }
2821
f7b6d104
RH
2822 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2823 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2824
2825 /* Don't even consider working with paradoxical subregs,
2826 or the moral equivalent seen here. */
470032d7 2827 if (size_x <= size_sub
d006aa54 2828 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
e9a25f70 2829 {
f7b6d104
RH
2830 /* Do a bitfield insertion to mirror what would happen
2831 in memory. */
2832
f7b6d104
RH
2833 rtx val, seq;
2834
f7b6d104
RH
2835 if (store)
2836 {
fe9b4957 2837 rtx p = PREV_INSN (insn);
de0dd934 2838
f7b6d104
RH
2839 start_sequence ();
2840 val = gen_reg_rtx (GET_MODE (x));
2841 if (! validate_change (insn, loc, val, 0))
b5bd3b3c
AS
2842 {
2843 /* Discard the current sequence and put the
2844 ADDRESSOF on stack. */
2845 end_sequence ();
2846 goto give_up;
2847 }
f7b6d104
RH
2848 seq = gen_sequence ();
2849 end_sequence ();
2850 emit_insn_before (seq, insn);
fe9b4957
MM
2851 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2852 insn, ht);
f7b6d104
RH
2853
2854 start_sequence ();
47401c4d 2855 store_bit_field (sub, size_x, 0, GET_MODE (x),
f7b6d104
RH
2856 val, GET_MODE_SIZE (GET_MODE (sub)),
2857 GET_MODE_SIZE (GET_MODE (sub)));
2858
de0dd934
R
2859 /* Make sure to unshare any shared rtl that store_bit_field
2860 might have created. */
 2861 for (p = get_insns (); p; p = NEXT_INSN (p))
2862 {
2863 reset_used_flags (PATTERN (p));
2864 reset_used_flags (REG_NOTES (p));
2865 reset_used_flags (LOG_LINKS (p));
2866 }
2867 unshare_all_rtl (get_insns ());
2868
f7b6d104
RH
2869 seq = gen_sequence ();
2870 end_sequence ();
fe9b4957
MM
2871 p = emit_insn_after (seq, insn);
2872 if (NEXT_INSN (insn))
2873 compute_insns_for_mem (NEXT_INSN (insn),
2874 p ? NEXT_INSN (p) : NULL_RTX,
2875 ht);
f7b6d104
RH
2876 }
2877 else
2878 {
fe9b4957
MM
2879 rtx p = PREV_INSN (insn);
2880
f7b6d104 2881 start_sequence ();
47401c4d 2882 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
f7b6d104
RH
2883 GET_MODE (x), GET_MODE (x),
2884 GET_MODE_SIZE (GET_MODE (sub)),
2885 GET_MODE_SIZE (GET_MODE (sub)));
2886
f7b6d104 2887 if (! validate_change (insn, loc, val, 0))
b5bd3b3c
AS
2888 {
2889 /* Discard the current sequence and put the
2890 ADDRESSOF on stack. */
2891 end_sequence ();
2892 goto give_up;
2893 }
f7b6d104
RH
2894
2895 seq = gen_sequence ();
2896 end_sequence ();
2897 emit_insn_before (seq, insn);
fe9b4957
MM
2898 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2899 insn, ht);
f7b6d104
RH
2900 }
2901
b5bd3b3c
AS
2902 /* Remember the replacement so that the same one can be done
2903 on the REG_NOTES. */
8b04083b 2904 purge_bitfield_addressof_replacements
b5bd3b3c 2905 = gen_rtx_EXPR_LIST (VOIDmode, x,
8b04083b
VM
2906 gen_rtx_EXPR_LIST
2907 (VOIDmode, val,
2908 purge_bitfield_addressof_replacements));
b5bd3b3c 2909
f7b6d104
RH
2910 /* We replaced with a reg -- all done. */
2911 return;
e9a25f70
JL
2912 }
2913 }
2914 else if (validate_change (insn, loc, sub, 0))
fbdfe39c
RH
2915 {
2916 /* Remember the replacement so that the same one can be done
2917 on the REG_NOTES. */
8b04083b
VM
2918 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
2919 {
2920 rtx tem;
2921
2922 for (tem = purge_addressof_replacements;
2923 tem != NULL_RTX;
2924 tem = XEXP (XEXP (tem, 1), 1))
2925 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2926 {
2927 XEXP (XEXP (tem, 1), 0) = sub;
2928 return;
2929 }
2930 purge_addressof_replacements
 2931 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
2932 gen_rtx_EXPR_LIST (VOIDmode, sub,
2933 purge_addressof_replacements));
2934 return;
2935 }
fbdfe39c
RH
2936 goto restart;
2937 }
b5bd3b3c 2938 give_up:;
e9a25f70
JL
2939 /* else give up and put it into the stack */
2940 }
2941 else if (code == ADDRESSOF)
2942 {
fe9b4957 2943 put_addressof_into_stack (x, ht);
e9a25f70
JL
2944 return;
2945 }
f7b6d104
RH
2946 else if (code == SET)
2947 {
fe9b4957
MM
2948 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2949 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
f7b6d104
RH
2950 return;
2951 }
e9a25f70
JL
2952
2953 /* Scan all subexpressions. */
2954 fmt = GET_RTX_FORMAT (code);
2955 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2956 {
2957 if (*fmt == 'e')
fe9b4957 2958 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
e9a25f70
JL
2959 else if (*fmt == 'E')
2960 for (j = 0; j < XVECLEN (x, i); j++)
fe9b4957
MM
2961 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
2962 }
2963}
2964
 2965/* Return a new hash table entry in HT, or HE itself if an entry
already exists. */
2966
2967static struct hash_entry *
2968insns_for_mem_newfunc (he, ht, k)
2969 struct hash_entry *he;
2970 struct hash_table *ht;
2971 hash_table_key k ATTRIBUTE_UNUSED;
2972{
2973 struct insns_for_mem_entry *ifmhe;
2974 if (he)
2975 return he;
2976
2977 ifmhe = ((struct insns_for_mem_entry *)
2978 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
2979 ifmhe->insns = NULL_RTX;
2980
2981 return &ifmhe->he;
2982}
2983
2984/* Return a hash value for K, a REG. */
2985
2986static unsigned long
2987insns_for_mem_hash (k)
2988 hash_table_key k;
2989{
 2990 /* K is really an RTX. Just use its address as the hash value. */
2991 return (unsigned long) k;
2992}
2993
2994/* Return non-zero if K1 and K2 (two REGs) are the same. */
2995
2996static boolean
2997insns_for_mem_comp (k1, k2)
2998 hash_table_key k1;
2999 hash_table_key k2;
3000{
3001 return k1 == k2;
3002}
3003
3004struct insns_for_mem_walk_info {
3005 /* The hash table that we are using to record which INSNs use which
3006 MEMs. */
3007 struct hash_table *ht;
3008
 3009 /* The INSN we are currently processing. */
3010 rtx insn;
3011
3012 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3013 to find the insns that use the REGs in the ADDRESSOFs. */
3014 int pass;
3015};
3016
3017/* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3018 that might be used in an ADDRESSOF expression, record this INSN in
3019 the hash table given by DATA (which is really a pointer to an
3020 insns_for_mem_walk_info structure). */
3021
3022static int
3023insns_for_mem_walk (r, data)
3024 rtx *r;
3025 void *data;
3026{
3027 struct insns_for_mem_walk_info *ifmwi
3028 = (struct insns_for_mem_walk_info *) data;
3029
3030 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3031 && GET_CODE (XEXP (*r, 0)) == REG)
3032 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3033 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3034 {
 3035 /* Look up this REG in the hash table; it is present only if it
appeared inside an ADDRESSOF (we do not create entries here). */
3036 struct insns_for_mem_entry *ifme
3037 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3038 *r,
3039 /*create=*/0,
3040 /*copy=*/0);
3041
3042 /* If we have not already recorded this INSN, do so now. Since
3043 we process the INSNs in order, we know that if we have
 3044 recorded it, it must be at the front of the list. */
3045 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3046 {
3047 /* We do the allocation on the same obstack as is used for
3048 the hash table since this memory will not be used once
3049 the hash table is deallocated. */
3050 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3051 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3052 ifme->insns);
3053 pop_obstacks ();
3054 }
e9a25f70 3055 }
fe9b4957
MM
3056
3057 return 0;
3058}
3059
3060/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3061 which REGs in HT. */
3062
3063static void
3064compute_insns_for_mem (insns, last_insn, ht)
3065 rtx insns;
3066 rtx last_insn;
3067 struct hash_table *ht;
3068{
3069 rtx insn;
3070 struct insns_for_mem_walk_info ifmwi;
3071 ifmwi.ht = ht;
3072
3073 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3074 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3075 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3076 {
3077 ifmwi.insn = insn;
3078 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3079 }
e9a25f70
JL
3080}
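
/* Note (illustrative): the two passes are what make the table small.
   Pass 0 enters into HT only those REGs that appear inside an
   ADDRESSOF, so pass 1 records insn lists for just that handful of
   registers and ignores every other REG in the function.  */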
3081
3082/* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3083 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3084 stack. */
3085
3086void
3087purge_addressof (insns)
3088 rtx insns;
3089{
3090 rtx insn;
fe9b4957
MM
3091 struct hash_table ht;
3092
3093 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3094 requires a fixup pass over the instruction stream to correct
3095 INSNs that depended on the REG being a REG, and not a MEM. But,
 3096 these fixup passes are slow. Furthermore, most MEMs are not
3097 mentioned in very many instructions. So, we speed up the process
3098 by pre-calculating which REGs occur in which INSNs; that allows
3099 us to perform the fixup passes much more quickly. */
3100 hash_table_init (&ht,
3101 insns_for_mem_newfunc,
3102 insns_for_mem_hash,
3103 insns_for_mem_comp);
3104 compute_insns_for_mem (insns, NULL_RTX, &ht);
3105
e9a25f70
JL
3106 for (insn = insns; insn; insn = NEXT_INSN (insn))
3107 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3108 || GET_CODE (insn) == CALL_INSN)
3109 {
3110 purge_addressof_1 (&PATTERN (insn), insn,
fe9b4957
MM
3111 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3112 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
e9a25f70 3113 }
fe9b4957
MM
3114
3115 /* Clean up. */
3116 hash_table_free (&ht);
8b04083b 3117 purge_bitfield_addressof_replacements = 0;
da9b1f9c 3118 purge_addressof_replacements = 0;
e9a25f70
JL
3119}
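
/* Illustrative trigger: compiling something like

       int f () { int i = 0; int *p = &i; return *p; }

   initially keeps `i' in a pseudo and represents `&i' with an
   ADDRESSOF.  If optimization lets the (MEM (ADDRESSOF ...)) uses be
   replaced by the register itself, the ADDRESSOF vanishes and `i'
   stays in a register; otherwise put_addressof_into_stack gives it a
   real slot.  */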
3120\f
6f086dfc
RS
3121/* Pass through the INSNS of function FNDECL and convert virtual register
3122 references to hard register references. */
3123
3124void
3125instantiate_virtual_regs (fndecl, insns)
3126 tree fndecl;
3127 rtx insns;
3128{
3129 rtx insn;
e9a25f70 3130 int i;
6f086dfc
RS
3131
3132 /* Compute the offsets to use for this function. */
3133 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3134 var_offset = STARTING_FRAME_OFFSET;
3135 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3136 out_arg_offset = STACK_POINTER_OFFSET;
71038426 3137 cfa_offset = ARG_POINTER_CFA_OFFSET;
6f086dfc
RS
3138
3139 /* Scan all variables and parameters of this function. For each that is
3140 in memory, instantiate all virtual registers if the result is a valid
3141 address. If not, we do it later. That will handle most uses of virtual
3142 regs on many machines. */
3143 instantiate_decls (fndecl, 1);
3144
3145 /* Initialize recognition, indicating that volatile is OK. */
3146 init_recog ();
3147
3148 /* Scan through all the insns, instantiating every virtual register still
3149 present. */
3150 for (insn = insns; insn; insn = NEXT_INSN (insn))
3151 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3152 || GET_CODE (insn) == CALL_INSN)
3153 {
3154 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
5f4f0e22 3155 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
6f086dfc
RS
3156 }
3157
e9a25f70
JL
3158 /* Instantiate the stack slots for the parm registers, for later use in
3159 addressof elimination. */
3160 for (i = 0; i < max_parm_reg; ++i)
3161 if (parm_reg_stack_loc[i])
3162 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3163
6f086dfc
RS
3164 /* Now instantiate the remaining register equivalences for debugging info.
3165 These will not be valid addresses. */
3166 instantiate_decls (fndecl, 0);
3167
3168 /* Indicate that, from now on, assign_stack_local should use
3169 frame_pointer_rtx. */
3170 virtuals_instantiated = 1;
3171}
3172
3173/* Scan all decls in FNDECL (both variables and parameters) and instantiate
3174 all virtual registers in their DECL_RTL's.
3175
3176 If VALID_ONLY, do this only if the resulting address is still valid.
3177 Otherwise, always do it. */
3178
3179static void
3180instantiate_decls (fndecl, valid_only)
3181 tree fndecl;
3182 int valid_only;
3183{
3184 tree decl;
3185
e1686233 3186 if (DECL_SAVED_INSNS (fndecl))
6f086dfc
RS
3187 /* When compiling an inline function, the obstack used for
3188 rtl allocation is the maybepermanent_obstack. Calling
3189 `resume_temporary_allocation' switches us back to that
3190 obstack while we process this function's parameters. */
3191 resume_temporary_allocation ();
3192
3193 /* Process all parameters of the function. */
3194 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3195 {
e5e809f4
JL
3196 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3197
ce717ce4
JW
3198 instantiate_decl (DECL_RTL (decl), size, valid_only);
3199
3200 /* If the parameter was promoted, then the incoming RTL mode may be
3201 larger than the declared type size. We must use the larger of
3202 the two sizes. */
3203 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3204 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
6f086dfc
RS
3205 }
3206
0f41302f 3207 /* Now process all variables defined in the function or its subblocks. */
6f086dfc
RS
3208 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3209
79c0672e 3210 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
6f086dfc
RS
3211 {
3212 /* Save all rtl allocated for this function by raising the
3213 high-water mark on the maybepermanent_obstack. */
3214 preserve_data ();
3215 /* All further rtl allocation is now done in the current_obstack. */
3216 rtl_in_current_obstack ();
3217 }
3218}
3219
3220/* Subroutine of instantiate_decls: Process all decls in the given
3221 BLOCK node and all its subblocks. */
3222
3223static void
3224instantiate_decls_1 (let, valid_only)
3225 tree let;
3226 int valid_only;
3227{
3228 tree t;
3229
3230 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
5a73491b
RK
3231 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3232 valid_only);
6f086dfc
RS
3233
3234 /* Process all subblocks. */
3235 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3236 instantiate_decls_1 (t, valid_only);
3237}
5a73491b 3238
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     int size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only)
    {
      /* Now verify that the resulting address is valid for every integer or
         floating-point mode up to and including SIZE bytes long.  We do this
         since the object might be accessed in any mode and frame addresses
         are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
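
/* As a sketch of the validation loops above (figures illustrative only,
   assuming a 32-bit target whose narrowest integer mode is QImode): for
   SIZE == 8 the first loop checks that ADDR is a valid address for
   QImode, HImode, SImode and DImode accesses, and the second does the
   same for SFmode and DFmode, since a shared frame address may later be
   used at any of those widths.  */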
\f
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */
static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset = 0;
  rtx temp;
  rtx seq;
  int i, j;
  const char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
         the actual register should receive the source minus the
         appropriate offset.  This is used, for example, in the handling
         of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
        new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
        new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
        new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
        new = stack_pointer_rtx, offset = - out_arg_offset;
      else if (SET_DEST (x) == virtual_cfa_rtx)
        new = arg_pointer_rtx, offset = - cfa_offset;

      if (new)
        {
          /* The only valid sources here are PLUS or REG.  Just do
             the simplest possible thing to handle them.  */
          if (GET_CODE (SET_SRC (x)) != REG
              && GET_CODE (SET_SRC (x)) != PLUS)
            abort ();

          start_sequence ();
          if (GET_CODE (SET_SRC (x)) != REG)
            temp = force_operand (SET_SRC (x), NULL_RTX);
          else
            temp = SET_SRC (x);
          temp = force_operand (plus_constant (temp, offset), NULL_RTX);
          seq = get_insns ();
          end_sequence ();

          emit_insns_before (seq, object);
          SET_DEST (x) = new;

          if (! validate_change (object, &SET_SRC (x), temp, 0)
              || ! extra_insns)
            abort ();

          return 1;
        }

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx old, new_offset;

          /* Check for (plus (plus VIRT foo) (const_int)) first.  */
          if (GET_CODE (XEXP (x, 0)) == PLUS)
            {
              rtx inner = XEXP (XEXP (x, 0), 0);

              if (inner == virtual_incoming_args_rtx)
                new = arg_pointer_rtx, offset = in_arg_offset;
              else if (inner == virtual_stack_vars_rtx)
                new = frame_pointer_rtx, offset = var_offset;
              else if (inner == virtual_stack_dynamic_rtx)
                new = stack_pointer_rtx, offset = dynamic_offset;
              else if (inner == virtual_outgoing_args_rtx)
                new = stack_pointer_rtx, offset = out_arg_offset;
              else if (inner == virtual_cfa_rtx)
                new = arg_pointer_rtx, offset = cfa_offset;
              else
                {
                  loc = &XEXP (x, 0);
                  goto restart;
                }

              instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
                                          extra_insns);
              new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
            }

          else if (XEXP (x, 0) == virtual_incoming_args_rtx)
            new = arg_pointer_rtx, offset = in_arg_offset;
          else if (XEXP (x, 0) == virtual_stack_vars_rtx)
            new = frame_pointer_rtx, offset = var_offset;
          else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
            new = stack_pointer_rtx, offset = dynamic_offset;
          else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
            new = stack_pointer_rtx, offset = out_arg_offset;
          else if (XEXP (x, 0) == virtual_cfa_rtx)
            new = arg_pointer_rtx, offset = cfa_offset;
          else
            {
              /* We know the second operand is a constant.  Unless the
                 first operand is a REG (which has already been checked),
                 it needs to be checked.  */
              if (GET_CODE (XEXP (x, 0)) != REG)
                {
                  loc = &XEXP (x, 0);
                  goto restart;
                }
              return 1;
            }

          new_offset = plus_constant (XEXP (x, 1), offset);

          /* If the new constant is zero, try to replace the sum with just
             the register.  */
          if (new_offset == const0_rtx
              && validate_change (object, loc, new, 0))
            return 1;

          /* Next try to replace the register and new offset.
             There are two changes to validate here and we can't assume that
             in the case of old offset equals new just changing the register
             will yield a valid insn.  In the interests of a little
             efficiency, however, we only call validate change once (we don't
             queue up the changes and then call apply_change_group).  */

          old = XEXP (x, 0);
          if (offset == 0
              ? ! validate_change (object, &XEXP (x, 0), new, 0)
              : (XEXP (x, 0) = new,
                 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
            {
              if (! extra_insns)
                {
                  XEXP (x, 0) = old;
                  return 0;
                }

              /* Otherwise copy the new constant into a register and replace
                 constant with that register.  */
              temp = gen_reg_rtx (Pmode);
              XEXP (x, 0) = new;
              if (validate_change (object, &XEXP (x, 1), temp, 0))
                emit_insn_before (gen_move_insn (temp, new_offset), object);
              else
                {
                  /* If that didn't work, replace this expression with a
                     register containing the sum.  */

                  XEXP (x, 0) = old;
                  new = gen_rtx_PLUS (Pmode, new, new_offset);

                  start_sequence ();
                  temp = force_operand (new, NULL_RTX);
                  seq = get_insns ();
                  end_sequence ();

                  emit_insns_before (seq, object);
                  if (! validate_change (object, loc, temp, 0)
                      && ! validate_replace_rtx (x, temp, object))
                    abort ();
                }
            }

          return 1;
        }

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
        instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
         handled by our scan of decls.  The only special handling we
         need here is to make a copy of the rtx to ensure it isn't being
         shared if we have to change it to a pseudo.

         If the rtx is a simple reference to an address via a virtual
         register, it can potentially be shared.  In such cases, first try
         to make it a valid address, which can also be shared.  Otherwise,
         copy it and proceed normally.

         First check for common cases that need no processing.  These are
         usually due to instantiation already being done on a previous
         instance of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          || temp == hard_frame_pointer_rtx
#endif
          || temp == frame_pointer_rtx)
        return 1;

      if (GET_CODE (temp) == PLUS
          && CONSTANT_ADDRESS_P (XEXP (temp, 1))
          && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
              || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || XEXP (temp, 0) == arg_pointer_rtx
#endif
              ))
        return 1;

      if (temp == virtual_stack_vars_rtx
          || temp == virtual_incoming_args_rtx
          || (GET_CODE (temp) == PLUS
              && CONSTANT_ADDRESS_P (XEXP (temp, 1))
              && (XEXP (temp, 0) == virtual_stack_vars_rtx
                  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
        {
          /* This MEM may be shared.  If the substitution can be done without
             the need to generate new pseudos, we want to do it in place
             so all copies of the shared rtx benefit.  The call below will
             only make substitutions if the resulting address is still
             valid.

             Note that we cannot pass X as the object in the recursive call
             since the insn being processed may not allow all valid
             addresses.  However, if we were not passed an object, we can
             only modify X without copying it if X will have a valid
             address.

             ??? Also note that this can still lose if OBJECT is an insn that
             has fewer restrictions on an address than some other insn.
             In that case, we will modify the shared address.  This case
             doesn't seem very likely, though.  One case where this could
             happen is in the case of a USE or CLOBBER reference, but we
             take care of that below.  */

          if (instantiate_virtual_regs_1 (&XEXP (x, 0),
                                          object ? object : x, 0))
            return 1;

          /* Otherwise make a copy and process that copy.  We copy the entire
             RTL expression since it might be a PLUS which could also be
             shared.  */
          *loc = x = copy_rtx (x);
        }

      /* Fall through to generic unary operation case.  */
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
         check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
         go ahead and make the invalid one, but do it to a copy.  For a REG,
         just make the recursive call, since there's no chance of a
         problem.  */

      if ((GET_CODE (XEXP (x, 0)) == MEM
           && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
                                          0))
          || (GET_CODE (XEXP (x, 0)) == REG
              && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
        return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
         in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
        new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
        new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
        new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
        new = stack_pointer_rtx, offset = out_arg_offset;
      else if (x == virtual_cfa_rtx)
        new = arg_pointer_rtx, offset = cfa_offset;

      if (new)
        {
          temp = plus_constant (new, offset);
          if (! validate_change (object, loc, temp, 0))
            {
              if (! extra_insns)
                return 0;

              start_sequence ();
              temp = force_operand (temp, NULL_RTX);
              seq = get_insns ();
              end_sequence ();

              emit_insns_before (seq, object);
              if (! validate_change (object, loc, temp, 0)
                  && ! validate_replace_rtx (x, temp, object))
                abort ();
            }
        }

      return 1;

    case ADDRESSOF:
      if (GET_CODE (XEXP (x, 0)) == REG)
        return 1;

      else if (GET_CODE (XEXP (x, 0)) == MEM)
        {
          /* If we have a (addressof (mem ..)), do any instantiation inside
             since we know we'll be making the inside valid when we finally
             remove the ADDRESSOF.  */
          instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
          return 1;
        }
      break;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
        if (! instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
          return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
                                          extra_insns))
          return 0;

  return 1;
}
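
/* A worked example of the PLUS case above (the offset value is
   illustrative; the real one comes from the virtual-register offsets
   computed when the frame was laid out).  Assuming var_offset == -16,

     before:  (plus:SI (reg:SI virtual-stack-vars) (const_int 8))
     after:   (plus:SI (reg:SI frame-pointer) (const_int -8))

   and had the combined constant come out to zero, the whole sum would
   have been replaced by the bare frame pointer register instead.  */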
\f
/* Optimization: assuming this function does not receive nonlocal gotos,
   delete the handlers for such, as well as the insns to establish
   and disestablish them.  */

static void
delete_handlers ()
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
         prevent jump_optimize from deleting it.
         Also permit deletion of the nonlocal labels themselves
         if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
        {
          tree t, last_t;

          LABEL_PRESERVE_P (insn) = 0;

          /* Remove it from the nonlocal_label list, to avoid confusing
             flow.  */
          for (t = nonlocal_labels, last_t = 0; t;
               last_t = t, t = TREE_CHAIN (t))
            if (DECL_RTL (TREE_VALUE (t)) == insn)
              break;
          if (t)
            {
              if (! last_t)
                nonlocal_labels = TREE_CHAIN (nonlocal_labels);
              else
                TREE_CHAIN (last_t) = TREE_CHAIN (t);
            }
        }
      if (GET_CODE (insn) == INSN)
        {
          int can_delete = 0;
          rtx t;
          for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
            if (reg_mentioned_p (t, PATTERN (insn)))
              {
                can_delete = 1;
                break;
              }
          if (can_delete
              || (nonlocal_goto_stack_level != 0
                  && reg_mentioned_p (nonlocal_goto_stack_level,
                                      PATTERN (insn))))
            delete_insn (insn);
        }
    }
}
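
/* For illustration only: the entry-time insns that establish a nonlocal
   goto handler store the handler label and the current stack level, so
   they look schematically like

     (set (mem ...) (label_ref L_handler))   ; mentions a handler slot
     (set (mem ...) (reg sp))                ; mentions the stack level

   and any insn whose pattern mentions one of the recorded slots or
   nonlocal_goto_stack_level is what the loop above deletes.  */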
\f
/* Output a USE for any register use in RTL.
   This is used with -noreg to mark the extent of lifespan
   of any registers used in a user-visible variable's DECL_RTL.  */

void
use_variable (rtl)
     rtx rtl;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn (gen_rtx_USE (VOIDmode, rtl));
  else if (GET_CODE (rtl) == MEM
           && GET_CODE (XEXP (rtl, 0)) == REG
           && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
               || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
           && XEXP (rtl, 0) != current_function_internal_arg_pointer)
    /* This is a variable-sized structure.  */
    emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
}
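
/* For example, for a variable living in pseudo-register 42 the call
   above emits (the SImode in the reg is illustrative)

     (use (reg:SI 42))

   at the end of the insn chain, which keeps the register live over the
   variable's whole lifespan for the benefit of debugging.  */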

/* Like use_variable except that it outputs the USEs after INSN
   instead of at the end of the insn-chain.  */

void
use_variable_after (rtl, insn)
     rtx rtl, insn;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
  else if (GET_CODE (rtl) == MEM
           && GET_CODE (XEXP (rtl, 0)) == REG
           && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
               || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
           && XEXP (rtl, 0) != current_function_internal_arg_pointer)
    /* This is a variable-sized structure.  */
    emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
}
\f
int
max_parm_reg_num ()
{
  return max_parm_reg;
}

/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}

/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
   Crash if there is none.  */

rtx
get_first_block_beg ()
{
  register rtx searcher;
  register rtx insn = get_first_nonparm_insn ();

  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
    if (GET_CODE (searcher) == NOTE
        && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
      return searcher;

  abort ();	/* Invalid call to this function.  (See comments above.)  */
  return NULL_RTX;
}

/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (exp)
     tree exp;
{
  int i, regno, nregs;
  rtx reg;
  tree type;
  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
    type = exp;
  else
    type = TREE_TYPE (exp);

  if (RETURN_IN_MEMORY (type))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (GET_CODE (reg) != REG)
    return 0;

  regno = REGNO (reg);
  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
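
/* Illustrative uses (the exact answers depend on the target's
   RETURN_IN_MEMORY and call_used_regs, so these are typical rather than
   guaranteed): aggregate_value_p (integer_type_node) is usually 0, the
   value fitting in a call-clobbered register, while a TREE_ADDRESSABLE
   type, or any aggregate under -fpcc-struct-return, yields 1 and is
   returned through an address.  */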
\f
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.

   If SECOND_TIME is non-zero it means that this function is being
   called a second time.  This is done by integrate.c when a function's
   compilation is deferred.  We need to come back here in case the
   FUNCTION_ARG macro computes items needed for the rest of the compilation
   (such as changing which registers are fixed or caller-saved).  But suppress
   writing any insns or setting DECL_RTL of anything in this case.  */

void
assign_parms (fndecl, second_time)
     tree fndecl;
     int second_time;
{
  register tree parm;
  register rtx entry_parm = 0;
  register rtx stack_parm = 0;
  CUMULATIVE_ARGS args_so_far;
  enum machine_mode promoted_mode, passed_mode;
  enum machine_mode nominal_mode, promoted_nominal_mode;
  int unsignedp;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
#ifdef SETUP_INCOMING_VARARGS
  int varargs_setup = 0;
#endif
  rtx conversion_insns = 0;

  /* Nonzero if the last arg is named `__builtin_va_alist',
     which is used on some machines for old-fashioned non-ANSI varargs.h;
     this should be stuck onto the stack as if it had arrived there.  */
  int hide_last_arg
    = (current_function_varargs
       && fnargs
       && (parm = tree_last (fnargs)) != 0
       && DECL_NAME (parm)
       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
                     "__builtin_va_alist")));

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
           != void_type_node));

  current_function_stdarg = stdarg;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
             || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
      && ! second_time)
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
      struct args_size stack_offset;
      struct args_size arg_size;
      int passed_pointer = 0;
      int did_conversion = 0;
      tree passed_type = DECL_ARG_TYPE (parm);
      tree nominal_type = TREE_TYPE (parm);
      int pretend_named;

      /* Set LAST_NAMED if this is last named arg before some
         anonymous args.  */
      int last_named = ((TREE_CHAIN (parm) == 0
                         || DECL_NAME (TREE_CHAIN (parm)) == 0)
                        && (stdarg || current_function_varargs));
      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
         most machines, if this is a varargs/stdarg function, then we treat
         the last named arg as if it were anonymous too.  */
      int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;

      if (TREE_TYPE (parm) == error_mark_node
          /* This can happen after weird syntax errors
             or if an enum type is defined among the parms.  */
          || TREE_CODE (parm) != PARM_DECL
          || passed_type == NULL)
        {
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
            = gen_rtx_MEM (BLKmode, const0_rtx);
          TREE_USED (parm) = 1;
          continue;
        }

      /* For varargs.h function, save info about regs and stack space
         used by the individual args, not including the va_alist arg.  */
      if (hide_last_arg && last_named)
        current_function_args_info = args_so_far;

      /* Find mode of arg as it is passed, and mode of arg
         as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (passed_type);
      nominal_mode = TYPE_MODE (nominal_type);

      /* If the parm's mode is VOID, its value doesn't matter,
         and avoid the usual things like emit_move_insn that could crash.  */
      if (nominal_mode == VOIDmode)
        {
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
          continue;
        }

      /* If the parm is to be passed as a transparent union, use the
         type of the first field for the tests below.  We have already
         verified that the modes are the same.  */
      if (DECL_TRANSPARENT_UNION (parm)
          || TYPE_TRANSPARENT_UNION (passed_type))
        passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

      /* See if this arg was passed by invisible reference.  It is if
         it is an object whose size depends on the contents of the
         object itself or if the machine requires these objects be passed
         that way.  */

      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
           && contains_placeholder_p (TYPE_SIZE (passed_type)))
          || TREE_ADDRESSABLE (passed_type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
                                             passed_type, named_arg)
#endif
          )
        {
          passed_type = nominal_type = build_pointer_type (passed_type);
          passed_pointer = 1;
          passed_mode = nominal_mode = Pmode;
        }

      promoted_mode = passed_mode;

#ifdef PROMOTE_FUNCTION_ARGS
      /* Compute the mode to which the arg is actually extended.  */
      unsignedp = TREE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
#endif

      /* Let machine desc say which reg (if any) the parm arrives in.
         0 means it arrives on the stack.  */
#ifdef FUNCTION_INCOMING_ARG
      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
                                          passed_type, named_arg);
#else
      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
                                 passed_type, named_arg);
#endif

      if (entry_parm == 0)
        promoted_mode = passed_mode;

#ifdef SETUP_INCOMING_VARARGS
      /* If this is the last named parameter, do any required setup for
         varargs or stdargs.  We need to know about the case of this being an
         addressable type, in which case we skip the registers it
         would have arrived in.

         For stdargs, LAST_NAMED will be set for two parameters, the one that
         is actually the last named, and the dummy parameter.  We only
         want to do this action once.

         Also, indicate when RTL generation is to be suppressed.  */
      if (last_named && ! varargs_setup)
        {
          SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
                                  current_function_pretend_args_size,
                                  second_time);
          varargs_setup = 1;
        }
#endif

      /* Determine parm's home in the stack,
         in case it arrives in the stack or we should pretend it did.

         Compute the stack position and rtx where the argument arrives
         and its size.

         There is one complexity here:  If this was a parameter that would
         have been passed in registers, but wasn't only because it is
         __builtin_va_alist, we want locate_and_pad_parm to treat it as if
         it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
         In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
         0 as it was the previous time.  */

      pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
      locate_and_pad_parm (promoted_mode, passed_type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
#ifdef FUNCTION_INCOMING_ARG
                           FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
                                                  passed_type,
                                                  pretend_named) != 0,
#else
                           FUNCTION_ARG (args_so_far, promoted_mode,
                                         passed_type,
                                         pretend_named) != 0,
#endif
#endif
                           fndecl, &stack_args_size, &stack_offset, &arg_size);

      if (! second_time)
        {
          rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);

          if (offset_rtx == const0_rtx)
            stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
          else
            stack_parm = gen_rtx_MEM (promoted_mode,
                                      gen_rtx_PLUS (Pmode,
                                                    internal_arg_pointer,
                                                    offset_rtx));

          /* If this is a memory ref that contains aggregate components,
             mark it as such for cse and loop optimize.  Likewise if it
             is readonly.  */
          MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
          RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
          MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
        }

      /* If this parameter was passed both in registers and in the stack,
         use the copy on the stack.  */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
        entry_parm = 0;

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      /* If this parm was passed part in regs and part in memory,
         pretend it arrived entirely in memory
         by pushing the register-part onto the stack.

         In the special case of a DImode or DFmode that is split,
         we could put it together in a pseudoreg directly,
         but for now that's not worth bothering with.  */

      if (entry_parm)
        {
          int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
                                                  passed_type, named_arg);

          if (nregs > 0)
            {
              current_function_pretend_args_size
                = (((nregs * UNITS_PER_WORD)
                    + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
                   / (PARM_BOUNDARY / BITS_PER_UNIT)
                   * (PARM_BOUNDARY / BITS_PER_UNIT));

              if (! second_time)
                {
                  /* Handle calls that pass values in multiple non-contiguous
                     locations.  The Irix 6 ABI has examples of this.  */
                  if (GET_CODE (entry_parm) == PARALLEL)
                    emit_group_store (validize_mem (stack_parm), entry_parm,
                                      int_size_in_bytes (TREE_TYPE (parm)),
                                      (TYPE_ALIGN (TREE_TYPE (parm))
                                       / BITS_PER_UNIT));
                  else
                    move_block_from_reg (REGNO (entry_parm),
                                         validize_mem (stack_parm), nregs,
                                         int_size_in_bytes (TREE_TYPE (parm)));
                }
              entry_parm = stack_parm;
            }
        }
#endif

      /* If we didn't decide this parm came in a register,
         by default it came on the stack.  */
      if (entry_parm == 0)
        entry_parm = stack_parm;

      /* Record permanently how this parm was passed.  */
      if (! second_time)
        DECL_INCOMING_RTL (parm) = entry_parm;

      /* If there is actually space on the stack for this parm,
         count it in stack_args_size; otherwise set stack_parm to 0
         to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
          || (GET_CODE (entry_parm) == PARALLEL
              && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
          /* On some machines, even if a parm value arrives in a register
             there is still an (uninitialized) stack slot allocated for it.

             ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
             whether this parameter already has a stack slot allocated,
             because an arg block exists only if current_function_args_size
             is larger than some threshold, and we haven't calculated that
             yet.  So, for now, we just assume that stack slots never exist
             in this case.  */
          || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
          )
        {
          stack_args_size.constant += arg_size.constant;
          if (arg_size.var)
            ADD_PARM_SIZE (stack_args_size, arg_size.var);
        }
      else
        /* No stack slot was pushed for this parm.  */
        stack_parm = 0;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
                            passed_type, named_arg);

      /* If this is our second time through, we are done with this parm.  */
      if (second_time)
        continue;

      /* If we can't trust the parm stack slot to be aligned enough
         for its ultimate type, don't use that slot after entry.
         We'll make another stack slot, if we need one.  */
      {
        int thisparm_boundary
          = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

        if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
          stack_parm = 0;
      }

      /* If parm was passed in memory, and we need to convert it on entry,
         don't store it back in that same slot.  */
      if (entry_parm != 0
          && nominal_mode != BLKmode && nominal_mode != passed_mode)
        stack_parm = 0;

#if 0
      /* Now adjust STACK_PARM to the mode and precise location
         where this parameter should live during execution,
         if we discover that it must live in the stack during execution.
         To make debuggers happier on big-endian machines, we store
         the value in the last bytes of the space available.  */

      if (nominal_mode != BLKmode && nominal_mode != passed_mode
          && stack_parm != 0)
        {
          rtx offset_rtx;

          if (BYTES_BIG_ENDIAN
              && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
            stack_offset.constant += (GET_MODE_SIZE (passed_mode)
                                      - GET_MODE_SIZE (nominal_mode));

          offset_rtx = ARGS_SIZE_RTX (stack_offset);
          if (offset_rtx == const0_rtx)
            stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
          else
            stack_parm = gen_rtx_MEM (nominal_mode,
                                      gen_rtx_PLUS (Pmode,
                                                    internal_arg_pointer,
                                                    offset_rtx));

          /* If this is a memory ref that contains aggregate components,
             mark it as such for cse and loop optimize.  */
          MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
        }
#endif /* 0 */

#ifdef STACK_REGS
      /* We need this "use" info, because the gcc-register->stack-register
         converter in reg-stack.c needs to know which registers are active
         at the start of the function call.  The actual parameter loading
         instructions are not always available then anymore, since they might
         have been optimised away.  */

      if (GET_CODE (entry_parm) == REG && ! (hide_last_arg && last_named))
        emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
#endif

      /* ENTRY_PARM is an RTX for the parameter as it arrives,
         in the mode in which it arrives.
         STACK_PARM is an RTX for a stack slot where the parameter can live
         during the function (in case we want to put it there).
         STACK_PARM is 0 if no stack slot was pushed for it.

         Now output code if necessary to convert ENTRY_PARM to
         the type in which this function declares it,
         and store that result in an appropriate place,
         which may be a pseudo reg, may be STACK_PARM,
         or may be a local stack slot if STACK_PARM is 0.

         Set DECL_RTL to that place.  */

      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
        {
          /* If a BLKmode arrives in registers, copy it to a stack slot.
             Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (entry_parm) == REG
              || GET_CODE (entry_parm) == PARALLEL)
            {
              int size_stored
                = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
                              UNITS_PER_WORD);

              /* Note that we will be storing an integral number of words.
                 So we have to be careful to ensure that we allocate an
                 integral number of words.  We do this below in the
                 assign_stack_local if space was not allocated in the argument
                 list.  If it was, this will not work if PARM_BOUNDARY is not
                 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
                 if it becomes a problem.  */

              if (stack_parm == 0)
                {
                  stack_parm
                    = assign_stack_local (GET_MODE (entry_parm),
                                          size_stored, 0);

                  /* If this is a memory ref that contains aggregate
                     components, mark it as such for cse and loop
                     optimize.  */
                  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
                }

              else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
                abort ();

              if (TREE_READONLY (parm))
                RTX_UNCHANGING_P (stack_parm) = 1;

              /* Handle calls that pass values in multiple non-contiguous
                 locations.  The Irix 6 ABI has examples of this.  */
              if (GET_CODE (entry_parm) == PARALLEL)
                emit_group_store (validize_mem (stack_parm), entry_parm,
                                  int_size_in_bytes (TREE_TYPE (parm)),
                                  (TYPE_ALIGN (TREE_TYPE (parm))
                                   / BITS_PER_UNIT));
              else
                move_block_from_reg (REGNO (entry_parm),
                                     validize_mem (stack_parm),
                                     size_stored / UNITS_PER_WORD,
                                     int_size_in_bytes (TREE_TYPE (parm)));
            }
          DECL_RTL (parm) = stack_parm;
        }
      else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
                   && ! DECL_INLINE (fndecl))
                  /* layout_decl may set this.  */
                  || TREE_ADDRESSABLE (parm)
                  || TREE_SIDE_EFFECTS (parm)
                  /* If -ffloat-store specified, don't put explicit
                     float variables into registers.  */
                  || (flag_float_store
                      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
               /* Always assign pseudo to structure return or item passed
                  by invisible reference.  */
               || passed_pointer || parm == function_result_decl)
        {
          /* Store the parm in a pseudoregister during the function, but we
             may need to do it in a wider mode.  */

          register rtx parmreg;
          int regno, regnoi = 0, regnor = 0;

          unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));

          promoted_nominal_mode
            = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);

          parmreg = gen_reg_rtx (promoted_nominal_mode);
          mark_user_reg (parmreg);

          /* If this was an item that we received a pointer to, set DECL_RTL
             appropriately.  */
          if (passed_pointer)
            {
              DECL_RTL (parm)
                = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
              MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
            }
          else
            DECL_RTL (parm) = parmreg;

          /* Copy the value into the register.  */
          if (nominal_mode != passed_mode
              || promoted_nominal_mode != promoted_mode)
            {
              int save_tree_used;
              /* ENTRY_PARM has been converted to PROMOTED_MODE, its
                 mode, by the caller.  We now have to convert it to
                 NOMINAL_MODE, if different.  However, PARMREG may be in
                 a different mode than NOMINAL_MODE if it is being stored
                 promoted.

                 If ENTRY_PARM is a hard register, it might be in a register
                 not valid for operating in its mode (e.g., an odd-numbered
                 register for a DFmode).  In that case, moves are the only
                 thing valid, so we can't do a convert from there.  This
                 occurs when the calling sequence allows such misaligned
                 usages.

                 In addition, the conversion may involve a call, which could
                 clobber parameters which haven't been copied to pseudo
                 registers yet.  Therefore, we must first copy the parm to
                 a pseudo reg here, and save the conversion until after all
                 parameters have been moved.  */

              rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

              emit_move_insn (tempreg, validize_mem (entry_parm));

              push_to_sequence (conversion_insns);
              tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);

              /* TREE_USED gets set erroneously during expand_assignment.  */
              save_tree_used = TREE_USED (parm);
              expand_assignment (parm,
                                 make_tree (nominal_type, tempreg), 0, 0);
              TREE_USED (parm) = save_tree_used;
              conversion_insns = get_insns ();
              did_conversion = 1;
              end_sequence ();
            }
          else
            emit_move_insn (parmreg, validize_mem (entry_parm));

          /* If we were passed a pointer but the actual value
             can safely live in a register, put it in one.  */
          if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
              && ! ((obey_regdecls && ! DECL_REGISTER (parm)
                     && ! DECL_INLINE (fndecl))
                    /* layout_decl may set this.  */
                    || TREE_ADDRESSABLE (parm)
                    || TREE_SIDE_EFFECTS (parm)
                    /* If -ffloat-store specified, don't put explicit
                       float variables into registers.  */
                    || (flag_float_store
                        && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
            {
              /* We can't use nominal_mode, because it will have been set to
                 Pmode above.  We must use the actual mode of the parm.  */
              parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
              mark_user_reg (parmreg);
              emit_move_insn (parmreg, DECL_RTL (parm));
              DECL_RTL (parm) = parmreg;
              /* STACK_PARM is the pointer, not the parm, and PARMREG is
                 now the parm.  */
              stack_parm = 0;
            }
#ifdef FUNCTION_ARG_CALLEE_COPIES
          /* If we are passed an arg by reference and it is our
             responsibility to make a copy, do it now.
             PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
             original argument, so we must recreate them in the call to
             FUNCTION_ARG_CALLEE_COPIES.  */
          /* ??? Later add code to handle the case that if the argument isn't
             modified, don't do the copy.  */

          else if (passed_pointer
                   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
                                                  TYPE_MODE (DECL_ARG_TYPE (parm)),
                                                  DECL_ARG_TYPE (parm),
                                                  named_arg)
                   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
            {
              rtx copy;
              tree type = DECL_ARG_TYPE (parm);

              /* This sequence may involve a library call perhaps clobbering
                 registers that haven't been copied to pseudos yet.  */

              push_to_sequence (conversion_insns);

              if (TYPE_SIZE (type) == 0
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                /* This is a variable sized object.  */
                copy = gen_rtx_MEM (BLKmode,
                                    allocate_dynamic_stack_space
                                    (expr_size (parm), NULL_RTX,
                                     TYPE_ALIGN (type)));
              else
                copy = assign_stack_temp (TYPE_MODE (type),
                                          int_size_in_bytes (type), 1);
              MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
              RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);

              store_expr (parm, copy, 0);
              emit_move_insn (parmreg, XEXP (copy, 0));
              if (current_function_check_memory_usage)
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   XEXP (copy, 0), Pmode,
                                   GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              conversion_insns = get_insns ();
              did_conversion = 1;
              end_sequence ();
            }
#endif /* FUNCTION_ARG_CALLEE_COPIES */

          /* In any case, record the parm's desired stack location
             in case we later discover it must live in the stack.

             If it is a COMPLEX value, store the stack location for both
             halves.  */

          if (GET_CODE (parmreg) == CONCAT)
            regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
          else
            regno = REGNO (parmreg);

          if (regno >= max_parm_reg)
            {
              rtx *new;
              int old_max_parm_reg = max_parm_reg;

              /* It's slow to expand this one register at a time,
                 but it's also rare and we need max_parm_reg to be
                 precisely correct.  */
              max_parm_reg = regno + 1;
              new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
              bcopy ((char *) parm_reg_stack_loc, (char *) new,
                     old_max_parm_reg * sizeof (rtx));
              bzero ((char *) (new + old_max_parm_reg),
                     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
              parm_reg_stack_loc = new;
            }

          if (GET_CODE (parmreg) == CONCAT)
            {
              enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

              regnor = REGNO (gen_realpart (submode, parmreg));
              regnoi = REGNO (gen_imagpart (submode, parmreg));

              if (stack_parm != 0)
                {
                  parm_reg_stack_loc[regnor]
                    = gen_realpart (submode, stack_parm);
                  parm_reg_stack_loc[regnoi]
                    = gen_imagpart (submode, stack_parm);
                }
              else
                {
                  parm_reg_stack_loc[regnor] = 0;
                  parm_reg_stack_loc[regnoi] = 0;
                }
            }
          else
            parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;

          /* Mark the register as eliminable if we did no conversion
             and it was copied from memory at a fixed offset,
             and the arg pointer was not copied to a pseudo-reg.
             If the arg pointer is a pseudo reg or the offset formed
             an invalid address, such memory-equivalences
             as we make here would screw up life analysis for it.  */
          if (nominal_mode == passed_mode
              && ! did_conversion
              && stack_parm != 0
              && GET_CODE (stack_parm) == MEM
              && stack_offset.var == 0
              && reg_mentioned_p (virtual_incoming_args_rtx,
                                  XEXP (stack_parm, 0)))
            {
              rtx linsn = get_last_insn ();
              rtx sinsn, set;

              /* Mark complex types separately.  */
              if (GET_CODE (parmreg) == CONCAT)
                /* Scan backwards for the set of the real and
                   imaginary parts.  */
                for (sinsn = linsn; sinsn != 0;
                     sinsn = prev_nonnote_insn (sinsn))
                  {
                    set = single_set (sinsn);
                    if (set != 0
                        && SET_DEST (set) == regno_reg_rtx[regnoi])
                      REG_NOTES (sinsn)
                        = gen_rtx_EXPR_LIST (REG_EQUIV,
                                             parm_reg_stack_loc[regnoi],
                                             REG_NOTES (sinsn));
                    else if (set != 0
                             && SET_DEST (set) == regno_reg_rtx[regnor])
                      REG_NOTES (sinsn)
                        = gen_rtx_EXPR_LIST (REG_EQUIV,
                                             parm_reg_stack_loc[regnor],
                                             REG_NOTES (sinsn));
                  }
              else if ((set = single_set (linsn)) != 0
                       && SET_DEST (set) == parmreg)
                REG_NOTES (linsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV,
                                       stack_parm, REG_NOTES (linsn));
            }

          /* For pointer data type, suggest pointer register.  */
          if (POINTER_TYPE_P (TREE_TYPE (parm)))
            mark_reg_pointer (parmreg,
                              (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
                               / BITS_PER_UNIT));
        }
      else
        {
          /* Value must be stored in the stack slot STACK_PARM
             during function execution.  */

          if (promoted_mode != nominal_mode)
            {
              /* Conversion is required.  */
              rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

              emit_move_insn (tempreg, validize_mem (entry_parm));

              push_to_sequence (conversion_insns);
              entry_parm = convert_to_mode (nominal_mode, tempreg,
                                            TREE_UNSIGNED (TREE_TYPE (parm)));
              if (stack_parm)
                {
                  /* ??? This may need a big-endian conversion on sparc64.  */
                  stack_parm = change_address (stack_parm, nominal_mode,
                                               NULL_RTX);
                }
              conversion_insns = get_insns ();
              did_conversion = 1;
              end_sequence ();
            }

          if (entry_parm != stack_parm)
            {
              if (stack_parm == 0)
                {
                  stack_parm
                    = assign_stack_local (GET_MODE (entry_parm),
                                          GET_MODE_SIZE (GET_MODE (entry_parm)),
                                          0);
                  /* If this is a memory ref that contains aggregate
                     components, mark it as such for cse and loop
                     optimize.  */
                  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
                }

              if (promoted_mode != nominal_mode)
                {
                  push_to_sequence (conversion_insns);
                  emit_move_insn (validize_mem (stack_parm),
                                  validize_mem (entry_parm));
                  conversion_insns = get_insns ();
                  end_sequence ();
                }
              else
                emit_move_insn (validize_mem (stack_parm),
                                validize_mem (entry_parm));
            }
          if (current_function_check_memory_usage)
            {
              push_to_sequence (conversion_insns);
              emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                 XEXP (stack_parm, 0), Pmode,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE
                                                         (entry_parm))),
                                 TYPE_MODE (sizetype),
                                 GEN_INT (MEMORY_USE_RW),
                                 TYPE_MODE (integer_type_node));

              conversion_insns = get_insns ();
              end_sequence ();
            }
          DECL_RTL (parm) = stack_parm;
        }

      /* If this "parameter" was the place where we are receiving the
         function's incoming structure pointer, set up the result.  */
      if (parm == function_result_decl)
        {
          tree result = DECL_RESULT (fndecl);
          tree restype = TREE_TYPE (result);

          DECL_RTL (result)
            = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));

          MEM_SET_IN_STRUCT_P (DECL_RTL (result),
                               AGGREGATE_TYPE_P (restype));
        }

      if (TREE_THIS_VOLATILE (parm))
        MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
      if (TREE_READONLY (parm))
        RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
    }

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insns (conversion_insns);

  last_parm_insn = get_last_insn ();

  current_function_args_size = stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
#ifndef MAYBE_REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
                                    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif

#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;
#endif

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
                                  size_int (-stack_args_size.constant)),
                      NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
                                                 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  if (! hide_last_arg)
    current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
}
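
/* To sketch the overall effect on a hypothetical target that passes the
   first few arguments in registers: for

     int f (int a, double b) { ... }

   assign_parms would typically leave DECL_RTL (a) as a fresh pseudo
   initialized from the incoming hard register, DECL_RTL (b) either as a
   pseudo or as a MEM at its arrival stack slot, and DECL_INCOMING_RTL of
   each parm recording where it actually arrived.  */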
\f
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}

#endif
4814\f
6f086dfc
RS
4815/* Compute the size and offset from the start of the stacked arguments for a
4816 parm passed in mode PASSED_MODE and with type TYPE.
4817
4818 INITIAL_OFFSET_PTR points to the current offset into the stacked
4819 arguments.
4820
4821 The starting offset and size for this parm are returned in *OFFSET_PTR
4822 and *ARG_SIZE_PTR, respectively.
4823
4824 IN_REGS is non-zero if the argument will be passed in registers. It will
4825 never be set if REG_PARM_STACK_SPACE is not defined.
4826
4827 FNDECL is the function in which the argument was defined.
4828
4829 There are two types of rounding that are done. The first, controlled by
4830 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4831 list to be aligned to the specific boundary (in bits). This rounding
4832 affects the initial and starting offsets, but not the argument size.
4833
4834 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4835 optionally rounds the size of the parm to PARM_BOUNDARY. The
4836 initial offset is not affected by this rounding, while the size always
4837 is and the starting offset may be. */
4838
4839/* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
 4840 initial_offset_ptr is positive because locate_and_pad_parm's
 4841 callers pass in the total size of args so far as
 4842 initial_offset_ptr. arg_size_ptr is always positive. */
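/* For instance (illustrative figures, assuming BITS_PER_UNIT == 8,
   PARM_BOUNDARY == 32 and a FUNCTION_ARG_BOUNDARY of 64): starting
   from an initial offset of 4 bytes, the first rounding moves the
   starting offset up to 8; a 6-byte argument is then padded to
   round_up (6, 4) == 8 bytes, so the next argument starts at 16.  */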
4843
6f086dfc
RS
4844void
4845locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4846 initial_offset_ptr, offset_ptr, arg_size_ptr)
4847 enum machine_mode passed_mode;
4848 tree type;
4849 int in_regs;
91813b28 4850 tree fndecl ATTRIBUTE_UNUSED;
6f086dfc
RS
4851 struct args_size *initial_offset_ptr;
4852 struct args_size *offset_ptr;
4853 struct args_size *arg_size_ptr;
4854{
4855 tree sizetree
4856 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4857 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4858 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
6f086dfc
RS
4859
4860#ifdef REG_PARM_STACK_SPACE
4861 /* If we have found a stack parm before we reach the end of the
4862 area reserved for registers, skip that area. */
4863 if (! in_regs)
4864 {
29a82058
JL
4865 int reg_parm_stack_space = 0;
4866
29008b51
JW
4867#ifdef MAYBE_REG_PARM_STACK_SPACE
4868 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4869#else
6f086dfc 4870 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
29008b51 4871#endif
6f086dfc
RS
4872 if (reg_parm_stack_space > 0)
4873 {
4874 if (initial_offset_ptr->var)
4875 {
4876 initial_offset_ptr->var
4877 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4878 size_int (reg_parm_stack_space));
4879 initial_offset_ptr->constant = 0;
4880 }
4881 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4882 initial_offset_ptr->constant = reg_parm_stack_space;
4883 }
4884 }
4885#endif /* REG_PARM_STACK_SPACE */
4886
4887 arg_size_ptr->var = 0;
4888 arg_size_ptr->constant = 0;
4889
4890#ifdef ARGS_GROW_DOWNWARD
4891 if (initial_offset_ptr->var)
4892 {
4893 offset_ptr->constant = 0;
4894 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4895 initial_offset_ptr->var);
4896 }
4897 else
4898 {
4899 offset_ptr->constant = - initial_offset_ptr->constant;
4900 offset_ptr->var = 0;
4901 }
0b21dcf5 4902 if (where_pad != none
6f086dfc
RS
4903 && (TREE_CODE (sizetree) != INTEGER_CST
4904 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4905 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4906 SUB_PARM_SIZE (*offset_ptr, sizetree);
66bcbe19
TG
4907 if (where_pad != downward)
4908 pad_to_arg_alignment (offset_ptr, boundary);
6f086dfc
RS
4909 if (initial_offset_ptr->var)
4910 {
4911 arg_size_ptr->var = size_binop (MINUS_EXPR,
4912 size_binop (MINUS_EXPR,
4913 integer_zero_node,
4914 initial_offset_ptr->var),
4915 offset_ptr->var);
4916 }
4917 else
4918 {
db3cf6fb
MS
4919 arg_size_ptr->constant = (- initial_offset_ptr->constant
4920 - offset_ptr->constant);
6f086dfc 4921 }
6f086dfc
RS
4922#else /* !ARGS_GROW_DOWNWARD */
4923 pad_to_arg_alignment (initial_offset_ptr, boundary);
4924 *offset_ptr = *initial_offset_ptr;
6f086dfc
RS
4925
4926#ifdef PUSH_ROUNDING
4927 if (passed_mode != BLKmode)
4928 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4929#endif
4930
d4b0a7a0
DE
4931 /* Pad_below needs the pre-rounded size to know how much to pad below
4932 so this must be done before rounding up. */
ea5917da
DE
4933 if (where_pad == downward
4934 /* However, BLKmode args passed in regs have their padding done elsewhere.
4935 The stack slot must be able to hold the entire register. */
4936 && !(in_regs && passed_mode == BLKmode))
d4b0a7a0
DE
4937 pad_below (offset_ptr, passed_mode, sizetree);
4938
6f086dfc
RS
4939 if (where_pad != none
4940 && (TREE_CODE (sizetree) != INTEGER_CST
4941 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4942 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4943
4944 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4945#endif /* ARGS_GROW_DOWNWARD */
4946}
4947
e16c591a
RS
4948/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4949 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4950
6f086dfc
RS
4951static void
4952pad_to_arg_alignment (offset_ptr, boundary)
4953 struct args_size *offset_ptr;
4954 int boundary;
4955{
4956 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4957
4958 if (boundary > BITS_PER_UNIT)
4959 {
4960 if (offset_ptr->var)
4961 {
4962 offset_ptr->var =
4963#ifdef ARGS_GROW_DOWNWARD
4964 round_down
4965#else
4966 round_up
4967#endif
4968 (ARGS_SIZE_TREE (*offset_ptr),
4969 boundary / BITS_PER_UNIT);
4970 offset_ptr->constant = 0; /*?*/
4971 }
4972 else
4973 offset_ptr->constant =
4974#ifdef ARGS_GROW_DOWNWARD
4975 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4976#else
4977 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4978#endif
4979 }
4980}
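/* For example (assuming BITS_PER_UNIT == 8): with BOUNDARY == 64, a
   constant offset of 20 becomes CEIL_ROUND (20, 8) == 24; in the
   ARGS_GROW_DOWNWARD case an offset of -20 becomes
   FLOOR_ROUND (-20, 8) == -24.  */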
4981
51723711 4982#ifndef ARGS_GROW_DOWNWARD
6f086dfc
RS
4983static void
4984pad_below (offset_ptr, passed_mode, sizetree)
4985 struct args_size *offset_ptr;
4986 enum machine_mode passed_mode;
4987 tree sizetree;
4988{
4989 if (passed_mode != BLKmode)
4990 {
4991 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4992 offset_ptr->constant
4993 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4994 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4995 - GET_MODE_SIZE (passed_mode));
4996 }
4997 else
4998 {
4999 if (TREE_CODE (sizetree) != INTEGER_CST
5000 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5001 {
5002 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5003 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5004 /* Add it in. */
5005 ADD_PARM_SIZE (*offset_ptr, s2);
5006 SUB_PARM_SIZE (*offset_ptr, sizetree);
5007 }
5008 }
5009}
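/* For example (assuming BITS_PER_UNIT == 8 and PARM_BOUNDARY == 32):
   a 2-byte HImode parm occupies a slot rounded up to 4 bytes, so
   pad_below advances *OFFSET_PTR by 4 - 2 == 2 bytes of padding.  */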
51723711 5010#endif
6f086dfc 5011
487a6e06 5012#ifdef ARGS_GROW_DOWNWARD
6f086dfc
RS
5013static tree
5014round_down (value, divisor)
5015 tree value;
5016 int divisor;
5017{
5018 return size_binop (MULT_EXPR,
5019 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5020 size_int (divisor));
5021}
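/* E.g. round_down (size_int (13), 4) yields the size_int 12, since
   FLOOR_DIV_EXPR gives 13 / 4 == 3 and 3 * 4 == 12.  */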
487a6e06 5022#endif
6f086dfc
RS
5023\f
5024/* Walk the tree of blocks describing the binding levels within a function
5025 and warn about uninitialized variables.
5026 This is done after calling flow_analysis and before global_alloc
5027 clobbers the pseudo-regs to hard regs. */
5028
5029void
5030uninitialized_vars_warning (block)
5031 tree block;
5032{
5033 register tree decl, sub;
5034 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5035 {
5036 if (TREE_CODE (decl) == VAR_DECL
5037 /* These warnings are unreliable for aggregates
5038 because assigning the fields one by one can fail to convince
5039 flow.c that the entire aggregate was initialized.
5040 Unions are troublesome because members may be shorter. */
05e3bdb9 5041 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
6f086dfc
RS
5042 && DECL_RTL (decl) != 0
5043 && GET_CODE (DECL_RTL (decl)) == REG
6acdd0fd
JL
5044 /* Global optimizations can make it difficult to determine if a
5045 particular variable has been initialized. However, a VAR_DECL
5046 with a nonzero DECL_INITIAL had an initializer, so do not
5047 claim it is potentially uninitialized.
5048
5049 We do not care about the actual value in DECL_INITIAL, so we do
5050 not worry that it may be a dangling pointer. */
5051 && DECL_INITIAL (decl) == NULL_TREE
6f086dfc
RS
5052 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5053 warning_with_decl (decl,
3c8cd8bd 5054 "`%s' might be used uninitialized in this function");
6f086dfc
RS
5055 if (TREE_CODE (decl) == VAR_DECL
5056 && DECL_RTL (decl) != 0
5057 && GET_CODE (DECL_RTL (decl)) == REG
5058 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5059 warning_with_decl (decl,
3c8cd8bd 5060 "variable `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
5061 }
5062 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5063 uninitialized_vars_warning (sub);
5064}
5065
5066/* Do the appropriate part of uninitialized_vars_warning
5067 but for arguments instead of local variables. */
5068
5069void
0cd6ef35 5070setjmp_args_warning ()
6f086dfc
RS
5071{
5072 register tree decl;
5073 for (decl = DECL_ARGUMENTS (current_function_decl);
5074 decl; decl = TREE_CHAIN (decl))
5075 if (DECL_RTL (decl) != 0
5076 && GET_CODE (DECL_RTL (decl)) == REG
5077 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3c8cd8bd 5078 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
6f086dfc
RS
5079}
5080
5081/* If this function call setjmp, put all vars into the stack
5082 unless they were declared `register'. */
5083
5084void
5085setjmp_protect (block)
5086 tree block;
5087{
5088 register tree decl, sub;
5089 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5090 if ((TREE_CODE (decl) == VAR_DECL
5091 || TREE_CODE (decl) == PARM_DECL)
5092 && DECL_RTL (decl) != 0
e9a25f70
JL
5093 && (GET_CODE (DECL_RTL (decl)) == REG
5094 || (GET_CODE (DECL_RTL (decl)) == MEM
5095 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
b335c2cc 5096 /* If this variable came from an inline function, it must be
9ec36da5 5097 that its life doesn't overlap the setjmp. If there was a
b335c2cc
TW
5098 setjmp in the function, it would already be in memory. We
5099 must exclude such variable because their DECL_RTL might be
5100 set to strange things such as virtual_stack_vars_rtx. */
5101 && ! DECL_FROM_INLINE (decl)
6f086dfc
RS
5102 && (
5103#ifdef NON_SAVING_SETJMP
5104 /* If longjmp doesn't restore the registers,
5105 don't put anything in them. */
5106 NON_SAVING_SETJMP
5107 ||
5108#endif
a82ad570 5109 ! DECL_REGISTER (decl)))
6f086dfc
RS
5110 put_var_into_stack (decl);
5111 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5112 setjmp_protect (sub);
5113}
5114\f
5115/* Like the previous function, but for args instead of local variables. */
5116
5117void
5118setjmp_protect_args ()
5119{
29a82058 5120 register tree decl;
6f086dfc
RS
5121 for (decl = DECL_ARGUMENTS (current_function_decl);
5122 decl; decl = TREE_CHAIN (decl))
5123 if ((TREE_CODE (decl) == VAR_DECL
5124 || TREE_CODE (decl) == PARM_DECL)
5125 && DECL_RTL (decl) != 0
e9a25f70
JL
5126 && (GET_CODE (DECL_RTL (decl)) == REG
5127 || (GET_CODE (DECL_RTL (decl)) == MEM
5128 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
6f086dfc
RS
5129 && (
5130 /* If longjmp doesn't restore the registers,
5131 don't put anything in them. */
5132#ifdef NON_SAVING_SETJMP
5133 NON_SAVING_SETJMP
5134 ||
5135#endif
a82ad570 5136 ! DECL_REGISTER (decl)))
6f086dfc
RS
5137 put_var_into_stack (decl);
5138}
5139\f
5140/* Return the context-pointer register corresponding to DECL,
5141 or 0 if it does not need one. */
5142
5143rtx
5144lookup_static_chain (decl)
5145 tree decl;
5146{
b001a02f
PB
5147 tree context = decl_function_context (decl);
5148 tree link;
7ad8c4bf 5149
38ee6ed9
JM
5150 if (context == 0
5151 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
7ad8c4bf 5152 return 0;
38ee6ed9 5153
6f086dfc
RS
5154 /* We treat inline_function_decl as an alias for the current function
5155 because that is the inline function whose vars, types, etc.
5156 are being merged into the current function.
5157 See expand_inline_function. */
5158 if (context == current_function_decl || context == inline_function_decl)
5159 return virtual_stack_vars_rtx;
5160
5161 for (link = context_display; link; link = TREE_CHAIN (link))
5162 if (TREE_PURPOSE (link) == context)
5163 return RTL_EXPR_RTL (TREE_VALUE (link));
5164
5165 abort ();
5166}
5167\f
5168/* Convert a stack slot address ADDR for variable VAR
5169 (from a containing function)
5170 into an address valid in this function (using a static chain). */
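/* For example (a sketch): if VAR lives in the parent's frame at
   (plus virtual_stack_vars_rtx 12), the result here is
   (plus BASE 12), where BASE is the static chain or saved argument
   pointer for that frame: the same displacement, rebased.  */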
5171
5172rtx
5173fix_lexical_addr (addr, var)
5174 rtx addr;
5175 tree var;
5176{
5177 rtx basereg;
e5e809f4 5178 HOST_WIDE_INT displacement;
6f086dfc
RS
5179 tree context = decl_function_context (var);
5180 struct function *fp;
5181 rtx base = 0;
5182
5183 /* If this is the present function, we need not do anything. */
5184 if (context == current_function_decl || context == inline_function_decl)
5185 return addr;
5186
5187 for (fp = outer_function_chain; fp; fp = fp->next)
5188 if (fp->decl == context)
5189 break;
5190
5191 if (fp == 0)
5192 abort ();
5193
e9a25f70
JL
5194 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5195 addr = XEXP (XEXP (addr, 0), 0);
5196
6f086dfc
RS
5197 /* Decode given address as base reg plus displacement. */
5198 if (GET_CODE (addr) == REG)
5199 basereg = addr, displacement = 0;
5200 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5201 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5202 else
5203 abort ();
5204
5205 /* We accept vars reached via the containing function's
5206 incoming arg pointer and via its stack variables pointer. */
5207 if (basereg == fp->internal_arg_pointer)
5208 {
5209 /* If reached via arg pointer, get the arg pointer value
5210 out of that function's stack frame.
5211
5212 There are two cases: If a separate ap is needed, allocate a
5213 slot in the outer function for it and dereference it that way.
5214 This is correct even if the real ap is actually a pseudo.
5215 Otherwise, just adjust the offset from the frame pointer to
5216 compensate. */
5217
5218#ifdef NEED_SEPARATE_AP
5219 rtx addr;
5220
49ad7cfa
BS
5221 if (fp->x_arg_pointer_save_area == 0)
5222 fp->x_arg_pointer_save_area
6f086dfc
RS
5223 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5224
49ad7cfa 5225 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
6f086dfc
RS
5226 addr = memory_address (Pmode, addr);
5227
38a448ca 5228 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
6f086dfc
RS
5229#else
5230 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
86f8eff3 5231 base = lookup_static_chain (var);
6f086dfc
RS
5232#endif
5233 }
5234
5235 else if (basereg == virtual_stack_vars_rtx)
5236 {
5237 /* This is the same code as lookup_static_chain, duplicated here to
5238 avoid an extra call to decl_function_context. */
5239 tree link;
5240
5241 for (link = context_display; link; link = TREE_CHAIN (link))
5242 if (TREE_PURPOSE (link) == context)
5243 {
5244 base = RTL_EXPR_RTL (TREE_VALUE (link));
5245 break;
5246 }
5247 }
5248
5249 if (base == 0)
5250 abort ();
5251
5252 /* Use same offset, relative to appropriate static chain or argument
5253 pointer. */
5254 return plus_constant (base, displacement);
5255}
5256\f
5257/* Return the address of the trampoline for entering nested fn FUNCTION.
5258 If necessary, allocate a trampoline (in the stack frame)
5259 and emit rtl to initialize its contents (at entry to this function). */
5260
5261rtx
5262trampoline_address (function)
5263 tree function;
5264{
5265 tree link;
5266 tree rtlexp;
5267 rtx tramp;
5268 struct function *fp;
5269 tree fn_context;
5270
5271 /* Find an existing trampoline and return it. */
5272 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5273 if (TREE_PURPOSE (link) == function)
e87ee2a9
RK
5274 return
5275 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5276
6f086dfc 5277 for (fp = outer_function_chain; fp; fp = fp->next)
49ad7cfa 5278 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
6f086dfc
RS
5279 if (TREE_PURPOSE (link) == function)
5280 {
5281 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5282 function);
5283 return round_trampoline_addr (tramp);
5284 }
5285
5286 /* None exists; we must make one. */
5287
5288 /* Find the `struct function' for the function containing FUNCTION. */
5289 fp = 0;
5290 fn_context = decl_function_context (function);
4ac74fb8
RK
5291 if (fn_context != current_function_decl
5292 && fn_context != inline_function_decl)
6f086dfc
RS
5293 for (fp = outer_function_chain; fp; fp = fp->next)
5294 if (fp->decl == fn_context)
5295 break;
5296
5297 /* Allocate run-time space for this trampoline
5298 (usually in the defining function's stack frame). */
5299#ifdef ALLOCATE_TRAMPOLINE
5300 tramp = ALLOCATE_TRAMPOLINE (fp);
5301#else
5302 /* If rounding needed, allocate extra space
5303 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5304#ifdef TRAMPOLINE_ALIGNMENT
b02ab63a
RK
5305#define TRAMPOLINE_REAL_SIZE \
5306 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
6f086dfc
RS
5307#else
5308#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5309#endif
5310 if (fp != 0)
5311 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5312 else
5313 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5314#endif
5315
5316 /* Record the trampoline for reuse and note it for later initialization
5317 by expand_function_end. */
5318 if (fp != 0)
5319 {
28498644
RK
5320 push_obstacks (fp->function_maybepermanent_obstack,
5321 fp->function_maybepermanent_obstack);
6f086dfc
RS
5322 rtlexp = make_node (RTL_EXPR);
5323 RTL_EXPR_RTL (rtlexp) = tramp;
49ad7cfa
BS
5324 fp->x_trampoline_list = tree_cons (function, rtlexp,
5325 fp->x_trampoline_list);
6f086dfc
RS
5326 pop_obstacks ();
5327 }
5328 else
5329 {
5330 /* Make the RTL_EXPR node temporary, not momentary, so that the
5331 trampoline_list doesn't become garbage. */
5332 int momentary = suspend_momentary ();
5333 rtlexp = make_node (RTL_EXPR);
5334 resume_momentary (momentary);
5335
5336 RTL_EXPR_RTL (rtlexp) = tramp;
5337 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5338 }
5339
5340 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5341 return round_trampoline_addr (tramp);
5342}
5343
5344/* Given a trampoline address,
5345 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5346
5347static rtx
5348round_trampoline_addr (tramp)
5349 rtx tramp;
5350{
5351#ifdef TRAMPOLINE_ALIGNMENT
5352 /* Round address up to desired boundary. */
5353 rtx temp = gen_reg_rtx (Pmode);
5354 temp = expand_binop (Pmode, add_optab, tramp,
b02ab63a 5355 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
6f086dfc
RS
5356 temp, 0, OPTAB_LIB_WIDEN);
5357 tramp = expand_binop (Pmode, and_optab, temp,
b02ab63a 5358 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
6f086dfc
RS
5359 temp, 0, OPTAB_LIB_WIDEN);
5360#endif
5361 return tramp;
5362}
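/* For example (assuming TRAMPOLINE_ALIGNMENT == 64, i.e. 8 bytes):
   an address of 0x1003 is rounded to (0x1003 + 7) & -8 == 0x1008;
   the slack in TRAMPOLINE_REAL_SIZE guarantees the rounded block
   still holds TRAMPOLINE_SIZE bytes.  */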
5363\f
467456d0
RS
5364/* The functions identify_blocks and reorder_blocks provide a way to
5365 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5366 duplicate portions of the RTL code. Call identify_blocks before
5367 changing the RTL, and call reorder_blocks after. */
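/* A typical sequence (sketch; FNDECL is assumed to be the
   FUNCTION_DECL and INSNS its insn chain):

	tree *map = identify_blocks (DECL_INITIAL (fndecl), insns);
	... reshuffle or duplicate the RTL ...
	DECL_INITIAL (fndecl)
	  = reorder_blocks (map, DECL_INITIAL (fndecl), insns);
	free (map);

   The vector comes from xmalloc, so freeing it is the caller's
   responsibility.  */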
5368
b2a59b15
MS
5369/* Put all this function's BLOCK nodes, including those that are chained
 5370 onto the first block, into a vector, and return it.
467456d0
RS
5371 Also store in each NOTE for the beginning or end of a block
5372 the index of that block in the vector.
b2a59b15 5373 The arguments are BLOCK, the chain of top-level blocks of the function,
467456d0
RS
5374 and INSNS, the insn chain of the function. */
5375
5376tree *
b2a59b15
MS
5377identify_blocks (block, insns)
5378 tree block;
467456d0
RS
5379 rtx insns;
5380{
fc289cd1
JW
5381 int n_blocks;
5382 tree *block_vector;
5383 int *block_stack;
467456d0 5384 int depth = 0;
b2a59b15
MS
5385 int next_block_number = 1;
5386 int current_block_number = 1;
467456d0
RS
5387 rtx insn;
5388
b2a59b15 5389 if (block == 0)
fc289cd1
JW
5390 return 0;
5391
b2a59b15 5392 n_blocks = all_blocks (block, 0);
fc289cd1
JW
5393 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5394 block_stack = (int *) alloca (n_blocks * sizeof (int));
5395
b2a59b15 5396 all_blocks (block, block_vector);
467456d0
RS
5397
5398 for (insn = insns; insn; insn = NEXT_INSN (insn))
5399 if (GET_CODE (insn) == NOTE)
5400 {
5401 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5402 {
5403 block_stack[depth++] = current_block_number;
5404 current_block_number = next_block_number;
1b2ac438 5405 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
467456d0
RS
5406 }
5407 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5408 {
1b2ac438 5409 NOTE_BLOCK_NUMBER (insn) = current_block_number;
c7fdfd49 5410 current_block_number = block_stack[--depth];
467456d0
RS
5411 }
5412 }
5413
b2a59b15
MS
5414 if (n_blocks != next_block_number)
5415 abort ();
5416
467456d0
RS
5417 return block_vector;
5418}
5419
5420/* Given BLOCK_VECTOR which was returned by identify_blocks,
5421 and a revised instruction chain, rebuild the tree structure
5422 of BLOCK nodes to correspond to the new order of RTL.
fc289cd1 5423 The new block tree is inserted below TOP_BLOCK.
467456d0
RS
5424 Returns the current top-level block. */
5425
5426tree
b2a59b15 5427reorder_blocks (block_vector, block, insns)
467456d0 5428 tree *block_vector;
b2a59b15 5429 tree block;
467456d0
RS
5430 rtx insns;
5431{
b2a59b15 5432 tree current_block = block;
467456d0
RS
5433 rtx insn;
5434
fc289cd1 5435 if (block_vector == 0)
b2a59b15 5436 return block;
fc289cd1 5437
b2a59b15 5438 /* Prune the old trees away, so that they don't get in the way. */
fc289cd1 5439 BLOCK_SUBBLOCKS (current_block) = 0;
b2a59b15 5440 BLOCK_CHAIN (current_block) = 0;
fc289cd1 5441
467456d0
RS
5442 for (insn = insns; insn; insn = NEXT_INSN (insn))
5443 if (GET_CODE (insn) == NOTE)
5444 {
5445 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5446 {
5447 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5448 /* If we have seen this block before, copy it. */
5449 if (TREE_ASM_WRITTEN (block))
5450 block = copy_node (block);
fc289cd1 5451 BLOCK_SUBBLOCKS (block) = 0;
467456d0
RS
5452 TREE_ASM_WRITTEN (block) = 1;
5453 BLOCK_SUPERCONTEXT (block) = current_block;
5454 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5455 BLOCK_SUBBLOCKS (current_block) = block;
5456 current_block = block;
1b2ac438 5457 NOTE_SOURCE_FILE (insn) = 0;
467456d0
RS
5458 }
5459 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5460 {
5461 BLOCK_SUBBLOCKS (current_block)
5462 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5463 current_block = BLOCK_SUPERCONTEXT (current_block);
1b2ac438 5464 NOTE_SOURCE_FILE (insn) = 0;
467456d0
RS
5465 }
5466 }
5467
b2a59b15
MS
5468 BLOCK_SUBBLOCKS (current_block)
5469 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
467456d0
RS
5470 return current_block;
5471}
5472
5473/* Reverse the order of elements in the chain T of blocks,
5474 and return the new head of the chain (old last element). */
5475
5476static tree
5477blocks_nreverse (t)
5478 tree t;
5479{
5480 register tree prev = 0, decl, next;
5481 for (decl = t; decl; decl = next)
5482 {
5483 next = BLOCK_CHAIN (decl);
5484 BLOCK_CHAIN (decl) = prev;
5485 prev = decl;
5486 }
5487 return prev;
5488}
5489
b2a59b15
MS
5490/* Count the subblocks of the list starting with BLOCK, and list them
5491 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5492 blocks. */
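/* Called with VECTOR == 0 this merely counts; identify_blocks uses
   that to size its array before a second call fills it in.  */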
467456d0
RS
5493
5494static int
5495all_blocks (block, vector)
5496 tree block;
5497 tree *vector;
5498{
b2a59b15
MS
5499 int n_blocks = 0;
5500
5501 while (block)
5502 {
5503 TREE_ASM_WRITTEN (block) = 0;
5504
5505 /* Record this block. */
5506 if (vector)
5507 vector[n_blocks] = block;
5508
5509 ++n_blocks;
5510
5511 /* Record the subblocks, and their subblocks... */
5512 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5513 vector ? vector + n_blocks : 0);
5514 block = BLOCK_CHAIN (block);
5515 }
467456d0
RS
5516
5517 return n_blocks;
5518}
5519\f
b384405b
BS
5520/* Allocate a function structure and reset its contents to the defaults. */
5521static void
5522prepare_function_start ()
6f086dfc 5523{
b384405b
BS
5524 current_function = (struct function *) xcalloc (1, sizeof (struct function));
5525
6f086dfc
RS
5526 init_stmt_for_function ();
5527
5528 cse_not_expected = ! optimize;
5529
5530 /* Caller save not needed yet. */
5531 caller_save_needed = 0;
5532
5533 /* No stack slots have been made yet. */
5534 stack_slot_list = 0;
5535
b384405b
BS
5536 current_function_has_nonlocal_label = 0;
5537 current_function_has_nonlocal_goto = 0;
5538
6f086dfc 5539 /* There is no stack slot for handling nonlocal gotos. */
ba716ac9 5540 nonlocal_goto_handler_slots = 0;
6f086dfc
RS
5541 nonlocal_goto_stack_level = 0;
5542
5543 /* No labels have been declared for nonlocal use. */
5544 nonlocal_labels = 0;
e881bb1b 5545 nonlocal_goto_handler_labels = 0;
6f086dfc
RS
5546
5547 /* No function calls so far in this function. */
5548 function_call_count = 0;
5549
5550 /* No parm regs have been allocated.
5551 (This is important for output_inline_function.) */
5552 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5553
5554 /* Initialize the RTL mechanism. */
5555 init_emit ();
5556
5557 /* Initialize the queue of pending postincrements and postdecrements,
5558 and some other info in expr.c. */
5559 init_expr ();
b384405b 5560
6f086dfc
RS
5561 /* We haven't done register allocation yet. */
5562 reg_renumber = 0;
5563
5564 init_const_rtx_hash_table ();
5565
6f086dfc
RS
5566 /* Set if a call to setjmp is seen. */
5567 current_function_calls_setjmp = 0;
5568
5569 /* Set if a call to longjmp is seen. */
5570 current_function_calls_longjmp = 0;
5571
5572 current_function_calls_alloca = 0;
6f086dfc 5573 current_function_contains_functions = 0;
54ff41b7 5574 current_function_is_leaf = 0;
fdb8a883 5575 current_function_sp_is_unchanging = 0;
54ff41b7 5576 current_function_uses_only_leaf_regs = 0;
acd693d1 5577 current_function_has_computed_jump = 0;
173cd503 5578 current_function_is_thunk = 0;
6f086dfc
RS
5579
5580 current_function_returns_pcc_struct = 0;
5581 current_function_returns_struct = 0;
5582 current_function_epilogue_delay_list = 0;
5583 current_function_uses_const_pool = 0;
5584 current_function_uses_pic_offset_table = 0;
aeb302bb 5585 current_function_cannot_inline = 0;
6f086dfc 5586
49ad7cfa
BS
5587 current_function->inlinable = 0;
5588
6f086dfc
RS
5589 /* We have not yet needed to make a label to jump to for tail-recursion. */
5590 tail_recursion_label = 0;
5591
5592 /* We haven't had a need to make a save area for ap yet. */
6f086dfc
RS
5593 arg_pointer_save_area = 0;
5594
5595 /* No stack slots allocated yet. */
5596 frame_offset = 0;
5597
5598 /* No SAVE_EXPRs in this function yet. */
5599 save_expr_regs = 0;
5600
5601 /* No RTL_EXPRs in this function yet. */
5602 rtl_expr_chain = 0;
5603
bc0ebdf9
RK
5604 /* Set up to allocate temporaries. */
5605 init_temp_slots ();
6f086dfc 5606
b384405b
BS
5607 /* Indicate that we need to distinguish between the return value of the
5608 present function and the return value of a function being called. */
5609 rtx_equal_function_value_matters = 1;
5610
5611 /* Indicate that we have not instantiated virtual registers yet. */
5612 virtuals_instantiated = 0;
5613
5614 /* Indicate we have no need of a frame pointer yet. */
5615 frame_pointer_needed = 0;
5616
5617 /* By default assume not varargs or stdarg. */
5618 current_function_varargs = 0;
5619 current_function_stdarg = 0;
6f086dfc 5620
d9a98e1a
RK
5621 /* We haven't made any trampolines for this function yet. */
5622 trampoline_list = 0;
5623
6f086dfc
RS
5624 init_pending_stack_adjust ();
5625 inhibit_defer_pop = 0;
5626
5627 current_function_outgoing_args_size = 0;
b384405b
BS
5628}
5629
5630/* Initialize the rtl expansion mechanism so that we can do simple things
5631 like generate sequences. This is used to provide a context during global
5632 initialization of some passes. */
5633void
5634init_dummy_function_start ()
5635{
5636 prepare_function_start ();
5637}
5638
5639/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5640 and initialize static variables for generating RTL for the statements
5641 of the function. */
5642
5643void
5644init_function_start (subr, filename, line)
5645 tree subr;
5646 char *filename;
5647 int line;
5648{
5649 prepare_function_start ();
5650
5651 /* Remember this function for later. */
5652 current_function->next_global = all_functions;
5653 all_functions = current_function;
5654
5655 current_function_name = (*decl_printable_name) (subr, 2);
5656
5657 /* Nonzero if this is a nested function that uses a static chain. */
5658
5659 current_function_needs_context
5660 = (decl_function_context (current_function_decl) != 0
5661 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5662
5663 /* Within function body, compute a type's size as soon as it is laid out. */
5664 immediate_size_expand++;
6f086dfc 5665
6f086dfc 5666 /* Prevent ever trying to delete the first instruction of a function.
b274104c
PB
5667 Also tell final how to output a linenum before the function prologue.
5668 Note linenums could be missing, e.g. when compiling a Java .class file. */
5669 if (line > 0)
5670 emit_line_note (filename, line);
6f086dfc
RS
5671
5672 /* Make sure first insn is a note even if we don't want linenums.
5673 This makes sure the first insn will never be deleted.
5674 Also, final expects a note to appear there. */
5f4f0e22 5675 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6f086dfc
RS
5676
5677 /* Set flags used by final.c. */
5678 if (aggregate_value_p (DECL_RESULT (subr)))
5679 {
5680#ifdef PCC_STATIC_STRUCT_RETURN
1b8297c1 5681 current_function_returns_pcc_struct = 1;
6f086dfc 5682#endif
1b8297c1 5683 current_function_returns_struct = 1;
6f086dfc
RS
5684 }
5685
5686 /* Warn if this value is an aggregate type,
5687 regardless of which calling convention we are using for it. */
5688 if (warn_aggregate_return
05e3bdb9 5689 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6f086dfc
RS
5690 warning ("function returns an aggregate");
5691
5692 current_function_returns_pointer
8eda074c 5693 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
49ad7cfa 5694}
5c7675e9 5695
49ad7cfa
BS
5696/* Make sure all values used by the optimization passes have sane
5697 defaults. */
5698void
5699init_function_for_compilation ()
5700{
5701 reg_renumber = 0;
5c7675e9
RH
5702 /* No prologue/epilogue insns yet. */
5703 prologue = epilogue = 0;
6f086dfc
RS
5704}
5705
5706/* Indicate that the current function uses extra args
5707 not explicitly mentioned in the argument list in any fashion. */
5708
5709void
5710mark_varargs ()
5711{
5712 current_function_varargs = 1;
5713}
5714
5715/* Expand a call to __main at the beginning of a possible main function. */
5716
e2fd1d94
JM
5717#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5718#undef HAS_INIT_SECTION
5719#define HAS_INIT_SECTION
5720#endif
5721
6f086dfc
RS
5722void
5723expand_main_function ()
5724{
e2fd1d94 5725#if !defined (HAS_INIT_SECTION)
b93a436e
JL
5726 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5727 VOIDmode, 0);
e2fd1d94 5728#endif /* not HAS_INIT_SECTION */
6f086dfc
RS
5729}
5730\f
c20bf1f3
JB
5731extern struct obstack permanent_obstack;
5732
6f086dfc
RS
5733/* Start the RTL for a new function, and set variables used for
5734 emitting RTL.
5735 SUBR is the FUNCTION_DECL node.
5736 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5737 the function's parameters, which must be run at any return statement. */
5738
5739void
5740expand_function_start (subr, parms_have_cleanups)
5741 tree subr;
5742 int parms_have_cleanups;
5743{
5744 register int i;
5745 tree tem;
4e86caed 5746 rtx last_ptr = NULL_RTX;
6f086dfc
RS
5747
5748 /* Make sure volatile mem refs aren't considered
5749 valid operands of arithmetic insns. */
5750 init_recog_no_volatile ();
5751
7d384cc0
KR
5752 /* Set this before generating any memory accesses. */
5753 current_function_check_memory_usage
5754 = (flag_check_memory_usage
5755 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5756
07417085
KR
5757 current_function_instrument_entry_exit
5758 = (flag_instrument_function_entry_exit
5759 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5760
6f086dfc
RS
5761 /* If function gets a static chain arg, store it in the stack frame.
5762 Do this first, so it gets the first stack slot offset. */
5763 if (current_function_needs_context)
3e2481e9
JW
5764 {
5765 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
f0c51a1e 5766
f0c51a1e
RK
5767 /* Delay copying static chain if it is not a register to avoid
5768 conflicts with regs used for parameters. */
f95182a4
ILT
5769 if (! SMALL_REGISTER_CLASSES
5770 || GET_CODE (static_chain_incoming_rtx) == REG)
f0c51a1e 5771 emit_move_insn (last_ptr, static_chain_incoming_rtx);
3e2481e9 5772 }
6f086dfc
RS
5773
5774 /* If the parameters of this function need cleaning up, get a label
5775 for the beginning of the code which executes those cleanups. This must
5776 be done before doing anything with return_label. */
5777 if (parms_have_cleanups)
5778 cleanup_label = gen_label_rtx ();
5779 else
5780 cleanup_label = 0;
5781
5782 /* Make the label for return statements to jump to, if this machine
5783 does not have a one-instruction return and uses an epilogue,
5784 or if it returns a structure, or if it has parm cleanups. */
5785#ifdef HAVE_return
5786 if (cleanup_label == 0 && HAVE_return
07417085 5787 && ! current_function_instrument_entry_exit
6f086dfc
RS
5788 && ! current_function_returns_pcc_struct
5789 && ! (current_function_returns_struct && ! optimize))
5790 return_label = 0;
5791 else
5792 return_label = gen_label_rtx ();
5793#else
5794 return_label = gen_label_rtx ();
5795#endif
5796
5797 /* Initialize rtx used to return the value. */
5798 /* Do this before assign_parms so that we copy the struct value address
5799 before any library calls that assign parms might generate. */
5800
5801 /* Decide whether to return the value in memory or in a register. */
5802 if (aggregate_value_p (DECL_RESULT (subr)))
5803 {
5804 /* Returning something that won't go in a register. */
4acc00bf 5805 register rtx value_address = 0;
6f086dfc
RS
5806
5807#ifdef PCC_STATIC_STRUCT_RETURN
5808 if (current_function_returns_pcc_struct)
5809 {
5810 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5811 value_address = assemble_static_space (size);
5812 }
5813 else
5814#endif
5815 {
5816 /* Expect to be passed the address of a place to store the value.
5817 If it is passed as an argument, assign_parms will take care of
5818 it. */
5819 if (struct_value_incoming_rtx)
5820 {
5821 value_address = gen_reg_rtx (Pmode);
5822 emit_move_insn (value_address, struct_value_incoming_rtx);
5823 }
5824 }
5825 if (value_address)
ccdecf58
RK
5826 {
5827 DECL_RTL (DECL_RESULT (subr))
38a448ca 5828 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
c6df88cb
MM
5829 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5830 AGGREGATE_TYPE_P (TREE_TYPE
5831 (DECL_RESULT
5832 (subr))));
ccdecf58 5833 }
6f086dfc
RS
5834 }
5835 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5836 /* If return mode is void, this decl rtl should not be used. */
5837 DECL_RTL (DECL_RESULT (subr)) = 0;
07417085 5838 else if (parms_have_cleanups || current_function_instrument_entry_exit)
a53e14c0
RK
5839 {
5840 /* If function will end with cleanup code for parms,
5841 compute the return values into a pseudo reg,
5842 which we will copy into the true return register
5843 after the cleanups are done. */
5844
5845 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
a5a52dbc 5846
a53e14c0
RK
5847#ifdef PROMOTE_FUNCTION_RETURN
5848 tree type = TREE_TYPE (DECL_RESULT (subr));
5849 int unsignedp = TREE_UNSIGNED (type);
5850
a5a52dbc 5851 mode = promote_mode (type, mode, &unsignedp, 1);
a53e14c0
RK
5852#endif
5853
5854 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5855 }
6f086dfc
RS
5856 else
5857 /* Scalar, returned in a register. */
5858 {
5859#ifdef FUNCTION_OUTGOING_VALUE
5860 DECL_RTL (DECL_RESULT (subr))
5861 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5862#else
5863 DECL_RTL (DECL_RESULT (subr))
5864 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5865#endif
5866
5867 /* Mark this reg as the function's return value. */
5868 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5869 {
5870 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5871 /* Needed because we may need to move this to memory
5872 in case it's a named return value whose address is taken. */
a82ad570 5873 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6f086dfc
RS
5874 }
5875 }
5876
5877 /* Initialize rtx for parameters and local variables.
5878 In some cases this requires emitting insns. */
5879
5880 assign_parms (subr, 0);
5881
f0c51a1e
RK
5882 /* Copy the static chain now if it wasn't a register. The delay is to
5883 avoid conflicts with the parameter passing registers. */
5884
f95182a4 5885 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
f0c51a1e
RK
5886 if (GET_CODE (static_chain_incoming_rtx) != REG)
5887 emit_move_insn (last_ptr, static_chain_incoming_rtx);
f0c51a1e 5888
6f086dfc
RS
5889 /* The following was moved from init_function_start.
5890 The move is supposed to make sdb output more accurate. */
5891 /* Indicate the beginning of the function body,
5892 as opposed to parm setup. */
5f4f0e22 5893 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6f086dfc
RS
5894
5895 /* If doing stupid allocation, mark parms as born here. */
5896
5897 if (GET_CODE (get_last_insn ()) != NOTE)
5f4f0e22 5898 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6f086dfc
RS
5899 parm_birth_insn = get_last_insn ();
5900
5901 if (obey_regdecls)
5902 {
5903 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5904 use_variable (regno_reg_rtx[i]);
5905
5906 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5907 use_variable (current_function_internal_arg_pointer);
5908 }
5909
6d7306f7
JM
5910 context_display = 0;
5911 if (current_function_needs_context)
ac9e20f0 5912 {
6d7306f7
JM
5913 /* Fetch static chain values for containing functions. */
5914 tem = decl_function_context (current_function_decl);
5915 /* If not doing stupid register allocation copy the static chain
5916 pointer into a pseudo. If we have small register classes, copy
5917 the value from memory if static_chain_incoming_rtx is a REG. If
5918 we do stupid register allocation, we use the stack address
5919 generated above. */
5920 if (tem && ! obey_regdecls)
5921 {
6d7306f7
JM
5922 /* If the static chain originally came in a register, put it back
5923 there, then move it out in the next insn. The reason for
5924 this peculiar code is to satisfy function integration. */
f95182a4
ILT
5925 if (SMALL_REGISTER_CLASSES
5926 && GET_CODE (static_chain_incoming_rtx) == REG)
6d7306f7 5927 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6d7306f7
JM
5928 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5929 }
ac9e20f0 5930
6d7306f7
JM
5931 while (tem)
5932 {
5933 tree rtlexp = make_node (RTL_EXPR);
6f086dfc 5934
6d7306f7
JM
5935 RTL_EXPR_RTL (rtlexp) = last_ptr;
5936 context_display = tree_cons (tem, rtlexp, context_display);
5937 tem = decl_function_context (tem);
5938 if (tem == 0)
5939 break;
5940 /* Chain thru stack frames, assuming pointer to next lexical frame
5941 is found at the place we always store it. */
6f086dfc 5942#ifdef FRAME_GROWS_DOWNWARD
6d7306f7 5943 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6f086dfc 5944#endif
38a448ca
RH
5945 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5946 memory_address (Pmode, last_ptr)));
6d7306f7
JM
5947
5948 /* If we are not optimizing, ensure that we know that this
5949 piece of context is live over the entire function. */
5950 if (! optimize)
38a448ca
RH
5951 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5952 save_expr_regs);
6d7306f7 5953 }
6f086dfc
RS
5954 }
5955
07417085
KR
5956 if (current_function_instrument_entry_exit)
5957 {
5958 rtx fun = DECL_RTL (current_function_decl);
5959 if (GET_CODE (fun) == MEM)
5960 fun = XEXP (fun, 0);
5961 else
5962 abort ();
5963 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5964 fun, Pmode,
5965 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5966 0,
5967 hard_frame_pointer_rtx),
5968 Pmode);
5969 }
5970
6f086dfc
RS
5971 /* After the display initializations is where the tail-recursion label
5972 should go, if we end up needing one. Ensure we have a NOTE here
5973 since some things (like trampolines) get placed before this. */
5f4f0e22 5974 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6f086dfc
RS
5975
5976 /* Evaluate now the sizes of any types declared among the arguments. */
5977 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
7b05e286 5978 {
86fa911a
RK
5979 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5980 EXPAND_MEMORY_USE_BAD);
7b05e286
JW
5981 /* Flush the queue in case this parameter declaration has
5982 side-effects. */
5983 emit_queue ();
5984 }
6f086dfc
RS
5985
5986 /* Make sure there is a line number after the function entry setup code. */
5987 force_next_line_note ();
5988}
5989\f
49ad7cfa
BS
5990/* Undo the effects of init_dummy_function_start. */
5991void
5992expand_dummy_function_end ()
5993{
5994 /* End any sequences that failed to be closed due to syntax errors. */
5995 while (in_sequence_p ())
5996 end_sequence ();
5997
5998 /* Outside function body, can't compute type's actual size
5999 until next function's body starts. */
6000 current_function = 0;
6001}
6002
6f086dfc 6003/* Generate RTL for the end of the current function.
980697fd 6004 FILENAME and LINE are the current position in the source file.
6f086dfc 6005
980697fd 6006 It is up to language-specific callers to do cleanups for parameters--
1be07046 6007 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6f086dfc
RS
6008
6009void
1be07046 6010expand_function_end (filename, line, end_bindings)
6f086dfc
RS
6011 char *filename;
6012 int line;
1be07046 6013 int end_bindings;
6f086dfc
RS
6014{
6015 register int i;
6016 tree link;
6017
1e2414db 6018#ifdef TRAMPOLINE_TEMPLATE
6f086dfc 6019 static rtx initial_trampoline;
1e2414db 6020#endif
6f086dfc 6021
49ad7cfa
BS
6022 finish_expr_for_function ();
6023
6f086dfc
RS
6024#ifdef NON_SAVING_SETJMP
6025 /* Don't put any variables in registers if we call setjmp
6026 on a machine that fails to restore the registers. */
6027 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6028 {
b88a3142
RK
6029 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6030 setjmp_protect (DECL_INITIAL (current_function_decl));
6031
6f086dfc
RS
6032 setjmp_protect_args ();
6033 }
6034#endif
6035
6036 /* Save the argument pointer if a save area was made for it. */
6037 if (arg_pointer_save_area)
6038 {
ea0f9a85
JW
6039 /* arg_pointer_save_area may not be a valid memory address, so we
6040 have to check it and fix it if necessary. */
6041 rtx seq;
6042 start_sequence ();
6043 emit_move_insn (validize_mem (arg_pointer_save_area),
6044 virtual_incoming_args_rtx);
6045 seq = gen_sequence ();
6046 end_sequence ();
6047 emit_insn_before (seq, tail_recursion_reentry);
6f086dfc
RS
6048 }
6049
6050 /* Initialize any trampolines required by this function. */
6051 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6052 {
6053 tree function = TREE_PURPOSE (link);
6054 rtx context = lookup_static_chain (function);
6055 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
7a87758d 6056#ifdef TRAMPOLINE_TEMPLATE
1e2414db 6057 rtx blktramp;
7a87758d 6058#endif
6f086dfc
RS
6059 rtx seq;
6060
1e2414db 6061#ifdef TRAMPOLINE_TEMPLATE
6f086dfc
RS
6062 /* First make sure this compilation has a template for
6063 initializing trampolines. */
6064 if (initial_trampoline == 0)
86f8eff3
RK
6065 {
6066 end_temporary_allocation ();
6067 initial_trampoline
38a448ca 6068 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
86f8eff3
RK
6069 resume_temporary_allocation ();
6070 }
1e2414db 6071#endif
6f086dfc
RS
6072
6073 /* Generate insns to initialize the trampoline. */
6074 start_sequence ();
1e2414db
RK
6075 tramp = round_trampoline_addr (XEXP (tramp, 0));
6076#ifdef TRAMPOLINE_TEMPLATE
6077 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6078 emit_block_move (blktramp, initial_trampoline,
6079 GEN_INT (TRAMPOLINE_SIZE),
189cc377 6080 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
1e2414db
RK
6081#endif
6082 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6f086dfc
RS
6083 seq = get_insns ();
6084 end_sequence ();
6085
6086 /* Put those insns at entry to the containing function (this one). */
6087 emit_insns_before (seq, tail_recursion_reentry);
6088 }
6f086dfc 6089
11044f66
RK
6090 /* If we are doing stack checking and this function makes calls,
6091 do a stack probe at the start of the function to ensure we have enough
6092 space for another stack frame. */
6093 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6094 {
6095 rtx insn, seq;
6096
6097 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6098 if (GET_CODE (insn) == CALL_INSN)
6099 {
6100 start_sequence ();
6101 probe_stack_range (STACK_CHECK_PROTECT,
6102 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6103 seq = get_insns ();
6104 end_sequence ();
6105 emit_insns_before (seq, tail_recursion_reentry);
6106 break;
6107 }
6108 }
6109
db8717d9
RK
6110 /* Warn about unused parms if extra warnings were specified. */
6111 if (warn_unused && extra_warnings)
6f086dfc 6112 {
db8717d9 6113 tree decl;
6f086dfc
RS
6114
6115 for (decl = DECL_ARGUMENTS (current_function_decl);
6116 decl; decl = TREE_CHAIN (decl))
497dc802
JM
6117 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6118 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6f086dfc
RS
6119 warning_with_decl (decl, "unused parameter `%s'");
6120 }
6f086dfc
RS
6121
6122 /* Delete handlers for nonlocal gotos if nothing uses them. */
ba716ac9
BS
6123 if (nonlocal_goto_handler_slots != 0
6124 && ! current_function_has_nonlocal_label)
6f086dfc
RS
6125 delete_handlers ();
6126
6127 /* End any sequences that failed to be closed due to syntax errors. */
6128 while (in_sequence_p ())
5f4f0e22 6129 end_sequence ();
6f086dfc
RS
6130
6131 /* Outside function body, can't compute type's actual size
6132 until next function's body starts. */
6133 immediate_size_expand--;
6134
6135 /* If doing stupid register allocation,
6136 mark register parms as dying here. */
6137
6138 if (obey_regdecls)
6139 {
6140 rtx tem;
6141 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6142 use_variable (regno_reg_rtx[i]);
6143
6144 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6145
6146 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6147 {
6148 use_variable (XEXP (tem, 0));
6149 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6150 }
6151
6152 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6153 use_variable (current_function_internal_arg_pointer);
6154 }
6155
6156 clear_pending_stack_adjust ();
6157 do_pending_stack_adjust ();
6158
6159 /* Mark the end of the function body.
6160 If control reaches this insn, the function can drop through
6161 without returning a value. */
5f4f0e22 6162 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6f086dfc 6163
82e415a3
DE
6164 /* Must mark the last line number note in the function, so that the test
6165 coverage code can avoid counting the last line twice. This just tells
6166 the code to ignore the immediately following line note, since there
6167 already exists a copy of this note somewhere above. This line number
6168 note is still needed for debugging though, so we can't delete it. */
6169 if (flag_test_coverage)
6170 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6171
6f086dfc
RS
6172 /* Output a linenumber for the end of the function.
6173 SDB depends on this. */
6174 emit_line_note_force (filename, line);
6175
6176 /* Output the label for the actual return from the function,
6177 if one is expected. This happens either because a function epilogue
6178 is used instead of a return instruction, or because a return was done
6179 with a goto in order to run local cleanups, or because of pcc-style
6180 structure returning. */
6181
6182 if (return_label)
6183 emit_label (return_label);
6184
1be07046
RS
6185 /* C++ uses this. */
6186 if (end_bindings)
6187 expand_end_bindings (0, 0, 0);
6188
e5a1e0e8
MS
6189 /* Now handle any leftover exception regions that may have been
6190 created for the parameters. */
6191 {
6192 rtx last = get_last_insn ();
6193 rtx label;
6194
6195 expand_leftover_cleanups ();
6196
6197 /* If the above emitted any code, make sure we jump around it. */
6198 if (last != get_last_insn ())
6199 {
6200 label = gen_label_rtx ();
6201 last = emit_jump_insn_after (gen_jump (label), last);
6202 last = emit_barrier_after (last);
6203 emit_label (label);
6204 }
6205 }
6206
07417085
KR
6207 if (current_function_instrument_entry_exit)
6208 {
6209 rtx fun = DECL_RTL (current_function_decl);
6210 if (GET_CODE (fun) == MEM)
6211 fun = XEXP (fun, 0);
6212 else
6213 abort ();
6214 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6215 fun, Pmode,
6216 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6217 0,
6218 hard_frame_pointer_rtx),
6219 Pmode);
6220 }
6221
6f086dfc
RS
6222 /* If we had calls to alloca, and this machine needs
6223 an accurate stack pointer to exit the function,
6224 insert some code to save and restore the stack pointer. */
6225#ifdef EXIT_IGNORE_STACK
6226 if (! EXIT_IGNORE_STACK)
6227#endif
6228 if (current_function_calls_alloca)
6229 {
59257ff7
RK
6230 rtx tem = 0;
6231
6232 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5f4f0e22 6233 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6f086dfc
RS
6234 }
6235
6236 /* If scalar return value was computed in a pseudo-reg,
6237 copy that to the hard return register. */
6238 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6239 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6240 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6241 >= FIRST_PSEUDO_REGISTER))
6242 {
6243 rtx real_decl_result;
6244
6245#ifdef FUNCTION_OUTGOING_VALUE
6246 real_decl_result
6247 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6248 current_function_decl);
6249#else
6250 real_decl_result
6251 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6252 current_function_decl);
6253#endif
6254 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
ecec4441
JW
6255 /* If this is a BLKmode structure being returned in registers, then use
6256 the mode computed in expand_return. */
6257 if (GET_MODE (real_decl_result) == BLKmode)
6258 PUT_MODE (real_decl_result,
6259 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6f086dfc
RS
6260 emit_move_insn (real_decl_result,
6261 DECL_RTL (DECL_RESULT (current_function_decl)));
38a448ca 6262 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
f345de42
JL
6263
6264 /* The delay slot scheduler assumes that current_function_return_rtx
6265 holds the hard register containing the return value, not a temporary
6266 pseudo. */
6267 current_function_return_rtx = real_decl_result;
6f086dfc
RS
6268 }
6269
6270 /* If returning a structure, arrange to return the address of the value
6271 in a place where debuggers expect to find it.
6272
6273 If returning a structure PCC style,
6274 the caller also depends on this value.
6275 And current_function_returns_pcc_struct is not necessarily set. */
6276 if (current_function_returns_struct
6277 || current_function_returns_pcc_struct)
6278 {
6279 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6280 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6281#ifdef FUNCTION_OUTGOING_VALUE
6282 rtx outgoing
6283 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6284 current_function_decl);
6285#else
6286 rtx outgoing
6287 = FUNCTION_VALUE (build_pointer_type (type),
6288 current_function_decl);
6289#endif
6290
6291 /* Mark this as a function return value so integrate will delete the
6292 assignment and USE below when inlining this function. */
6293 REG_FUNCTION_VALUE_P (outgoing) = 1;
6294
6295 emit_move_insn (outgoing, value_address);
6296 use_variable (outgoing);
6297 }
6298
71038426
RH
6299 /* If this is an implementation of __throw, do what's necessary to
6300 communicate between __builtin_eh_return and the epilogue. */
6301 expand_eh_return ();
6302
6f086dfc
RS
6303 /* Output a return insn if we are using one.
6304 Otherwise, let the rtl chain end here, to drop through
6305 into the epilogue. */
6306
6307#ifdef HAVE_return
6308 if (HAVE_return)
6309 {
6310 emit_jump_insn (gen_return ());
6311 emit_barrier ();
6312 }
6313#endif
6314
6315 /* Fix up any gotos that jumped out to the outermost
6316 binding level of the function.
6317 Must follow emitting RETURN_LABEL. */
6318
6319 /* If you have any cleanups to do at this point,
6320 and they need to create temporary variables,
6321 then you will lose. */
e15679f8 6322 expand_fixups (get_insns ());
6f086dfc 6323}
bdac5f58 6324\f
bdac5f58
TW
6325/* Create an array that records the INSN_UIDs of INSNS (either a sequence
6326 or a single insn). */
6327
487a6e06 6328#if defined (HAVE_prologue) || defined (HAVE_epilogue)
bdac5f58
TW
6329static int *
6330record_insns (insns)
6331 rtx insns;
6332{
6333 int *vec;
6334
6335 if (GET_CODE (insns) == SEQUENCE)
6336 {
6337 int len = XVECLEN (insns, 0);
6338 vec = (int *) oballoc ((len + 1) * sizeof (int));
6339 vec[len] = 0;
6340 while (--len >= 0)
6341 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6342 }
6343 else
6344 {
6345 vec = (int *) oballoc (2 * sizeof (int));
6346 vec[0] = INSN_UID (insns);
6347 vec[1] = 0;
6348 }
6349 return vec;
6350}
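/* E.g. applied to a 3-insn SEQUENCE this yields { uid0, uid1, uid2, 0 };
   the trailing zero terminates the scans in contains, below.  */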
6351
10914065 6352/* Determine how many INSN_UIDs in VEC are part of INSN. */
bdac5f58 6353
10914065 6354static int
bdac5f58
TW
6355contains (insn, vec)
6356 rtx insn;
6357 int *vec;
6358{
6359 register int i, j;
6360
6361 if (GET_CODE (insn) == INSN
6362 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6363 {
10914065 6364 int count = 0;
bdac5f58
TW
6365 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6366 for (j = 0; vec[j]; j++)
6367 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
10914065
TW
6368 count++;
6369 return count;
bdac5f58
TW
6370 }
6371 else
6372 {
6373 for (j = 0; vec[j]; j++)
6374 if (INSN_UID (insn) == vec[j])
10914065 6375 return 1;
bdac5f58
TW
6376 }
6377 return 0;
6378}
5c7675e9
RH
6379
6380int
6381prologue_epilogue_contains (insn)
6382 rtx insn;
6383{
6384 if (prologue && contains (insn, prologue))
6385 return 1;
6386 if (epilogue && contains (insn, epilogue))
6387 return 1;
6388 return 0;
6389}
081f5e7e 6390#endif /* HAVE_prologue || HAVE_epilogue */
bdac5f58 6391
9faa82d8 6392/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58
TW
6393 this into place with notes indicating where the prologue ends and where
6394 the epilogue begins. Update the basic block information when possible. */
6395
6396void
6397thread_prologue_and_epilogue_insns (f)
54ea1de9 6398 rtx f ATTRIBUTE_UNUSED;
bdac5f58 6399{
e881bb1b
RH
6400 int insertted = 0;
6401
bdac5f58
TW
6402#ifdef HAVE_prologue
6403 if (HAVE_prologue)
6404 {
e881bb1b 6405 rtx seq;
bdac5f58 6406
e881bb1b
RH
6407 start_sequence ();
6408 seq = gen_prologue();
6409 emit_insn (seq);
bdac5f58
TW
6410
6411 /* Retain a map of the prologue insns. */
e881bb1b
RH
6412 if (GET_CODE (seq) != SEQUENCE)
6413 seq = get_insns ();
6414 prologue = record_insns (seq);
6415
6416 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6417 seq = gen_sequence ();
6418 end_sequence ();
6419
6420 /* If optimization is off, and perhaps in an empty function,
6421 the entry block will have no successors. */
6422 if (ENTRY_BLOCK_PTR->succ)
6423 {
6424 /* Can't deal with multiple successors of the entry block. */
6425 if (ENTRY_BLOCK_PTR->succ->succ_next)
6426 abort ();
6427
6428 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6429 insertted = 1;
6430 }
6431 else
6432 emit_insn_after (seq, f);
bdac5f58 6433 }
bdac5f58 6434#endif
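
/* Note on the edge-insertion idiom above: insert_insn_on_edge only
   queues SEQ on the entry edge; nothing appears in the insn stream
   until commit_edge_insertions is called at the bottom of this
   function, which is why the `inserted' flag is carried that far.  */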

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
         but via mark_regs_live_at_end, we could use insert_insn_on_edge
         and all of this ugliness would go away.  */

      switch (optimize)
        {
        default:
          /* If the exit block has no non-fake predecessors, we don't
             need an epilogue.  Furthermore, only pay attention to the
             fallthru predecessors; if (conditional) return insns were
             generated, by definition we do not need to emit epilogue
             insns.  */

          for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
            if ((e->flags & EDGE_FAKE) == 0
                && (e->flags & EDGE_FALLTHRU) != 0)
              break;
          if (e == NULL)
            break;

          /* We can't handle multiple epilogues -- if one is needed,
             we won't be able to place it multiple times.

             ??? Fix epilogue expanders to not assume they are the
             last thing done compiling the function.  Either that
             or copy_rtx each insn.

             ??? Blah, it's not a simple expression to assert that
             we have exactly one fallthru exit edge.  */

          bb = e->src;
          tail = bb->end;

          /* ??? If the last insn of the basic block is a jump, then we
             are creating a new basic block.  Wimp out and leave these
             insns outside any block.  */
          if (GET_CODE (tail) == JUMP_INSN)
            bb = 0;

          /* FALLTHRU */
        case 0:
          {
            rtx prev, seq, first_use;

            /* Move the USE insns at the end of a function onto a list.  */
            prev = tail;
            if (GET_CODE (prev) == BARRIER
                || GET_CODE (prev) == NOTE)
              prev = prev_nonnote_insn (prev);

            first_use = 0;
            if (prev
                && GET_CODE (prev) == INSN
                && GET_CODE (PATTERN (prev)) == USE)
              {
                /* If the end of the block is the use, grab hold of something
                   else so that we emit barriers etc. in the right place.  */
                if (prev == tail)
                  {
                    do
                      tail = PREV_INSN (tail);
                    while (GET_CODE (tail) == INSN
                           && GET_CODE (PATTERN (tail)) == USE);
                  }

                do
                  {
                    rtx use = prev;
                    prev = prev_nonnote_insn (prev);

                    remove_insn (use);
                    if (first_use)
                      {
                        NEXT_INSN (use) = first_use;
                        PREV_INSN (first_use) = use;
                      }
                    else
                      NEXT_INSN (use) = NULL_RTX;
                    first_use = use;
                  }
                while (prev
                       && GET_CODE (prev) == INSN
                       && GET_CODE (PATTERN (prev)) == USE);
              }

            /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
               epilogue insns, the USE insns at the end of a function,
               the jump insn that returns, and then a BARRIER.  */
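
            /* Schematically, the end of the insn stream becomes (a
               hypothetical picture; the exact insns vary by target):

                   NOTE_INSN_EPILOGUE_BEG
                   ... epilogue insns ...
                   (use (reg ...))      ;; the saved USE insns go back here
                   (jump_insn (return))
                   barrier  */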

            if (GET_CODE (tail) != BARRIER)
              {
                prev = next_nonnote_insn (tail);
                if (!prev || GET_CODE (prev) != BARRIER)
                  emit_barrier_after (tail);
              }

            seq = gen_epilogue ();
            prev = tail;
            tail = emit_jump_insn_after (seq, tail);

            /* Insert the USE insns immediately before the return insn, which
               must be the last instruction emitted in the sequence.  */
            if (first_use)
              emit_insns_before (first_use, tail);
            emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

            /* Update the tail of the basic block.  */
            if (bb)
              bb->end = tail;

            /* Retain a map of the epilogue insns.  */
            epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          }
        }
    }
#endif

  if (inserted)
    commit_edge_insertions ();
}
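
/* Hedged usage sketch: the call site lives outside this file (in this
   era it is, to the best of our knowledge, rest_of_compilation in
   toplev.c), handing over the first insn of the function:

       thread_prologue_and_epilogue_insns (get_insns ());

   Any insns queued on the entry edge are only spliced into the stream
   by the commit_edge_insertions call above.  */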

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  rtx next;

                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }

                  next = NEXT_INSN (note);

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (BLOCK_HEAD (0) == note)
                    BLOCK_HEAD (0) = next;

                  remove_insn (note);
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (n_basic_blocks
                      && BLOCK_HEAD (n_basic_blocks - 1) == insn)
                    BLOCK_HEAD (n_basic_blocks - 1) = note;

                  remove_insn (note);
                  add_insn_before (note, insn);
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}