/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "vecprim.h"

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
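
/* Worked example (illustrative, not part of the original source): for a
   power-of-two ALIGN these macros round toward minus and plus infinity
   respectively, e.g. with ALIGN == 8:

     FLOOR_ROUND (13, 8)  ==   8     CEIL_ROUND (13, 8)  ==  16
     FLOOR_ROUND (-13, 8) == -16     CEIL_ROUND (-13, 8) ==  -8

   The masking form gives these results even for negative VALUEs, where
   integer division would round toward zero.  */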

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
\f
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
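
/* Illustrative sketch (an assumption about callers, not code from this
   file): a front end compiling a nested function brackets the inner
   compilation with the save/restore pair, e.g.

     push_function_context ();
     ... set up a fresh cfun and compile the nested body ...
     pop_function_context ();

   so the outer function's state is intact when its compilation
   resumes.  */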

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_stack_check_probe_note = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
  if (FRAME_GROWS_DOWNWARD)
    return -f->x_frame_offset;
  else
    return f->x_frame_offset;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}
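
/* Illustrative arithmetic (an assumption about a typical 32-bit target,
   not from this file): with a 32-bit Pmode and 4-byte words, the check
   above rejects any frame larger than

     (1 << 31) - 64 * 4 = 2147483392 bytes

   reserving 64 words of headroom for the fixed part of the frame
   (saved registers, return address, and the like).  */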

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    function->x_frame_offset -= size;

  /* Ignore alignment requests we cannot honor given the preferred
     stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        function->x_frame_offset
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        function->x_frame_offset
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    function->x_frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (frame_offset_overflow (function->x_frame_offset, function->decl))
    function->x_frame_offset = 0;

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
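
/* Illustrative use (an assumption, not from this file): a caller that
   needs a word-sized scratch slot with mode-derived alignment writes

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and gets back a MEM whose address is based on virtual_stack_vars_rtx;
   the address becomes frame-pointer relative once virtual registers are
   instantiated.  */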

\f
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    {
      size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
      temp_slot_p *p;

      VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
      p = VEC_address (temp_slot_p, used_temp_slots);
      memset (&p[old_length], 0,
              sizeof (temp_slot_p) * (level + 1 - old_length));
    }

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
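
/* Illustrative use (an assumption, not from this file): expansion code
   typically requests a BLKmode temporary for an aggregate value, e.g.

     rtx t = assign_stack_temp_for_type (BLKmode,
                                         int_size_in_bytes (type),
                                         0, type);

   a later request of compatible type and size may be satisfied by
   reusing this slot after free_temp_slots has released it.  */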
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at high levels.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
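
/* Illustrative arithmetic (not from this file): two freed BLKmode slots
   with { base_offset = 0, full_size = 16 } and { base_offset = 16,
   full_size = 8 } satisfy p->base_offset + p->full_size == q->base_offset,
   so Q is merged into P, leaving one 24-byte slot able to satisfy a
   larger later request.  */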
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
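
/* Illustrative source-level motivation (an assumed example, not from
   this file): in a GNU statement expression such as

     int x = ({ struct S s = make_s (); s.field; });

   the value of the ({...}) grouping may live in a temporary slot, and
   preserve_temp_slots keeps that slot alive past the inner statement
   by pretending it was allocated one nesting level up.  */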

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
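
/* Illustrative sketch of the nesting discipline (an assumption about
   callers, not code from this file):

     push_temp_slots ();
     val = expand_expr (exp, ...);     ... may allocate temporaries ...
     preserve_temp_slots (val);        ... keep the slot VAL may occupy ...
     free_temp_slots ();               ... recycle the others ...
     pop_temp_slots ();

   matching push/pop calls bracket each statement so that slots are
   reused aggressively across statements.  */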
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
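
/* Illustrative mapping (not from this file): after expansion a local's
   address might be (plus (reg virtual-stack-vars) (const_int 8)); this
   routine maps the virtual register to frame_pointer_rtx plus var_offset,
   so the pass can rewrite the address to
   (plus (reg frame-pointer) (const_int var_offset + 8)).  */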

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
        {
          *loc = plus_constant (new, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
        {
          new = plus_constant (new, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new);
          if (x != new)
            emit_move_insn (new, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && GET_CODE (recog_data.operand[2]) == CONST_INT
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new = instantiate_new_reg (x, &offset);
          if (new == NULL)
            continue;
          if (offset == 0)
            x = new;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
                                         GEN_INT (offset), NULL_RTX,
                                         1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
                                   GET_MODE (new), SUBREG_BYTE (x));
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        x = force_reg (insn_data[insn_code].operand[i].mode, x);

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = recog_data.operand[(unsigned)recog_data.dup_num[i]];

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
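
/* Illustrative example (an assumption, not from this file): the
   straight-copy special case above turns

     (set (reg:SI 100) (reg:SI virtual-incoming-args))

   into an add materialized by expand_simple_binop, conceptually

     (set (reg:SI 100) (plus:SI (reg:SI arg-pointer)
                                (const_int in_arg_offset)))

   avoiding a fresh pseudo for the intermediate sum.  */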
14a774a9 1575
bbf9b913
RH
1576/* Subroutine of instantiate_decls. Given RTL representing a decl,
1577 do any instantiation required. */
14a774a9 1578
bbf9b913
RH
1579static void
1580instantiate_decl (rtx x)
1581{
1582 rtx addr;
6f086dfc 1583
bbf9b913
RH
1584 if (x == 0)
1585 return;
6f086dfc 1586
bbf9b913
RH
1587 /* If this is a CONCAT, recurse for the pieces. */
1588 if (GET_CODE (x) == CONCAT)
1589 {
1590 instantiate_decl (XEXP (x, 0));
1591 instantiate_decl (XEXP (x, 1));
1592 return;
1593 }
6f086dfc 1594
bbf9b913
RH
1595 /* If this is not a MEM, no need to do anything. Similarly if the
1596 address is a constant or a register that is not a virtual register. */
1597 if (!MEM_P (x))
1598 return;
6f086dfc 1599
1600 addr = XEXP (x, 0);
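	/* Virtual registers occupy the contiguous regno range
	   FIRST_VIRTUAL_REGISTER .. LAST_VIRTUAL_REGISTER, so a register
	   outside that range cannot need instantiation.  */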
1601 if (CONSTANT_P (addr)
1602 || (REG_P (addr)
1603 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1604 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1605 return;
6f086dfc 1606
1607 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1608}
6f086dfc 1609
1610/* Helper for instantiate_decls called via walk_tree: Process all decls
1611 in the given DECL_VALUE_EXPR. */
1612
1613static tree
1614instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1615{
1616 tree t = *tp;
1617 if (! EXPR_P (t))
1618 {
1619 *walk_subtrees = 0;
1620 if (DECL_P (t) && DECL_RTL_SET_P (t))
1621 instantiate_decl (DECL_RTL (t));
1622 }
1623 return NULL;
1624}
1625
1626/* Subroutine of instantiate_decls: Process all decls in the given
1627 BLOCK node and all its subblocks. */
6f086dfc 1628
1629static void
1630instantiate_decls_1 (tree let)
1631{
1632 tree t;
6f086dfc 1633
bbf9b913 1634 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1635 {
1636 if (DECL_RTL_SET_P (t))
1637 instantiate_decl (DECL_RTL (t));
1638 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1639 {
1640 tree v = DECL_VALUE_EXPR (t);
1641 walk_tree (&v, instantiate_expr, NULL, NULL);
1642 }
1643 }
6f086dfc 1644
1645 /* Process all subblocks. */
1646 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1647 instantiate_decls_1 (t);
1648}
6f086dfc 1649
1650/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1651 all virtual registers in their DECL_RTL's. */
6f086dfc 1652
1653static void
1654instantiate_decls (tree fndecl)
1655{
1656 tree decl;
6f086dfc 1657
1658 /* Process all parameters of the function. */
1659 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1660 {
1661 instantiate_decl (DECL_RTL (decl));
1662 instantiate_decl (DECL_INCOMING_RTL (decl));
1663 if (DECL_HAS_VALUE_EXPR_P (decl))
1664 {
1665 tree v = DECL_VALUE_EXPR (decl);
1666 walk_tree (&v, instantiate_expr, NULL, NULL);
1667 }
bbf9b913 1668 }
4fd796bb 1669
1670 /* Now process all variables defined in the function or its subblocks. */
1671 instantiate_decls_1 (DECL_INITIAL (fndecl));
1672}
6f086dfc 1673
1674/* Pass through the INSNS of function FNDECL and convert virtual register
1675 references to hard register references. */
6f086dfc 1676
c2924966 1677static unsigned int
1678instantiate_virtual_regs (void)
1679{
1680 rtx insn;
6f086dfc 1681
1682 /* Compute the offsets to use for this function. */
1683 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1684 var_offset = STARTING_FRAME_OFFSET;
1685 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1686 out_arg_offset = STACK_POINTER_OFFSET;
1687#ifdef FRAME_POINTER_CFA_OFFSET
1688 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1689#else
bbf9b913 1690 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1691#endif
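/* Roughly speaking, each virtual register is then replaced by its hard
   counterpart plus one of these offsets: for instance
   virtual_incoming_args_rtx becomes the arg pointer plus IN_ARG_OFFSET,
   and virtual_stack_vars_rtx becomes the frame pointer plus VAR_OFFSET
   (a sketch; the precise registers are target-dependent).  */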
e9a25f70 1692
1693 /* Initialize recognition, indicating that volatile is OK. */
1694 init_recog ();
6f086dfc 1695
1696 /* Scan through all the insns, instantiating every virtual register still
1697 present. */
1698 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1699 if (INSN_P (insn))
6f086dfc 1700 {
1701 /* These patterns in the instruction stream can never be recognized.
1702 Fortunately, they shouldn't contain virtual registers either. */
1703 if (GET_CODE (PATTERN (insn)) == USE
1704 || GET_CODE (PATTERN (insn)) == CLOBBER
1705 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1706 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1707 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1708 continue;
1709
1710 instantiate_virtual_regs_in_insn (insn);
1711
1712 if (INSN_DELETED_P (insn))
1713 continue;
1714
1715 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1716
1717 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1718 if (GET_CODE (insn) == CALL_INSN)
1719 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1720 instantiate_virtual_regs_in_rtx, NULL);
6f086dfc 1721 }
6f086dfc 1722
1723 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1724 instantiate_decls (current_function_decl);
1725
1726 /* Indicate that, from now on, assign_stack_local should use
1727 frame_pointer_rtx. */
1728 virtuals_instantiated = 1;
c2924966 1729 return 0;
6f086dfc 1730}
1731
1732struct tree_opt_pass pass_instantiate_virtual_regs =
1733{
defb77dc 1734 "vregs", /* name */
1735 NULL, /* gate */
1736 instantiate_virtual_regs, /* execute */
1737 NULL, /* sub */
1738 NULL, /* next */
1739 0, /* static_pass_number */
1740 0, /* tv_id */
1741 0, /* properties_required */
1742 0, /* properties_provided */
1743 0, /* properties_destroyed */
1744 0, /* todo_flags_start */
defb77dc 1745 TODO_dump_func, /* todo_flags_finish */
1746 0 /* letter */
1747};
1748
6f086dfc 1749\f
1750/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1751 This means a type for which function calls must pass an address to the
1752 function or get an address back from the function.
1753 EXP may be a type node or an expression (whose type is tested). */
1754
1755int
61f71b34 1756aggregate_value_p (tree exp, tree fntype)
6f086dfc 1757{
1758 int i, regno, nregs;
1759 rtx reg;
1760
1761 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
d181c154 1762
1763 if (fntype)
1764 switch (TREE_CODE (fntype))
1765 {
1766 case CALL_EXPR:
1767 fntype = get_callee_fndecl (fntype);
1768 fntype = fntype ? TREE_TYPE (fntype) : 0;
1769 break;
1770 case FUNCTION_DECL:
1771 fntype = TREE_TYPE (fntype);
1772 break;
1773 case FUNCTION_TYPE:
1774 case METHOD_TYPE:
1775 break;
1776 case IDENTIFIER_NODE:
1777 fntype = 0;
1778 break;
1779 default:
 1780	      /* We don't expect other tree codes here.  */
0bccc606 1781 gcc_unreachable ();
1782 }
1783
1784 if (TREE_CODE (type) == VOID_TYPE)
1785 return 0;
1786 /* If the front end has decided that this needs to be passed by
1787 reference, do so. */
1788 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1789 && DECL_BY_REFERENCE (exp))
1790 return 1;
61f71b34 1791 if (targetm.calls.return_in_memory (type, fntype))
6f086dfc 1792 return 1;
956d6950 1793 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1794 and thus can't be returned in registers. */
1795 if (TREE_ADDRESSABLE (type))
1796 return 1;
05e3bdb9 1797 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 1798 return 1;
1799 /* Make sure we have suitable call-clobbered regs to return
1800 the value in; if not, we must return it in memory. */
1d636cc6 1801 reg = hard_function_value (type, 0, fntype, 0);
1802
1803 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1804 it is OK. */
f8cfc6aa 1805 if (!REG_P (reg))
1806 return 0;
1807
9d790a4f 1808 regno = REGNO (reg);
66fd46b6 1809 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1810 for (i = 0; i < nregs; i++)
1811 if (! call_used_regs[regno + i])
1812 return 1;
1813 return 0;
1814}
1815\f
1816/* Return true if we should assign DECL a pseudo register; false if it
1817 should live on the local stack. */
1818
1819bool
1820use_register_for_decl (tree decl)
1821{
1822 /* Honor volatile. */
1823 if (TREE_SIDE_EFFECTS (decl))
1824 return false;
1825
1826 /* Honor addressability. */
1827 if (TREE_ADDRESSABLE (decl))
1828 return false;
1829
1830 /* Only register-like things go in registers. */
1831 if (DECL_MODE (decl) == BLKmode)
1832 return false;
1833
1834 /* If -ffloat-store specified, don't put explicit float variables
1835 into registers. */
1836 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1837 propagates values across these stores, and it probably shouldn't. */
1838 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1839 return false;
1840
1841 /* If we're not interested in tracking debugging information for
1842 this decl, then we can certainly put it in a register. */
1843 if (DECL_IGNORED_P (decl))
1844 return true;
1845
1846 return (optimize || DECL_REGISTER (decl));
1847}
1848
1849/* Return true if TYPE should be passed by invisible reference. */
1850
1851bool
1852pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1853 tree type, bool named_arg)
1854{
1855 if (type)
1856 {
1857 /* If this type contains non-trivial constructors, then it is
1858 forbidden for the middle-end to create any new copies. */
1859 if (TREE_ADDRESSABLE (type))
1860 return true;
1861
1862 /* GCC post 3.4 passes *all* variable sized types by reference. */
1863 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1864 return true;
1865 }
1866
8cd5a4e0 1867 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1868}
1869
1870/* Return true if TYPE, which is passed by reference, should be callee
1871 copied instead of caller copied. */
1872
1873bool
1874reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1875 tree type, bool named_arg)
1876{
1877 if (type && TREE_ADDRESSABLE (type))
1878 return false;
1879 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1880}
1881
1882/* Structures to communicate between the subroutines of assign_parms.
1883 The first holds data persistent across all parameters, the second
1884 is cleared out for each parameter. */
6f086dfc 1885
6071dc7f 1886struct assign_parm_data_all
6f086dfc 1887{
6f086dfc 1888 CUMULATIVE_ARGS args_so_far;
6f086dfc 1889 struct args_size stack_args_size;
1890 tree function_result_decl;
1891 tree orig_fnargs;
1892 rtx conversion_insns;
1893 HOST_WIDE_INT pretend_args_size;
1894 HOST_WIDE_INT extra_pretend_bytes;
1895 int reg_parm_stack_space;
1896};
6f086dfc 1897
1898struct assign_parm_data_one
1899{
1900 tree nominal_type;
1901 tree passed_type;
1902 rtx entry_parm;
1903 rtx stack_parm;
1904 enum machine_mode nominal_mode;
1905 enum machine_mode passed_mode;
1906 enum machine_mode promoted_mode;
1907 struct locate_and_pad_arg_data locate;
1908 int partial;
1909 BOOL_BITFIELD named_arg : 1;
1910 BOOL_BITFIELD passed_pointer : 1;
1911 BOOL_BITFIELD on_stack : 1;
1912 BOOL_BITFIELD loaded_in_reg : 1;
1913};
ebb904cb 1914
6071dc7f 1915/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 1916
1917static void
1918assign_parms_initialize_all (struct assign_parm_data_all *all)
1919{
1920 tree fntype;
6f086dfc 1921
1922 memset (all, 0, sizeof (*all));
1923
1924 fntype = TREE_TYPE (current_function_decl);
1925
1926#ifdef INIT_CUMULATIVE_INCOMING_ARGS
1927 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1928#else
1929 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1930 current_function_decl, -1);
1931#endif
1932
1933#ifdef REG_PARM_STACK_SPACE
1934 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1935#endif
1936}
6f086dfc 1937
 1938	/* If ARGS contains entries with complex types, split the entry into two
 1939	   entries of the component type.  Return a new list if substitutions are
 1940	   needed, else the old list.  */
1941
1942static tree
1943split_complex_args (tree args)
1944{
1945 tree p;
1946
1947 /* Before allocating memory, check for the common case of no complex. */
1948 for (p = args; p; p = TREE_CHAIN (p))
1949 {
1950 tree type = TREE_TYPE (p);
1951 if (TREE_CODE (type) == COMPLEX_TYPE
1952 && targetm.calls.split_complex_arg (type))
1953 goto found;
1954 }
1955 return args;
1956
1957 found:
1958 args = copy_list (args);
1959
1960 for (p = args; p; p = TREE_CHAIN (p))
1961 {
1962 tree type = TREE_TYPE (p);
1963 if (TREE_CODE (type) == COMPLEX_TYPE
1964 && targetm.calls.split_complex_arg (type))
1965 {
1966 tree decl;
1967 tree subtype = TREE_TYPE (type);
6ccd356e 1968 bool addressable = TREE_ADDRESSABLE (p);
1969
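	  /* For instance, a "_Complex double" parameter becomes two
	     consecutive "double" PARM_DECLs, the real part followed by
	     the imaginary part.  */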
1970 /* Rewrite the PARM_DECL's type with its component. */
1971 TREE_TYPE (p) = subtype;
1972 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1973 DECL_MODE (p) = VOIDmode;
1974 DECL_SIZE (p) = NULL;
1975 DECL_SIZE_UNIT (p) = NULL;
1976 /* If this arg must go in memory, put it in a pseudo here.
1977 We can't allow it to go in memory as per normal parms,
1978 because the usual place might not have the imag part
1979 adjacent to the real part. */
1980 DECL_ARTIFICIAL (p) = addressable;
1981 DECL_IGNORED_P (p) = addressable;
1982 TREE_ADDRESSABLE (p) = 0;
1983 layout_decl (p, 0);
1984
1985 /* Build a second synthetic decl. */
1986 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1987 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1988 DECL_ARTIFICIAL (decl) = addressable;
1989 DECL_IGNORED_P (decl) = addressable;
1990 layout_decl (decl, 0);
1991
1992 /* Splice it in; skip the new decl. */
1993 TREE_CHAIN (decl) = TREE_CHAIN (p);
1994 TREE_CHAIN (p) = decl;
1995 p = decl;
1996 }
1997 }
1998
1999 return args;
2000}
2001
2002/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2003 the hidden struct return argument, and (abi willing) complex args.
2004 Return the new parameter list. */
2005
2006static tree
2007assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2008{
2009 tree fndecl = current_function_decl;
2010 tree fntype = TREE_TYPE (fndecl);
2011 tree fnargs = DECL_ARGUMENTS (fndecl);
2012
2013 /* If struct value address is treated as the first argument, make it so. */
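  /* That is, a function returning a large struct behaves roughly as if
     it had been declared "void f (struct S *hidden_result, ...)"; the
     synthetic PARM_DECL built below stands for that hidden pointer
     (the name here is purely illustrative).  */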
61f71b34 2014 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
6f086dfc 2015 && ! current_function_returns_pcc_struct
61f71b34 2016 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2017 {
f9f29478 2018 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2019 tree decl;
6f086dfc 2020
2021 decl = build_decl (PARM_DECL, NULL_TREE, type);
2022 DECL_ARG_TYPE (decl) = type;
2023 DECL_ARTIFICIAL (decl) = 1;
78e0d62b 2024 DECL_IGNORED_P (decl) = 1;
6f086dfc 2025
2026 TREE_CHAIN (decl) = fnargs;
2027 fnargs = decl;
2028 all->function_result_decl = decl;
6f086dfc 2029 }
718fe406 2030
6071dc7f 2031 all->orig_fnargs = fnargs;
ded9bf77 2032
2033 /* If the target wants to split complex arguments into scalars, do so. */
2034 if (targetm.calls.split_complex_arg)
2035 fnargs = split_complex_args (fnargs);
2036
2037 return fnargs;
2038}
e7949876 2039
2040/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2041 data for the parameter. Incorporate ABI specifics such as pass-by-
2042 reference and type promotion. */
6f086dfc 2043
2044static void
2045assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2046 struct assign_parm_data_one *data)
2047{
2048 tree nominal_type, passed_type;
2049 enum machine_mode nominal_mode, passed_mode, promoted_mode;
6f086dfc 2050
2051 memset (data, 0, sizeof (*data));
2052
 2053	  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
 2054	  if (!current_function_stdarg)
 2055	    data->named_arg = 1;  /* No variadic parms.  */
 2056	  else if (TREE_CHAIN (parm))
 2057	    data->named_arg = 1;  /* Not the last non-variadic parm.  */
 2058	  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
 2059	    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
 2060	  else
 2061	    data->named_arg = 0;  /* Treat as variadic.  */
2062
2063 nominal_type = TREE_TYPE (parm);
2064 passed_type = DECL_ARG_TYPE (parm);
2065
2066 /* Look out for errors propagating this far. Also, if the parameter's
2067 type is void then its value doesn't matter. */
2068 if (TREE_TYPE (parm) == error_mark_node
2069 /* This can happen after weird syntax errors
2070 or if an enum type is defined among the parms. */
2071 || TREE_CODE (parm) != PARM_DECL
2072 || passed_type == NULL
2073 || VOID_TYPE_P (nominal_type))
2074 {
2075 nominal_type = passed_type = void_type_node;
2076 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2077 goto egress;
2078 }
108b7d3d 2079
2080 /* Find mode of arg as it is passed, and mode of arg as it should be
2081 during execution of this function. */
2082 passed_mode = TYPE_MODE (passed_type);
2083 nominal_mode = TYPE_MODE (nominal_type);
2084
2085 /* If the parm is to be passed as a transparent union, use the type of
2086 the first field for the tests below. We have already verified that
2087 the modes are the same. */
2088 if (TREE_CODE (passed_type) == UNION_TYPE
2089 && TYPE_TRANSPARENT_UNION (passed_type))
2090 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2091
2092 /* See if this arg was passed by invisible reference. */
2093 if (pass_by_reference (&all->args_so_far, passed_mode,
2094 passed_type, data->named_arg))
2095 {
2096 passed_type = nominal_type = build_pointer_type (passed_type);
2097 data->passed_pointer = true;
2098 passed_mode = nominal_mode = Pmode;
2099 }
6f086dfc 2100
2101 /* Find mode as it is passed by the ABI. */
2102 promoted_mode = passed_mode;
2103 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2104 {
2105 int unsignedp = TYPE_UNSIGNED (passed_type);
2106 promoted_mode = promote_mode (passed_type, promoted_mode,
2107 &unsignedp, 1);
2108 }
6f086dfc 2109
2110 egress:
2111 data->nominal_type = nominal_type;
2112 data->passed_type = passed_type;
2113 data->nominal_mode = nominal_mode;
2114 data->passed_mode = passed_mode;
2115 data->promoted_mode = promoted_mode;
2116}
16bae307 2117
6071dc7f 2118/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2119
2120static void
2121assign_parms_setup_varargs (struct assign_parm_data_all *all,
2122 struct assign_parm_data_one *data, bool no_rtl)
2123{
2124 int varargs_pretend_bytes = 0;
2125
2126 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2127 data->promoted_mode,
2128 data->passed_type,
2129 &varargs_pretend_bytes, no_rtl);
2130
2131 /* If the back-end has requested extra stack space, record how much is
2132 needed. Do not change pretend_args_size otherwise since it may be
2133 nonzero from an earlier partial argument. */
2134 if (varargs_pretend_bytes > 0)
2135 all->pretend_args_size = varargs_pretend_bytes;
2136}
a53e14c0 2137
2138/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2139 the incoming location of the current parameter. */
2140
2141static void
2142assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2143 struct assign_parm_data_one *data)
2144{
2145 HOST_WIDE_INT pretend_bytes = 0;
2146 rtx entry_parm;
2147 bool in_regs;
2148
2149 if (data->promoted_mode == VOIDmode)
2150 {
2151 data->entry_parm = data->stack_parm = const0_rtx;
2152 return;
2153 }
a53e14c0 2154
6f086dfc 2155#ifdef FUNCTION_INCOMING_ARG
2156 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2157 data->passed_type, data->named_arg);
6f086dfc 2158#else
2159 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2160 data->passed_type, data->named_arg);
2161#endif
2162
2163 if (entry_parm == 0)
2164 data->promoted_mode = data->passed_mode;
6f086dfc 2165
2166 /* Determine parm's home in the stack, in case it arrives in the stack
2167 or we should pretend it did. Compute the stack position and rtx where
2168 the argument arrives and its size.
6f086dfc 2169
2170 There is one complexity here: If this was a parameter that would
2171 have been passed in registers, but wasn't only because it is
2172 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2173 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2174 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2175 as it was the previous time. */
2176 in_regs = entry_parm != 0;
6f086dfc 2177#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2178 in_regs = true;
e7949876 2179#endif
2180 if (!in_regs && !data->named_arg)
2181 {
2182 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
e7949876 2183 {
6071dc7f 2184 rtx tem;
6f086dfc 2185#ifdef FUNCTION_INCOMING_ARG
2186 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2187 data->passed_type, true);
6f086dfc 2188#else
2189 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2190 data->passed_type, true);
6f086dfc 2191#endif
6071dc7f 2192 in_regs = tem != NULL;
e7949876 2193 }
6071dc7f 2194 }
e7949876 2195
2196 /* If this parameter was passed both in registers and in the stack, use
2197 the copy on the stack. */
2198 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2199 data->passed_type))
6071dc7f 2200 entry_parm = 0;
e7949876 2201
2202 if (entry_parm)
2203 {
2204 int partial;
2205
2206 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2207 data->promoted_mode,
2208 data->passed_type,
2209 data->named_arg);
2210 data->partial = partial;
2211
2212 /* The caller might already have allocated stack space for the
2213 register parameters. */
2214 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2215 {
2216 /* Part of this argument is passed in registers and part
2217 is passed on the stack. Ask the prologue code to extend
2218 the stack part so that we can recreate the full value.
2219
2220 PRETEND_BYTES is the size of the registers we need to store.
2221 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2222 stack space that the prologue should allocate.
2223
2224 Internally, gcc assumes that the argument pointer is aligned
2225 to STACK_BOUNDARY bits. This is used both for alignment
2226 optimizations (see init_emit) and to locate arguments that are
2227 aligned to more than PARM_BOUNDARY bits. We must preserve this
2228 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2229 a stack boundary. */
2230
2231 /* We assume at most one partial arg, and it must be the first
2232 argument on the stack. */
0bccc606 2233 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2234
78a52f11 2235 pretend_bytes = partial;
2236 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2237
2238 /* We want to align relative to the actual stack pointer, so
2239 don't include this in the stack size until later. */
2240 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2241 }
6071dc7f 2242 }
e7949876 2243
2244 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2245 entry_parm ? data->partial : 0, current_function_decl,
2246 &all->stack_args_size, &data->locate);
6f086dfc 2247
2248 /* Adjust offsets to include the pretend args. */
2249 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2250 data->locate.slot_offset.constant += pretend_bytes;
2251 data->locate.offset.constant += pretend_bytes;
ebca59c3 2252
2253 data->entry_parm = entry_parm;
2254}
6f086dfc 2255
2256/* A subroutine of assign_parms. If there is actually space on the stack
2257 for this parm, count it in stack_args_size and return true. */
6f086dfc 2258
2259static bool
2260assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2261 struct assign_parm_data_one *data)
2262{
2e6ae27f 2263 /* Trivially true if we've no incoming register. */
2264 if (data->entry_parm == NULL)
2265 ;
2266 /* Also true if we're partially in registers and partially not,
2267 since we've arranged to drop the entire argument on the stack. */
2268 else if (data->partial != 0)
2269 ;
2270 /* Also true if the target says that it's passed in both registers
2271 and on the stack. */
2272 else if (GET_CODE (data->entry_parm) == PARALLEL
2273 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2274 ;
2275 /* Also true if the target says that there's stack allocated for
2276 all register parameters. */
2277 else if (all->reg_parm_stack_space > 0)
2278 ;
2279 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2280 else
2281 return false;
6f086dfc 2282
2283 all->stack_args_size.constant += data->locate.size.constant;
2284 if (data->locate.size.var)
2285 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2286
2287 return true;
2288}
0d1416c6 2289
2290/* A subroutine of assign_parms. Given that this parameter is allocated
2291 stack space by the ABI, find it. */
6f086dfc 2292
2293static void
2294assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2295{
2296 rtx offset_rtx, stack_parm;
2297 unsigned int align, boundary;
6f086dfc 2298
2299 /* If we're passing this arg using a reg, make its stack home the
2300 aligned stack slot. */
2301 if (data->entry_parm)
2302 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2303 else
2304 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2305
2306 stack_parm = current_function_internal_arg_pointer;
2307 if (offset_rtx != const0_rtx)
2308 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2309 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2310
2311 set_mem_attributes (stack_parm, parm, 1);
2312
2313 boundary = data->locate.boundary;
2314 align = BITS_PER_UNIT;
2315
2316 /* If we're padding upward, we know that the alignment of the slot
2317 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2318 intentionally forcing upward padding. Otherwise we have to come
2319 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2320 if (data->locate.where_pad != downward || data->entry_parm)
2321 align = boundary;
2322 else if (GET_CODE (offset_rtx) == CONST_INT)
2323 {
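	  /* The OR makes the value nonzero, and the AND with its own
	     negation below isolates the lowest set bit: the largest
	     power-of-two alignment dividing both the offset (scaled to
	     bits) and BOUNDARY.  */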
2324 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2325 align = align & -align;
2326 }
bfc45551 2327 set_mem_align (stack_parm, align);
2328
2329 if (data->entry_parm)
2330 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2331
2332 data->stack_parm = stack_parm;
2333}
2334
2335/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2336 always valid and contiguous. */
2337
2338static void
2339assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2340{
2341 rtx entry_parm = data->entry_parm;
2342 rtx stack_parm = data->stack_parm;
2343
2344 /* If this parm was passed part in regs and part in memory, pretend it
2345 arrived entirely in memory by pushing the register-part onto the stack.
2346 In the special case of a DImode or DFmode that is split, we could put
2347 it together in a pseudoreg directly, but for now that's not worth
2348 bothering with. */
2349 if (data->partial != 0)
2350 {
2351 /* Handle calls that pass values in multiple non-contiguous
2352 locations. The Irix 6 ABI has examples of this. */
2353 if (GET_CODE (entry_parm) == PARALLEL)
2354 emit_group_store (validize_mem (stack_parm), entry_parm,
2355 data->passed_type,
2356 int_size_in_bytes (data->passed_type));
6f086dfc 2357 else
2358 {
2359 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2360 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2361 data->partial / UNITS_PER_WORD);
2362 }
6f086dfc 2363
2364 entry_parm = stack_parm;
2365 }
6f086dfc 2366
2367 /* If we didn't decide this parm came in a register, by default it came
2368 on the stack. */
2369 else if (entry_parm == NULL)
2370 entry_parm = stack_parm;
2371
2372 /* When an argument is passed in multiple locations, we can't make use
2373 of this information, but we can save some copying if the whole argument
2374 is passed in a single register. */
2375 else if (GET_CODE (entry_parm) == PARALLEL
2376 && data->nominal_mode != BLKmode
2377 && data->passed_mode != BLKmode)
2378 {
2379 size_t i, len = XVECLEN (entry_parm, 0);
2380
2381 for (i = 0; i < len; i++)
2382 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2383 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2384 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2385 == data->passed_mode)
2386 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2387 {
2388 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2389 break;
2390 }
2391 }
e68a6ce1 2392
2393 data->entry_parm = entry_parm;
2394}
6f086dfc 2395
2396/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2397 always valid and properly aligned. */
6f086dfc 2398
2399static void
2400assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2401{
2402 rtx stack_parm = data->stack_parm;
2403
2404 /* If we can't trust the parm stack slot to be aligned enough for its
2405 ultimate type, don't use that slot after entry. We'll make another
2406 stack slot, if we need one. */
2407 if (stack_parm
2408 && ((STRICT_ALIGNMENT
2409 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2410 || (data->nominal_type
2411 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2412 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2413 stack_parm = NULL;
2414
2415 /* If parm was passed in memory, and we need to convert it on entry,
2416 don't store it back in that same slot. */
2417 else if (data->entry_parm == stack_parm
2418 && data->nominal_mode != BLKmode
2419 && data->nominal_mode != data->passed_mode)
2420 stack_parm = NULL;
2421
2422 /* If stack protection is in effect for this function, don't leave any
2423 pointers in their passed stack slots. */
2424 else if (cfun->stack_protect_guard
2425 && (flag_stack_protect == 2
2426 || data->passed_pointer
2427 || POINTER_TYPE_P (data->nominal_type)))
2428 stack_parm = NULL;
2429
2430 data->stack_parm = stack_parm;
2431}
a0506b54 2432
2433/* A subroutine of assign_parms. Return true if the current parameter
2434 should be stored as a BLKmode in the current frame. */
2435
2436static bool
2437assign_parm_setup_block_p (struct assign_parm_data_one *data)
2438{
2439 if (data->nominal_mode == BLKmode)
2440 return true;
2441 if (GET_CODE (data->entry_parm) == PARALLEL)
2442 return true;
531547e9 2443
6e985040 2444#ifdef BLOCK_REG_PADDING
2445 /* Only assign_parm_setup_block knows how to deal with register arguments
2446 that are padded at the least significant end. */
2447 if (REG_P (data->entry_parm)
2448 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2449 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2450 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2451 return true;
6e985040 2452#endif
2453
2454 return false;
2455}
2456
2457/* A subroutine of assign_parms. Arrange for the parameter to be
2458 present and valid in DATA->STACK_RTL. */
2459
2460static void
2461assign_parm_setup_block (struct assign_parm_data_all *all,
2462 tree parm, struct assign_parm_data_one *data)
2463{
2464 rtx entry_parm = data->entry_parm;
2465 rtx stack_parm = data->stack_parm;
2466 HOST_WIDE_INT size;
2467 HOST_WIDE_INT size_stored;
17284759 2468 rtx orig_entry_parm = entry_parm;
6071dc7f 2469
2470 if (GET_CODE (entry_parm) == PARALLEL)
2471 entry_parm = emit_group_move_into_temps (entry_parm);
2472
2473 /* If we've a non-block object that's nevertheless passed in parts,
2474 reconstitute it in register operations rather than on the stack. */
2475 if (GET_CODE (entry_parm) == PARALLEL
640019aa 2476 && data->nominal_mode != BLKmode)
6071dc7f 2477 {
17284759 2478 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
6071dc7f 2479
2480 if ((XVECLEN (entry_parm, 0) > 1
2481 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2482 && use_register_for_decl (parm))
2483 {
2484 rtx parmreg = gen_reg_rtx (data->nominal_mode);
27e29549 2485
640019aa 2486 push_to_sequence (all->conversion_insns);
4af46a32 2487
2488 /* For values returned in multiple registers, handle possible
2489 incompatible calls to emit_group_store.
4af46a32 2490
2491 For example, the following would be invalid, and would have to
2492 be fixed by the conditional below:
4af46a32 2493
2494 emit_group_store ((reg:SF), (parallel:DF))
2495 emit_group_store ((reg:SI), (parallel:DI))
2496
 2497	     An example of this is doubles in e500 v2:
2498 (parallel:DF (expr_list (reg:SI) (const_int 0))
2499 (expr_list (reg:SI) (const_int 4))). */
2500 if (data->nominal_mode != data->passed_mode)
2501 {
2502 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2503 emit_group_store (t, entry_parm, NULL_TREE,
2504 GET_MODE_SIZE (GET_MODE (entry_parm)));
2505 convert_move (parmreg, t, 0);
2506 }
2507 else
2508 emit_group_store (parmreg, entry_parm, data->nominal_type,
2509 int_size_in_bytes (data->nominal_type));
27e29549 2510
2511 all->conversion_insns = get_insns ();
2512 end_sequence ();
27e29549 2513
2514 SET_DECL_RTL (parm, parmreg);
2515 return;
2516 }
2517 }
2518
2519 size = int_size_in_bytes (data->passed_type);
2520 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2521 if (stack_parm == 0)
2522 {
a561d88b 2523 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2524 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2525 DECL_ALIGN (parm));
2526 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2527 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2528 set_mem_attributes (stack_parm, parm, 1);
2529 }
2530
2531 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2532 calls that pass values in multiple non-contiguous locations. */
2533 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2534 {
2535 rtx mem;
2536
2537 /* Note that we will be storing an integral number of words.
2538 So we have to be careful to ensure that we allocate an
bfc45551 2539 integral number of words. We do this above when we call
2540 assign_stack_local if space was not allocated in the argument
2541 list. If it was, this will not work if PARM_BOUNDARY is not
2542 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2543 if it becomes a problem. Exception is when BLKmode arrives
2544 with arguments not conforming to word_mode. */
2545
2546 if (data->stack_parm == 0)
2547 ;
2548 else if (GET_CODE (entry_parm) == PARALLEL)
2549 ;
2550 else
2551 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2552
6071dc7f 2553 mem = validize_mem (stack_parm);
c6b97fac 2554
2555 /* Handle values in multiple non-contiguous locations. */
2556 if (GET_CODE (entry_parm) == PARALLEL)
2557 {
2558 push_to_sequence (all->conversion_insns);
2559 emit_group_store (mem, entry_parm, data->passed_type, size);
2560 all->conversion_insns = get_insns ();
2561 end_sequence ();
2562 }
c6b97fac 2563
2564 else if (size == 0)
2565 ;
5c07bd7a 2566
2567 /* If SIZE is that of a mode no bigger than a word, just use
2568 that mode's store operation. */
2569 else if (size <= UNITS_PER_WORD)
2570 {
2571 enum machine_mode mode
2572 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2573
6071dc7f 2574 if (mode != BLKmode
6e985040 2575#ifdef BLOCK_REG_PADDING
2576 && (size == UNITS_PER_WORD
2577 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2578 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2579#endif
2580 )
2581 {
2582 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2583 emit_move_insn (change_address (mem, mode, 0), reg);
2584 }
c6b97fac 2585
2586 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2587 machine must be aligned to the left before storing
2588 to memory. Note that the previous test doesn't
2589 handle all cases (e.g. SIZE == 3). */
2590 else if (size != UNITS_PER_WORD
6e985040 2591#ifdef BLOCK_REG_PADDING
2592 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2593 == downward)
6e985040 2594#else
6071dc7f 2595 && BYTES_BIG_ENDIAN
6e985040 2596#endif
2597 )
2598 {
2599 rtx tem, x;
2600 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
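	      /* For instance, with 4-byte words and SIZE == 3, BY is 8:
	         shifting left by one byte left-justifies the value in
	         the word before it is stored.  */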
65c844e2 2601 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2602
09b52670 2603 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
7d60be94 2604 build_int_cst (NULL_TREE, by),
4a90aeeb 2605 NULL_RTX, 1);
2606 tem = change_address (mem, word_mode, 0);
2607 emit_move_insn (tem, x);
6f086dfc 2608 }
6071dc7f 2609 else
27e29549 2610 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2611 size_stored / UNITS_PER_WORD);
6f086dfc 2612 }
6071dc7f 2613 else
27e29549 2614 move_block_from_reg (REGNO (entry_parm), mem,
2615 size_stored / UNITS_PER_WORD);
2616 }
2617 else if (data->stack_parm == 0)
2618 {
2619 push_to_sequence (all->conversion_insns);
2620 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2621 BLOCK_OP_NORMAL);
2622 all->conversion_insns = get_insns ();
2623 end_sequence ();
2624 }
6071dc7f 2625
bfc45551 2626 data->stack_parm = stack_parm;
2627 SET_DECL_RTL (parm, stack_parm);
2628}
2629
2630/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2631 parameter. Get it there. Perform all ABI specified conversions. */
2632
2633static void
2634assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2635 struct assign_parm_data_one *data)
2636{
2637 rtx parmreg;
2638 enum machine_mode promoted_nominal_mode;
2639 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2640 bool did_conversion = false;
2641
2642 /* Store the parm in a pseudoregister during the function, but we may
2643 need to do it in a wider mode. */
2644
2645 /* This is not really promoting for a call. However we need to be
2646 consistent with assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2647 promoted_nominal_mode
3f9e6aed 2648 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2649
2650 parmreg = gen_reg_rtx (promoted_nominal_mode);
2651
2652 if (!DECL_ARTIFICIAL (parm))
2653 mark_user_reg (parmreg);
2654
2655 /* If this was an item that we received a pointer to,
2656 set DECL_RTL appropriately. */
2657 if (data->passed_pointer)
2658 {
2659 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2660 set_mem_attributes (x, parm, 1);
2661 SET_DECL_RTL (parm, x);
2662 }
2663 else
389fdba0 2664 SET_DECL_RTL (parm, parmreg);
2665
2666 /* Copy the value into the register. */
2667 if (data->nominal_mode != data->passed_mode
2668 || promoted_nominal_mode != data->promoted_mode)
2669 {
2670 int save_tree_used;
2671
2672 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2673 mode, by the caller. We now have to convert it to
2674 NOMINAL_MODE, if different. However, PARMREG may be in
2675 a different mode than NOMINAL_MODE if it is being stored
2676 promoted.
2677
2678 If ENTRY_PARM is a hard register, it might be in a register
2679 not valid for operating in its mode (e.g., an odd-numbered
2680 register for a DFmode). In that case, moves are the only
2681 thing valid, so we can't do a convert from there. This
2682 occurs when the calling sequence allow such misaligned
2683 usages.
2684
2685 In addition, the conversion may involve a call, which could
2686 clobber parameters which haven't been copied to pseudo
2687 registers yet. Therefore, we must first copy the parm to
2688 a pseudo reg here, and save the conversion until after all
2689 parameters have been moved. */
2690
2691 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2692
2693 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2694
2695 push_to_sequence (all->conversion_insns);
2696 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2697
2698 if (GET_CODE (tempreg) == SUBREG
2699 && GET_MODE (tempreg) == data->nominal_mode
2700 && REG_P (SUBREG_REG (tempreg))
2701 && data->nominal_mode == data->passed_mode
2702 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2703 && GET_MODE_SIZE (GET_MODE (tempreg))
2704 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 2705 {
2706 /* The argument is already sign/zero extended, so note it
2707 into the subreg. */
2708 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2709 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2710 }
00d8a4c1 2711
2712 /* TREE_USED gets set erroneously during expand_assignment. */
2713 save_tree_used = TREE_USED (parm);
e836a5a2 2714 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2715 TREE_USED (parm) = save_tree_used;
2716 all->conversion_insns = get_insns ();
2717 end_sequence ();
00d8a4c1 2718
2719 did_conversion = true;
2720 }
2721 else
2722 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2723
2724 /* If we were passed a pointer but the actual value can safely live
2725 in a register, put it in one. */
2726 if (data->passed_pointer
2727 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2728 /* If by-reference argument was promoted, demote it. */
2729 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2730 || use_register_for_decl (parm)))
2731 {
2732 /* We can't use nominal_mode, because it will have been set to
2733 Pmode above. We must use the actual mode of the parm. */
2734 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2735 mark_user_reg (parmreg);
cd5b3469 2736
2737 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2738 {
2739 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2740 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2741
2742 push_to_sequence (all->conversion_insns);
2743 emit_move_insn (tempreg, DECL_RTL (parm));
2744 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2745 emit_move_insn (parmreg, tempreg);
27e29549 2746 all->conversion_insns = get_insns ();
6071dc7f 2747 end_sequence ();
6f086dfc 2748
2749 did_conversion = true;
2750 }
2751 else
2752 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 2753
6071dc7f 2754 SET_DECL_RTL (parm, parmreg);
797a6ac1 2755
2756 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2757 now the parm. */
2758 data->stack_parm = NULL;
2759 }
ddef6bc7 2760
2761 /* Mark the register as eliminable if we did no conversion and it was
2762 copied from memory at a fixed offset, and the arg pointer was not
2763 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2764 offset formed an invalid address, such memory-equivalences as we
2765 make here would screw up life analysis for it. */
2766 if (data->nominal_mode == data->passed_mode
2767 && !did_conversion
2768 && data->stack_parm != 0
2769 && MEM_P (data->stack_parm)
2770 && data->locate.offset.var == 0
2771 && reg_mentioned_p (virtual_incoming_args_rtx,
2772 XEXP (data->stack_parm, 0)))
2773 {
2774 rtx linsn = get_last_insn ();
2775 rtx sinsn, set;
a03caf76 2776
2777 /* Mark complex types separately. */
2778 if (GET_CODE (parmreg) == CONCAT)
2779 {
2780 enum machine_mode submode
2781 = GET_MODE_INNER (GET_MODE (parmreg));
2782 int regnor = REGNO (XEXP (parmreg, 0));
2783 int regnoi = REGNO (XEXP (parmreg, 1));
2784 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2785 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2786 GET_MODE_SIZE (submode));
2787
2788 /* Scan backwards for the set of the real and
2789 imaginary parts. */
2790 for (sinsn = linsn; sinsn != 0;
2791 sinsn = prev_nonnote_insn (sinsn))
2792 {
2793 set = single_set (sinsn);
2794 if (set == 0)
2795 continue;
2796
2797 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2798 REG_NOTES (sinsn)
2799 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2800 REG_NOTES (sinsn));
2801 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2802 REG_NOTES (sinsn)
2803 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2804 REG_NOTES (sinsn));
a03caf76 2805 }
2806 }
2807 else if ((set = single_set (linsn)) != 0
2808 && SET_DEST (set) == parmreg)
2809 REG_NOTES (linsn)
2810 = gen_rtx_EXPR_LIST (REG_EQUIV,
2811 data->stack_parm, REG_NOTES (linsn));
2812 }
2813
2814 /* For pointer data type, suggest pointer register. */
2815 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2816 mark_reg_pointer (parmreg,
2817 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2818}
2819
2820/* A subroutine of assign_parms. Allocate stack space to hold the current
2821 parameter. Get it there. Perform all ABI specified conversions. */
2822
2823static void
2824assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2825 struct assign_parm_data_one *data)
2826{
2827 /* Value must be stored in the stack slot STACK_PARM during function
2828 execution. */
bfc45551 2829 bool to_conversion = false;
2830
2831 if (data->promoted_mode != data->nominal_mode)
2832 {
2833 /* Conversion is required. */
2834 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 2835
2836 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2837
2838 push_to_sequence (all->conversion_insns);
2839 to_conversion = true;
2840
2841 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2842 TYPE_UNSIGNED (TREE_TYPE (parm)));
2843
2844 if (data->stack_parm)
2845 /* ??? This may need a big-endian conversion on sparc64. */
2846 data->stack_parm
2847 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2848 }
2849
2850 if (data->entry_parm != data->stack_parm)
2851 {
2852 rtx src, dest;
2853
2854 if (data->stack_parm == 0)
2855 {
2856 data->stack_parm
2857 = assign_stack_local (GET_MODE (data->entry_parm),
2858 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
bfc45551 2859 TYPE_ALIGN (data->passed_type));
6071dc7f 2860 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 2861 }
6071dc7f 2862
2863 dest = validize_mem (data->stack_parm);
2864 src = validize_mem (data->entry_parm);
2865
2866 if (MEM_P (src))
6f086dfc 2867 {
2868 /* Use a block move to handle potentially misaligned entry_parm. */
2869 if (!to_conversion)
2870 push_to_sequence (all->conversion_insns);
2871 to_conversion = true;
2872
2873 emit_block_move (dest, src,
2874 GEN_INT (int_size_in_bytes (data->passed_type)),
2875 BLOCK_OP_NORMAL);
2876 }
2877 else
2878 emit_move_insn (dest, src);
2879 }
2880
2881 if (to_conversion)
2882 {
2883 all->conversion_insns = get_insns ();
2884 end_sequence ();
6071dc7f 2885 }
6f086dfc 2886
2887 SET_DECL_RTL (parm, data->stack_parm);
2888}
3412b298 2889
2890/* A subroutine of assign_parms. If the ABI splits complex arguments, then
2891 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 2892
6071dc7f 2893static void
6ccd356e 2894assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2895{
2896 tree parm;
6ccd356e 2897 tree orig_fnargs = all->orig_fnargs;
f4ef873c 2898
2899 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2900 {
2901 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2902 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2903 {
2904 rtx tmp, real, imag;
2905 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 2906
2907 real = DECL_RTL (fnargs);
2908 imag = DECL_RTL (TREE_CHAIN (fnargs));
2909 if (inner != GET_MODE (real))
6f086dfc 2910 {
2911 real = gen_lowpart_SUBREG (inner, real);
2912 imag = gen_lowpart_SUBREG (inner, imag);
2913 }
2914
2915 if (TREE_ADDRESSABLE (parm))
2916 {
2917 rtx rmem, imem;
2918 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2919
2920 /* split_complex_arg put the real and imag parts in
2921 pseudos. Move them to memory. */
2922 tmp = assign_stack_local (DECL_MODE (parm), size,
2923 TYPE_ALIGN (TREE_TYPE (parm)));
2924 set_mem_attributes (tmp, parm, 1);
2925 rmem = adjust_address_nv (tmp, inner, 0);
2926 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2927 push_to_sequence (all->conversion_insns);
2928 emit_move_insn (rmem, real);
2929 emit_move_insn (imem, imag);
2930 all->conversion_insns = get_insns ();
2931 end_sequence ();
2932 }
2933 else
2934 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 2935 SET_DECL_RTL (parm, tmp);
7e41ffa2 2936
2937 real = DECL_INCOMING_RTL (fnargs);
2938 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2939 if (inner != GET_MODE (real))
2940 {
2941 real = gen_lowpart_SUBREG (inner, real);
2942 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 2943 }
2944 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2945 set_decl_incoming_rtl (parm, tmp);
2946 fnargs = TREE_CHAIN (fnargs);
2947 }
2948 else
2949 {
2950 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2951 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
6f086dfc 2952
2953 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2954 instead of the copy of decl, i.e. FNARGS. */
2955 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2956 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
6f086dfc 2957 }
2958
2959 fnargs = TREE_CHAIN (fnargs);
6f086dfc 2960 }
2961}
2962
2963/* Assign RTL expressions to the function's parameters. This may involve
2964 copying them into registers and using those registers as the DECL_RTL. */
2965
6fe79279 2966static void
2967assign_parms (tree fndecl)
2968{
2969 struct assign_parm_data_all all;
2970 tree fnargs, parm;
6f086dfc 2971
2972 current_function_internal_arg_pointer
2973 = targetm.calls.internal_arg_pointer ();
2974
2975 assign_parms_initialize_all (&all);
2976 fnargs = assign_parms_augmented_arg_list (&all);
2977
2978 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
ded9bf77 2979 {
2980 struct assign_parm_data_one data;
2981
2982 /* Extract the type of PARM; adjust it according to ABI. */
2983 assign_parm_find_data_types (&all, parm, &data);
2984
2985 /* Early out for errors and void parameters. */
2986 if (data.passed_mode == VOIDmode)
ded9bf77 2987 {
2988 SET_DECL_RTL (parm, const0_rtx);
2989 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2990 continue;
2991 }
196c42cd 2992
2993 if (current_function_stdarg && !TREE_CHAIN (parm))
2994 assign_parms_setup_varargs (&all, &data, false);
196c42cd 2995
2996 /* Find out where the parameter arrives in this function. */
2997 assign_parm_find_entry_rtl (&all, &data);
2998
2999 /* Find out where stack space for this parameter might be. */
3000 if (assign_parm_is_stack_parm (&all, &data))
3001 {
3002 assign_parm_find_stack_rtl (parm, &data);
3003 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3004 }
3005
3006 /* Record permanently how this parm was passed. */
3007 set_decl_incoming_rtl (parm, data.entry_parm);
3008
3009 /* Update info on where next arg arrives in registers. */
3010 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3011 data.passed_type, data.named_arg);
3012
3013 assign_parm_adjust_stack_rtl (&data);
3014
3015 if (assign_parm_setup_block_p (&data))
27e29549 3016 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
3017 else if (data.passed_pointer || use_register_for_decl (parm))
3018 assign_parm_setup_reg (&all, parm, &data);
3019 else
3020 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3021 }
3022
6071dc7f 3023 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
6ccd356e 3024 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3025
3412b298
JW
3026 /* Output all parameter conversion instructions (possibly including calls)
3027 now that all parameters have been copied out of hard registers. */
6071dc7f 3028 emit_insn (all.conversion_insns);
3412b298 3029
b36a8cc2
OH
3030 /* If we are receiving a struct value address as the first argument, set up
3031 the RTL for the function result. As this might require code to convert
3032 the transmitted address to Pmode, we do this here to ensure that possible
3033 preliminary conversions of the address have been emitted already. */
6071dc7f 3034 if (all.function_result_decl)
b36a8cc2 3035 {
6071dc7f
RH
3036 tree result = DECL_RESULT (current_function_decl);
3037 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3038 rtx x;
fa8db1f7 3039
cc77ae10
JM
3040 if (DECL_BY_REFERENCE (result))
3041 x = addr;
3042 else
3043 {
3044 addr = convert_memory_address (Pmode, addr);
3045 x = gen_rtx_MEM (DECL_MODE (result), addr);
3046 set_mem_attributes (x, result, 1);
3047 }
b36a8cc2
OH
3048 SET_DECL_RTL (result, x);
3049 }
3050
53c428d0 3051 /* We have aligned all the args, so add space for the pretend args. */
6071dc7f
RH
3052 current_function_pretend_args_size = all.pretend_args_size;
3053 all.stack_args_size.constant += all.extra_pretend_bytes;
3054 current_function_args_size = all.stack_args_size.constant;
6f086dfc
RS
3055
3056 /* Adjust function incoming argument size for alignment and
3057 minimum length. */
3058
3059#ifdef REG_PARM_STACK_SPACE
3060 current_function_args_size = MAX (current_function_args_size,
3061 REG_PARM_STACK_SPACE (fndecl));
6f90e075 3062#endif
6f086dfc 3063
53366450
PB
3064 current_function_args_size = CEIL_ROUND (current_function_args_size,
3065 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3066
6f086dfc
RS
3067#ifdef ARGS_GROW_DOWNWARD
3068 current_function_arg_offset_rtx
477eff96 3069 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3070 : expand_expr (size_diffop (all.stack_args_size.var,
3071 size_int (-all.stack_args_size.constant)),
a57263bc 3072 NULL_RTX, VOIDmode, 0));
6f086dfc 3073#else
6071dc7f 3074 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3075#endif
3076
3077 /* See how many bytes, if any, of its args a function should try to pop
3078 on return. */
3079
64e6d9cc 3080 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
3081 current_function_args_size);
3082
3b69d50e
RK
3083 /* For stdarg.h function, save info about
3084 regs and stack space used by the named args. */
6f086dfc 3085
6071dc7f 3086 current_function_args_info = all.args_so_far;
6f086dfc
RS
3087
3088 /* Set the rtx used for the function return value. Put this in its
3089 own variable so any optimizers that need this information don't have
3090 to include tree.h. Do this here so it gets done when an inlined
3091 function gets output. */
3092
19e7881c
MM
3093 current_function_return_rtx
3094 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3095 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3096
3097 /* If scalar return value was computed in a pseudo-reg, or was a named
3098 return value that got dumped to the stack, copy that to the hard
3099 return register. */
3100 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3101 {
3102 tree decl_result = DECL_RESULT (fndecl);
3103 rtx decl_rtl = DECL_RTL (decl_result);
3104
3105 if (REG_P (decl_rtl)
3106 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3107 : DECL_REGISTER (decl_result))
3108 {
3109 rtx real_decl_rtl;
3110
1d636cc6
RG
3111 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3112 fndecl, true);
ce5e43d0
JJ
3113 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3114 /* The delay slot scheduler assumes that current_function_return_rtx
3115 holds the hard register containing the return value, not a
3116 temporary pseudo. */
3117 current_function_return_rtx = real_decl_rtl;
3118 }
3119 }
6f086dfc 3120}
3121
3122/* A subroutine of gimplify_parameters, invoked via walk_tree.
3123 For all seen types, gimplify their sizes. */
3124
3125static tree
3126gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3127{
3128 tree t = *tp;
3129
3130 *walk_subtrees = 0;
3131 if (TYPE_P (t))
3132 {
3133 if (POINTER_TYPE_P (t))
3134 *walk_subtrees = 1;
3135 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3136 && !TYPE_SIZES_GIMPLIFIED (t))
3137 {
3138 gimplify_type_sizes (t, (tree *) data);
3139 *walk_subtrees = 1;
3140 }
3141 }
3142
3143 return NULL;
3144}
3145
3146/* Gimplify the parameter list for current_function_decl. This involves
3147 evaluating SAVE_EXPRs of variable sized parameters and generating code
3148 to implement callee-copies reference parameters. Returns a list of
3149 statements to add to the beginning of the function, or NULL if nothing
3150 to do. */
3151
3152tree
3153gimplify_parameters (void)
3154{
3155 struct assign_parm_data_all all;
3156 tree fnargs, parm, stmts = NULL;
3157
3158 assign_parms_initialize_all (&all);
3159 fnargs = assign_parms_augmented_arg_list (&all);
3160
3161 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3162 {
3163 struct assign_parm_data_one data;
3164
3165 /* Extract the type of PARM; adjust it according to ABI. */
3166 assign_parm_find_data_types (&all, parm, &data);
3167
3168 /* Early out for errors and void parameters. */
3169 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3170 continue;
3171
3172 /* Update info on where next arg arrives in registers. */
3173 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3174 data.passed_type, data.named_arg);
3175
3176 /* ??? Once upon a time variable_size stuffed parameter list
3177 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3178 turned out to be less than manageable in the gimple world.
3179 Now we have to hunt them down ourselves. */
3180 walk_tree_without_duplicates (&data.passed_type,
3181 gimplify_parm_type, &stmts);
3182
3183 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3184 {
3185 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3186 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3187 }
3188
3189 if (data.passed_pointer)
3190 {
3191 tree type = TREE_TYPE (data.passed_type);
3192 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3193 type, data.named_arg))
3194 {
3195 tree local, t;
3196
3197 /* For constant sized objects, this is trivial; for
3198 variable-sized objects, we have to play games. */
3199 if (TREE_CONSTANT (DECL_SIZE (parm)))
3200 {
3201 local = create_tmp_var (type, get_name (parm));
3202 DECL_IGNORED_P (local) = 0;
3203 }
3204 else
3205 {
3206 tree ptr_type, addr, args;
3207
3208 ptr_type = build_pointer_type (type);
3209 addr = create_tmp_var (ptr_type, get_name (parm));
3210 DECL_IGNORED_P (addr) = 0;
3211 local = build_fold_indirect_ref (addr);
3212
3213 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3214 t = built_in_decls[BUILT_IN_ALLOCA];
3215 t = build_function_call_expr (t, args);
3216 t = fold_convert (ptr_type, t);
3217 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3218 gimplify_and_add (t, &stmts);
3219 }
3220
3221 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3222 gimplify_and_add (t, &stmts);
3223
3224 SET_DECL_VALUE_EXPR (parm, local);
3225 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3226 }
3227 }
3228 }
3229
3230 return stmts;
3231}
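
/* Editorial sketch, not part of function.c: what the callee-copy lowering
   above amounts to in plain GNU C.  The types and function names are
   hypothetical.  A fixed-size parameter gets an ordinary local copy; a
   variable-sized one gets alloca'd storage reached through a pointer
   temporary, mirroring the BUILT_IN_ALLOCA path above.  */
struct sketch_s { int a, b; };

void
sketch_callee_copy_fixed (struct sketch_s *parm)
{
  struct sketch_s local = *parm;   /* create_tmp_var + MODIFY_EXPR */
  (void) &local;                   /* later uses of PARM refer to LOCAL */
}

void
sketch_callee_copy_variable (int n, int (*parm)[n])
{
  int (*addr)[n] = __builtin_alloca (sizeof *parm);   /* BUILT_IN_ALLOCA */
  __builtin_memcpy (addr, parm, sizeof *parm);        /* the MODIFY_EXPR copy */
  (void) addr;                     /* uses of PARM now go through *ADDR */
}
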
3232\f
3233/* Indicate whether REGNO is an incoming argument to the current function
3234 that was promoted to a wider mode. If so, return the RTX for the
3235 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3236 that REGNO is promoted from and whether the promotion was signed or
3237 unsigned. */
3238
3239rtx
3240promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3241{
3242 tree arg;
3243
3244 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3245 arg = TREE_CHAIN (arg))
3246 if (REG_P (DECL_INCOMING_RTL (arg))
3247 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3248 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3249 {
3250 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3251 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3252
3253 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3254 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3255 && mode != DECL_MODE (arg))
3256 {
3257 *pmode = DECL_MODE (arg);
3258 *punsignedp = unsignedp;
3259 return DECL_INCOMING_RTL (arg);
3260 }
3261 }
3262
3263 return 0;
3264}
3265
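
/* Editorial sketch, not part of function.c: the promotion that
   promoted_input_arg reports.  On many targets a 16-bit argument arrives
   sign- or zero-extended to a full 32-bit register (PROMOTE_MODE); the
   cast below mimics the signed case.  Values are hypothetical.  */
#include <stdio.h>

int
main (void)
{
  short narrow = -5;
  int promoted = (int) narrow;   /* signed promotion copies the sign bits */

  printf ("narrow = %d, register image = 0x%08x\n",
          narrow, (unsigned int) promoted);   /* prints 0xfffffffb */
  return 0;
}
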
3266\f
3267/* Compute the size and offset from the start of the stacked arguments for a
3268 parm passed in mode PASSED_MODE and with type TYPE.
3269
3270 INITIAL_OFFSET_PTR points to the current offset into the stacked
3271 arguments.
3272
3273 The starting offset and size for this parm are returned in
3274 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3275 nonzero, the offset is that of the stack slot, which is returned in
3276 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3277 padding required from the initial offset ptr to the stack slot.
3278
3279 IN_REGS is nonzero if the argument will be passed in registers. It will
3280 never be set if REG_PARM_STACK_SPACE is not defined.
3281
3282 FNDECL is the function in which the argument was defined.
3283
3284 There are two types of rounding that are done. The first, controlled by
3285 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3286 list to be aligned to the specific boundary (in bits). This rounding
3287 affects the initial and starting offsets, but not the argument size.
3288
3289 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3290 optionally rounds the size of the parm to PARM_BOUNDARY. The
3291 initial offset is not affected by this rounding, while the size always
3292 is and the starting offset may be. */
3293
3294/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3295 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3296 callers pass in the total size of args so far as
3297 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3298
3299void
3300locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3301 int partial, tree fndecl ATTRIBUTE_UNUSED,
3302 struct args_size *initial_offset_ptr,
3303 struct locate_and_pad_arg_data *locate)
3304{
3305 tree sizetree;
3306 enum direction where_pad;
3307 unsigned int boundary;
3308 int reg_parm_stack_space = 0;
3309 int part_size_in_regs;
3310
3311#ifdef REG_PARM_STACK_SPACE
3312 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3313
3314 /* If we have found a stack parm before we reach the end of the
3315 area reserved for registers, skip that area. */
3316 if (! in_regs)
3317 {
3318 if (reg_parm_stack_space > 0)
3319 {
3320 if (initial_offset_ptr->var)
3321 {
3322 initial_offset_ptr->var
3323 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3324 ssize_int (reg_parm_stack_space));
3325 initial_offset_ptr->constant = 0;
3326 }
3327 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3328 initial_offset_ptr->constant = reg_parm_stack_space;
3329 }
3330 }
3331#endif /* REG_PARM_STACK_SPACE */
3332
3333 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3334
3335 sizetree
3336 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3337 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3338 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3339 locate->where_pad = where_pad;
3340 locate->boundary = boundary;
3341
3342 /* Remember if the outgoing parameter requires extra alignment on the
3343 calling function side. */
3344 if (boundary > PREFERRED_STACK_BOUNDARY)
3345 boundary = PREFERRED_STACK_BOUNDARY;
3346 if (cfun->stack_alignment_needed < boundary)
3347 cfun->stack_alignment_needed = boundary;
3348
3349#ifdef ARGS_GROW_DOWNWARD
3350 locate->slot_offset.constant = -initial_offset_ptr->constant;
3351 if (initial_offset_ptr->var)
3352 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3353 initial_offset_ptr->var);
3354
3355 {
3356 tree s2 = sizetree;
3357 if (where_pad != none
3358 && (!host_integerp (sizetree, 1)
3359 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3360 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3361 SUB_PARM_SIZE (locate->slot_offset, s2);
3362 }
3363
3364 locate->slot_offset.constant += part_size_in_regs;
3365
3366 if (!in_regs
3367#ifdef REG_PARM_STACK_SPACE
3368 || REG_PARM_STACK_SPACE (fndecl) > 0
3369#endif
3370 )
3371 pad_to_arg_alignment (&locate->slot_offset, boundary,
3372 &locate->alignment_pad);
3373
3374 locate->size.constant = (-initial_offset_ptr->constant
3375 - locate->slot_offset.constant);
3376 if (initial_offset_ptr->var)
3377 locate->size.var = size_binop (MINUS_EXPR,
3378 size_binop (MINUS_EXPR,
3379 ssize_int (0),
3380 initial_offset_ptr->var),
3381 locate->slot_offset.var);
3382
3383 /* Pad_below needs the pre-rounded size to know how much to pad
3384 below. */
3385 locate->offset = locate->slot_offset;
3386 if (where_pad == downward)
3387 pad_below (&locate->offset, passed_mode, sizetree);
3388
3389#else /* !ARGS_GROW_DOWNWARD */
3390 if (!in_regs
3391#ifdef REG_PARM_STACK_SPACE
3392 || REG_PARM_STACK_SPACE (fndecl) > 0
3393#endif
3394 )
3395 pad_to_arg_alignment (initial_offset_ptr, boundary,
3396 &locate->alignment_pad);
3397 locate->slot_offset = *initial_offset_ptr;
3398
3399#ifdef PUSH_ROUNDING
3400 if (passed_mode != BLKmode)
3401 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3402#endif
3403
3404 /* Pad_below needs the pre-rounded size to know how much to pad below
3405 so this must be done before rounding up. */
3406 locate->offset = locate->slot_offset;
3407 if (where_pad == downward)
3408 pad_below (&locate->offset, passed_mode, sizetree);
3409
3410 if (where_pad != none
3411 && (!host_integerp (sizetree, 1)
3412 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3413 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3414
3415 ADD_PARM_SIZE (locate->size, sizetree);
3416
3417 locate->size.constant -= part_size_in_regs;
3418#endif /* ARGS_GROW_DOWNWARD */
3419}
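
/* Editorial sketch, not part of function.c: the two roundings described in
   the comment above locate_and_pad_parm, for an upward-growing argument
   area.  All numbers are hypothetical: 20 bytes of arguments located so
   far, an 8-byte FUNCTION_ARG_BOUNDARY for this argument, a 4-byte
   PARM_BOUNDARY, and a raw argument size of 6 bytes.  */
#include <stdio.h>

static int
sketch_round_up (int value, int align)
{
  return (value + align - 1) / align * align;
}

int
main (void)
{
  int initial_offset = 20, arg_boundary = 8, parm_boundary = 4, size = 6;

  /* First rounding: align the slot offset (pad_to_arg_alignment).  */
  int slot_offset = sketch_round_up (initial_offset, arg_boundary);   /* 24 */

  /* Second rounding: pad the size to PARM_BOUNDARY.  */
  int padded_size = sketch_round_up (size, parm_boundary);            /* 8 */

  printf ("slot at %d (alignment pad %d), size %d -> %d\n",
          slot_offset, slot_offset - initial_offset, size, padded_size);
  return 0;
}
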
3420
3421/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3422 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3423
3424static void
3425pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3426 struct args_size *alignment_pad)
3427{
3428 tree save_var = NULL_TREE;
3429 HOST_WIDE_INT save_constant = 0;
3430 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3431 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3432
3433#ifdef SPARC_STACK_BOUNDARY_HACK
3434 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3435 the real alignment of %sp. However, when it does this, the
3436 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3437 if (SPARC_STACK_BOUNDARY_HACK)
3438 sp_offset = 0;
3439#endif
3440
3441 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3442 {
3443 save_var = offset_ptr->var;
3444 save_constant = offset_ptr->constant;
3445 }
3446
3447 alignment_pad->var = NULL_TREE;
3448 alignment_pad->constant = 0;
3449
3450 if (boundary > BITS_PER_UNIT)
3451 {
3452 if (offset_ptr->var)
3453 {
3454 tree sp_offset_tree = ssize_int (sp_offset);
3455 tree offset = size_binop (PLUS_EXPR,
3456 ARGS_SIZE_TREE (*offset_ptr),
3457 sp_offset_tree);
3458#ifdef ARGS_GROW_DOWNWARD
3459 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3460#else
3461 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3462#endif
3463
3464 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3465 /* ARGS_SIZE_TREE includes constant term. */
3466 offset_ptr->constant = 0;
3467 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3468 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3469 save_var);
3470 }
3471 else
3472 {
3473 offset_ptr->constant = -sp_offset +
3474#ifdef ARGS_GROW_DOWNWARD
3475 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3476#else
3477 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3478#endif
3479 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3480 alignment_pad->constant = offset_ptr->constant - save_constant;
3481 }
3482 }
3483}
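
/* Editorial sketch, not part of function.c: CEIL_ROUND versus FLOOR_ROUND
   as selected above.  With upward-growing arguments the offset moves to
   the next higher multiple of the boundary; with ARGS_GROW_DOWNWARD it
   moves to the next lower multiple.  Example values are hypothetical.  */
#include <stdio.h>

#define SKETCH_CEIL_ROUND(v, a)  (((v) + (a) - 1) / (a) * (a))
#define SKETCH_FLOOR_ROUND(v, a) ((v) / (a) * (a))

int
main (void)
{
  int boundary_in_bytes = 8;

  printf ("ceil (13)  = %d\n", SKETCH_CEIL_ROUND (13, boundary_in_bytes));  /* 16 */
  printf ("floor (13) = %d\n", SKETCH_FLOOR_ROUND (13, boundary_in_bytes)); /* 8 */
  return 0;
}
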
3484
3485static void
3486pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3487{
3488 if (passed_mode != BLKmode)
3489 {
3490 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3491 offset_ptr->constant
3492 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3493 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3494 - GET_MODE_SIZE (passed_mode));
3495 }
3496 else
3497 {
3498 if (TREE_CODE (sizetree) != INTEGER_CST
3499 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3500 {
3501 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3502 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3503 /* Add it in. */
3504 ADD_PARM_SIZE (*offset_ptr, s2);
3505 SUB_PARM_SIZE (*offset_ptr, sizetree);
3506 }
3507 }
3508}
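
/* Editorial sketch, not part of function.c: the arithmetic in pad_below
   for a non-BLKmode value.  A 1-byte value padded downward in a 4-byte
   slot sits 3 bytes above the slot start, so the offset is advanced by
   slot size minus value size.  Numbers are hypothetical.  */
#include <stdio.h>

int
main (void)
{
  int parm_boundary_bytes = 4;   /* PARM_BOUNDARY / BITS_PER_UNIT */
  int value_size = 1;            /* GET_MODE_SIZE (passed_mode) */
  int offset = 16;               /* slot offset before padding */

  int slot_size = (value_size + parm_boundary_bytes - 1)
                  / parm_boundary_bytes * parm_boundary_bytes;
  offset += slot_size - value_size;   /* pad below the value */

  printf ("value placed at offset %d\n", offset);   /* 19 */
  return 0;
}
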
3509\f
3510/* Walk the tree of blocks describing the binding levels within a function
3511 and warn about variables that might be killed by setjmp or vfork.
3512 This is done after calling flow_analysis and before global_alloc
3513 clobbers the pseudo-regs to hard regs. */
3514
3515void
3516setjmp_vars_warning (tree block)
3517{
3518 tree decl, sub;
3519
3520 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3521 {
3522 if (TREE_CODE (decl) == VAR_DECL
3523 && DECL_RTL_SET_P (decl)
3524 && REG_P (DECL_RTL (decl))
3525 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3526 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3527 " or %<vfork%>",
3528 decl);
3529 }
3530
3531 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3532 setjmp_vars_warning (sub);
3533}
3534
3535/* Do the appropriate part of setjmp_vars_warning
3536 but for arguments instead of local variables. */
3537
3538void
3539setjmp_args_warning (void)
3540{
3541 tree decl;
3542 for (decl = DECL_ARGUMENTS (current_function_decl);
3543 decl; decl = TREE_CHAIN (decl))
3544 if (DECL_RTL (decl) != 0
3545 && REG_P (DECL_RTL (decl))
3546 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3547 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3548 decl);
3549}
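
/* Editorial sketch, not part of function.c: the user-level hazard these
   warnings diagnose.  V may live in a register; after longjmp its value
   is indeterminate unless it is declared volatile.  */
#include <setjmp.h>

static jmp_buf sketch_env;

int
sketch_setjmp_hazard (void)
{
  int v = 1;                /* "might be clobbered by longjmp or vfork" */

  if (setjmp (sketch_env) == 0)
    {
      v = 2;
      longjmp (sketch_env, 1);
    }
  return v;                 /* indeterminate: may be 1 or 2 */
}
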
3550
3551\f
3552/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3553 and create duplicate blocks. */
3554/* ??? Need an option to either create block fragments or to create
3555 abstract origin duplicates of a source block. It really depends
3556 on what optimization has been performed. */
3557
3558void
3559reorder_blocks (void)
3560{
3561 tree block = DECL_INITIAL (current_function_decl);
3562 VEC(tree,heap) *block_stack;
3563
3564 if (block == NULL_TREE)
3565 return;
3566
3567 block_stack = VEC_alloc (tree, heap, 10);
3568
3569 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3570 clear_block_marks (block);
3571
3572 /* Prune the old trees away, so that they don't get in the way. */
3573 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3574 BLOCK_CHAIN (block) = NULL_TREE;
3575
3576 /* Recreate the block tree from the note nesting. */
3577 reorder_blocks_1 (get_insns (), block, &block_stack);
3578 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3579
3580 /* Remove deleted blocks from the block fragment chains. */
3581 reorder_fix_fragments (block);
3582
3583 VEC_free (tree, heap, block_stack);
3584}
3585
3586/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3587
3588void
3589clear_block_marks (tree block)
3590{
3591 while (block)
3592 {
3593 TREE_ASM_WRITTEN (block) = 0;
3594 clear_block_marks (BLOCK_SUBBLOCKS (block));
3595 block = BLOCK_CHAIN (block);
3596 }
3597}
3598
3599static void
3600reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3601{
3602 rtx insn;
3603
3604 for (insn = insns; insn; insn = NEXT_INSN (insn))
3605 {
3606 if (NOTE_P (insn))
3607 {
3608 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3609 {
3610 tree block = NOTE_BLOCK (insn);
3611
3612 /* If we have seen this block before, that means it now
3613 spans multiple address regions. Create a new fragment. */
3614 if (TREE_ASM_WRITTEN (block))
3615 {
3616 tree new_block = copy_node (block);
3617 tree origin;
3618
3619 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3620 ? BLOCK_FRAGMENT_ORIGIN (block)
3621 : block);
3622 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3623 BLOCK_FRAGMENT_CHAIN (new_block)
3624 = BLOCK_FRAGMENT_CHAIN (origin);
3625 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3626
3627 NOTE_BLOCK (insn) = new_block;
3628 block = new_block;
3629 }
3630
3631 BLOCK_SUBBLOCKS (block) = 0;
3632 TREE_ASM_WRITTEN (block) = 1;
3633 /* When there's only one block for the entire function,
3634 current_block == block and we mustn't do this, it
3635 will cause infinite recursion. */
3636 if (block != current_block)
3637 {
3638 BLOCK_SUPERCONTEXT (block) = current_block;
3639 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3640 BLOCK_SUBBLOCKS (current_block) = block;
3641 current_block = block;
3642 }
3643 VEC_safe_push (tree, heap, *p_block_stack, block);
3644 }
3645 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3646 {
3647 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3648 BLOCK_SUBBLOCKS (current_block)
3649 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3650 current_block = BLOCK_SUPERCONTEXT (current_block);
3651 }
3652 }
3653 }
3654}
3655
3656/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3657 appears in the block tree, select one of the fragments to become
3658 the new origin block. */
3659
3660static void
3661reorder_fix_fragments (tree block)
3662{
3663 while (block)
3664 {
3665 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3666 tree new_origin = NULL_TREE;
3667
3668 if (dup_origin)
3669 {
3670 if (! TREE_ASM_WRITTEN (dup_origin))
3671 {
3672 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3673
3674 /* Find the first of the remaining fragments. There must
3675 be at least one -- the current block. */
3676 while (! TREE_ASM_WRITTEN (new_origin))
3677 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3678 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3679 }
3680 }
3681 else if (! dup_origin)
3682 new_origin = block;
3683
3684 /* Re-root the rest of the fragments to the new origin. In the
3685 case that DUP_ORIGIN was null, that means BLOCK was the origin
3686 of a chain of fragments and we want to remove those fragments
3687 that didn't make it to the output. */
3688 if (new_origin)
3689 {
3690 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3691 tree chain = *pp;
3692
3693 while (chain)
3694 {
3695 if (TREE_ASM_WRITTEN (chain))
3696 {
3697 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3698 *pp = chain;
3699 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3700 }
3701 chain = BLOCK_FRAGMENT_CHAIN (chain);
3702 }
3703 *pp = NULL_TREE;
3704 }
3705
3706 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3707 block = BLOCK_CHAIN (block);
3708 }
3709}
3710
3711/* Reverse the order of elements in the chain T of blocks,
3712 and return the new head of the chain (old last element). */
3713
3714tree
3715blocks_nreverse (tree t)
3716{
3717 tree prev = 0, decl, next;
3718 for (decl = t; decl; decl = next)
3719 {
3720 next = BLOCK_CHAIN (decl);
3721 BLOCK_CHAIN (decl) = prev;
3722 prev = decl;
3723 }
3724 return prev;
3725}
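
/* Editorial sketch, not part of function.c: the same in-place
   pointer-reversal idiom blocks_nreverse uses, on a plain singly linked
   list type invented for the example.  */
struct sketch_node { struct sketch_node *next; };

static struct sketch_node *
sketch_nreverse (struct sketch_node *t)
{
  struct sketch_node *prev = 0, *decl, *next;

  for (decl = t; decl; decl = next)
    {
      next = decl->next;        /* save the rest of the chain */
      decl->next = prev;        /* point this node backwards */
      prev = decl;
    }
  return prev;                  /* old tail is the new head */
}
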
3726
3727/* Count the subblocks of the list starting with BLOCK. If VECTOR is
3728 non-NULL, list them all into VECTOR, in a depth-first preorder
3729 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3730 blocks. */
3731
3732static int
3733all_blocks (tree block, tree *vector)
3734{
3735 int n_blocks = 0;
3736
3737 while (block)
3738 {
3739 TREE_ASM_WRITTEN (block) = 0;
3740
3741 /* Record this block. */
3742 if (vector)
3743 vector[n_blocks] = block;
3744
3745 ++n_blocks;
3746
3747 /* Record the subblocks, and their subblocks... */
3748 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3749 vector ? vector + n_blocks : 0);
3750 block = BLOCK_CHAIN (block);
3751 }
3752
3753 return n_blocks;
3754}
3755
3756/* Return a vector containing all the blocks rooted at BLOCK. The
3757 number of elements in the vector is stored in N_BLOCKS_P. The
3758 vector is dynamically allocated; it is the caller's responsibility
3759 to call `free' on the pointer returned. */
3760
3761static tree *
3762get_block_vector (tree block, int *n_blocks_p)
3763{
3764 tree *block_vector;
3765
3766 *n_blocks_p = all_blocks (block, NULL);
3767 block_vector = XNEWVEC (tree, *n_blocks_p);
3768 all_blocks (block, block_vector);
3769
3770 return block_vector;
3771}
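
/* Editorial sketch, not part of function.c: the two-pass pattern of
   get_block_vector -- walk once to count, allocate, walk again to fill --
   on a hypothetical tree of sketch_block nodes.  */
#include <stdlib.h>

struct sketch_block { struct sketch_block *sub, *chain; };

static int
sketch_all_blocks (struct sketch_block *b, struct sketch_block **vec)
{
  int n = 0;
  while (b)
    {
      if (vec)
        vec[n] = b;                          /* preorder: this block first */
      ++n;
      n += sketch_all_blocks (b->sub, vec ? vec + n : 0);
      b = b->chain;
    }
  return n;
}

static struct sketch_block **
sketch_block_vector (struct sketch_block *root, int *n_p)
{
  *n_p = sketch_all_blocks (root, 0);        /* pass 1: count */
  struct sketch_block **vec = malloc (*n_p * sizeof *vec);
  if (vec)
    sketch_all_blocks (root, vec);           /* pass 2: fill */
  return vec;                                /* caller must free */
}
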
3772
3773static GTY(()) int next_block_index = 2;
3774
3775/* Set BLOCK_NUMBER for all the blocks in FN. */
3776
3777void
3778number_blocks (tree fn)
3779{
3780 int i;
3781 int n_blocks;
3782 tree *block_vector;
3783
3784 /* For SDB and XCOFF debugging output, we start numbering the blocks
3785 from 1 within each function, rather than keeping a running
3786 count. */
3787#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3788 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3789 next_block_index = 1;
3790#endif
3791
3792 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3793
3794 /* The top-level BLOCK isn't numbered at all. */
3795 for (i = 1; i < n_blocks; ++i)
3796 /* We number the blocks from two. */
3797 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3798
3799 free (block_vector);
3800
3801 return;
3802}
3803
3804/* If VAR is present in a subblock of BLOCK, return the subblock. */
3805
3806tree
3807debug_find_var_in_block_tree (tree var, tree block)
3808{
3809 tree t;
3810
3811 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3812 if (t == var)
3813 return block;
3814
3815 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3816 {
3817 tree ret = debug_find_var_in_block_tree (var, t);
3818 if (ret)
3819 return ret;
3820 }
3821
3822 return NULL_TREE;
3823}
3824\f
3825/* Allocate a function structure for FNDECL and set its contents
3826 to the defaults. */
3827
3828void
3829allocate_struct_function (tree fndecl)
3830{
3831 tree result;
3832 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3833
3834 cfun = ggc_alloc_cleared (sizeof (struct function));
3835
3836 cfun->stack_alignment_needed = STACK_BOUNDARY;
3837 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3838
3839 current_function_funcdef_no = funcdef_no++;
3840
3841 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3842
3843 init_eh_for_function ();
3844
3845 lang_hooks.function.init (cfun);
3846 if (init_machine_status)
3847 cfun->machine = (*init_machine_status) ();
3848
3849 if (fndecl == NULL)
3850 return;
3851
3852 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3853 cfun->decl = fndecl;
3854
3855 result = DECL_RESULT (fndecl);
3856 if (aggregate_value_p (result, fndecl))
3857 {
3858#ifdef PCC_STATIC_STRUCT_RETURN
3859 current_function_returns_pcc_struct = 1;
3860#endif
3861 current_function_returns_struct = 1;
3862 }
3863
3864 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3865
3866 current_function_stdarg
3867 = (fntype
3868 && TYPE_ARG_TYPES (fntype) != 0
3869 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3870 != void_type_node));
3871
3872 /* Assume all registers in stdarg functions need to be saved. */
3873 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3874 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3875}
3876
3877/* Reset cfun and other non-struct-function variables to defaults as
3878 appropriate for emitting rtl at the start of a function. */
3879
3880static void
3881prepare_function_start (tree fndecl)
3882{
3883 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3884 cfun = DECL_STRUCT_FUNCTION (fndecl);
3885 else
3886 allocate_struct_function (fndecl);
3887 init_emit ();
3888 init_varasm_status (cfun);
3889 init_expr ();
3890
3891 cse_not_expected = ! optimize;
3892
3893 /* Caller save not needed yet. */
3894 caller_save_needed = 0;
3895
3896 /* We haven't done register allocation yet. */
3897 reg_renumber = 0;
3898
3899 /* Indicate that we have not instantiated virtual registers yet. */
3900 virtuals_instantiated = 0;
3901
3902 /* Indicate that we want CONCATs now. */
3903 generating_concat_p = 1;
3904
3905 /* Indicate we have no need of a frame pointer yet. */
3906 frame_pointer_needed = 0;
3907}
3908
3909/* Initialize the rtl expansion mechanism so that we can do simple things
3910 like generate sequences. This is used to provide a context during global
3911 initialization of some passes. */
3912void
3913init_dummy_function_start (void)
3914{
3915 prepare_function_start (NULL);
3916}
3917
3918/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3919 and initialize static variables for generating RTL for the statements
3920 of the function. */
3921
3922void
3923init_function_start (tree subr)
3924{
3925 prepare_function_start (subr);
3926
3927 /* Prevent ever trying to delete the first instruction of a
3928 function. Also tell final how to output a linenum before the
3929 function prologue. Note linenums could be missing, e.g. when
3930 compiling a Java .class file. */
3931 if (! DECL_IS_BUILTIN (subr))
3932 emit_line_note (DECL_SOURCE_LOCATION (subr));
3933
3934 /* Make sure first insn is a note even if we don't want linenums.
3935 This makes sure the first insn will never be deleted.
3936 Also, final expects a note to appear there. */
3937 emit_note (NOTE_INSN_DELETED);
3938
3939 /* Warn if this value is an aggregate type,
3940 regardless of which calling convention we are using for it. */
3941 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3942 warning (OPT_Waggregate_return, "function returns an aggregate");
3943}
3944
3945/* Make sure all values used by the optimization passes have sane
3946 defaults. */
3947unsigned int
3948init_function_for_compilation (void)
3949{
3950 reg_renumber = 0;
3951
3952 /* No prologue/epilogue insns yet. Make sure that these vectors are
3953 empty. */
3954 gcc_assert (VEC_length (int, prologue) == 0);
3955 gcc_assert (VEC_length (int, epilogue) == 0);
3956 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3957 return 0;
3958}
3959
3960struct tree_opt_pass pass_init_function =
3961{
3962 NULL, /* name */
3963 NULL, /* gate */
3964 init_function_for_compilation, /* execute */
3965 NULL, /* sub */
3966 NULL, /* next */
3967 0, /* static_pass_number */
3968 0, /* tv_id */
3969 0, /* properties_required */
3970 0, /* properties_provided */
3971 0, /* properties_destroyed */
3972 0, /* todo_flags_start */
3973 0, /* todo_flags_finish */
3974 0 /* letter */
3975};
3976
3977
3978void
3979expand_main_function (void)
3980{
3981#if (defined(INVOKE__main) \
3982 || (!defined(HAS_INIT_SECTION) \
3983 && !defined(INIT_SECTION_ASM_OP) \
3984 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3985 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3986#endif
3987}
3988\f
3989/* Expand code to initialize the stack_protect_guard. This is invoked at
3990 the beginning of a function to be protected. */
3991
3992#ifndef HAVE_stack_protect_set
3993# define HAVE_stack_protect_set 0
3994# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3995#endif
3996
3997void
3998stack_protect_prologue (void)
3999{
4000 tree guard_decl = targetm.stack_protect_guard ();
4001 rtx x, y;
4002
4003 /* Avoid expand_expr here, because we don't want guard_decl pulled
4004 into registers unless absolutely necessary. And we know that
4005 cfun->stack_protect_guard is a local stack slot, so this skips
4006 all the fluff. */
4007 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4008 y = validize_mem (DECL_RTL (guard_decl));
4009
4010 /* Allow the target to copy from Y to X without leaking Y into a
4011 register. */
4012 if (HAVE_stack_protect_set)
4013 {
4014 rtx insn = gen_stack_protect_set (x, y);
4015 if (insn)
4016 {
4017 emit_insn (insn);
4018 return;
4019 }
4020 }
4021
4022 /* Otherwise do a straight move. */
4023 emit_move_insn (x, y);
4024}
4025
4026/* Expand code to verify the stack_protect_guard. This is invoked at
4027 the end of a function to be protected. */
4028
4029#ifndef HAVE_stack_protect_test
4030# define HAVE_stack_protect_test 0
4031# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4032#endif
4033
4034void
4035stack_protect_epilogue (void)
4036{
4037 tree guard_decl = targetm.stack_protect_guard ();
4038 rtx label = gen_label_rtx ();
4039 rtx x, y, tmp;
4040
4041 /* Avoid expand_expr here, because we don't want guard_decl pulled
4042 into registers unless absolutely necessary. And we know that
4043 cfun->stack_protect_guard is a local stack slot, so this skips
4044 all the fluff. */
4045 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4046 y = validize_mem (DECL_RTL (guard_decl));
4047
4048 /* Allow the target to compare Y with X without leaking either into
4049 a register. */
4050 switch (HAVE_stack_protect_test != 0)
4051 {
4052 case 1:
4053 tmp = gen_stack_protect_test (x, y, label);
4054 if (tmp)
4055 {
4056 emit_insn (tmp);
4057 break;
4058 }
4059 /* FALLTHRU */
4060
4061 default:
4062 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4063 break;
4064 }
4065
4066 /* The noreturn predictor has been moved to the tree level. The rtl-level
4067 predictors estimate this branch about 20%, which isn't enough to get
4068 things moved out of line. Since this is the only extant case of adding
4069 a noreturn function at the rtl level, it doesn't seem worth doing aught
4070 except adding the prediction by hand. */
4071 tmp = get_last_insn ();
4072 if (JUMP_P (tmp))
4073 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4074
4075 expand_expr_stmt (targetm.stack_protect_fail ());
4076 emit_label (label);
4077}
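
/* Editorial sketch, not part of function.c: the net effect of
   stack_protect_prologue and stack_protect_epilogue, written as plain C.
   The guard symbol and function names are hypothetical; real targets
   fetch the guard via targetm.stack_protect_guard (often from TLS).  */
extern unsigned long sketch_guard;                /* hypothetical guard */

void
sketch_protected_function (void)
{
  volatile unsigned long canary = sketch_guard;   /* prologue: copy guard */

  /* ... function body; a buffer overrun here would overwrite CANARY
     before it reached the saved return address ... */

  if (canary != sketch_guard)                     /* epilogue: compare */
    __builtin_trap ();                            /* stack_protect_fail */
}
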
4078\f
4079/* Start the RTL for a new function, and set variables used for
4080 emitting RTL.
4081 SUBR is the FUNCTION_DECL node.
4082 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4083 the function's parameters, which must be run at any return statement. */
4084
4085void
4086expand_function_start (tree subr)
4087{
4088 /* Make sure volatile mem refs aren't considered
4089 valid operands of arithmetic insns. */
4090 init_recog_no_volatile ();
4091
4092 current_function_profile
4093 = (profile_flag
4094 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4095
4096 current_function_limit_stack
4097 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4098
4099 /* Make the label for return statements to jump to. Do not special
4100 case machines with special return instructions -- they will be
4101 handled later during jump, ifcvt, or epilogue creation. */
4102 return_label = gen_label_rtx ();
4103
4104 /* Initialize rtx used to return the value. */
4105 /* Do this before assign_parms so that we copy the struct value address
4106 before any library calls that assign parms might generate. */
4107
4108 /* Decide whether to return the value in memory or in a register. */
4109 if (aggregate_value_p (DECL_RESULT (subr), subr))
4110 {
4111 /* Returning something that won't go in a register. */
4112 rtx value_address = 0;
4113
4114#ifdef PCC_STATIC_STRUCT_RETURN
4115 if (current_function_returns_pcc_struct)
4116 {
4117 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4118 value_address = assemble_static_space (size);
4119 }
4120 else
4121#endif
4122 {
4123 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4124 /* Expect to be passed the address of a place to store the value.
4125 If it is passed as an argument, assign_parms will take care of
4126 it. */
4127 if (sv)
4128 {
4129 value_address = gen_reg_rtx (Pmode);
4130 emit_move_insn (value_address, sv);
4131 }
4132 }
4133 if (value_address)
4134 {
4135 rtx x = value_address;
4136 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4137 {
4138 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4139 set_mem_attributes (x, DECL_RESULT (subr), 1);
4140 }
4141 SET_DECL_RTL (DECL_RESULT (subr), x);
4142 }
4143 }
4144 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4145 /* If return mode is void, this decl rtl should not be used. */
4146 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4147 else
4148 {
4149 /* Compute the return values into a pseudo reg, which we will copy
4150 into the true return register after the cleanups are done. */
4151 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4152 if (TYPE_MODE (return_type) != BLKmode
4153 && targetm.calls.return_in_msb (return_type))
4154 /* expand_function_end will insert the appropriate padding in
4155 this case. Use the return value's natural (unpadded) mode
4156 within the function proper. */
4157 SET_DECL_RTL (DECL_RESULT (subr),
4158 gen_reg_rtx (TYPE_MODE (return_type)));
4159 else
4160 {
4161 /* In order to figure out what mode to use for the pseudo, we
4162 figure out what the mode of the eventual return register will
4163 actually be, and use that. */
4164 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4165
4166 /* Structures that are returned in registers are not
4167 aggregate_value_p, so we may see a PARALLEL or a REG. */
4168 if (REG_P (hard_reg))
4169 SET_DECL_RTL (DECL_RESULT (subr),
4170 gen_reg_rtx (GET_MODE (hard_reg)));
4171 else
4172 {
4173 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4174 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4175 }
4176 }
4177
4178 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4179 result to the real return register(s). */
4180 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4181 }
4182
4183 /* Initialize rtx for parameters and local variables.
4184 In some cases this requires emitting insns. */
4185 assign_parms (subr);
4186
4187 /* If function gets a static chain arg, store it. */
4188 if (cfun->static_chain_decl)
4189 {
4190 tree parm = cfun->static_chain_decl;
4191 rtx local = gen_reg_rtx (Pmode);
4192
4193 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4194 SET_DECL_RTL (parm, local);
4195 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4196
4197 emit_move_insn (local, static_chain_incoming_rtx);
4198 }
4199
4200 /* If the function receives a non-local goto, then store the
4201 bits we need to restore the frame pointer. */
4202 if (cfun->nonlocal_goto_save_area)
4203 {
4204 tree t_save;
4205 rtx r_save;
4206
4207 /* ??? We need to do this save early. Unfortunately here is
4208 before the frame variable gets declared. Help out... */
4209 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4210
4211 t_save = build4 (ARRAY_REF, ptr_type_node,
4212 cfun->nonlocal_goto_save_area,
4213 integer_zero_node, NULL_TREE, NULL_TREE);
4214 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4215 r_save = convert_memory_address (Pmode, r_save);
4216
4217 emit_move_insn (r_save, virtual_stack_vars_rtx);
4218 update_nonlocal_goto_save_area ();
4219 }
4220
4221 /* The following was moved from init_function_start.
4222 The move is supposed to make sdb output more accurate. */
4223 /* Indicate the beginning of the function body,
4224 as opposed to parm setup. */
4225 emit_note (NOTE_INSN_FUNCTION_BEG);
4226
4227 gcc_assert (NOTE_P (get_last_insn ()));
4228
4229 parm_birth_insn = get_last_insn ();
4230
4231 if (current_function_profile)
4232 {
4233#ifdef PROFILE_HOOK
4234 PROFILE_HOOK (current_function_funcdef_no);
4235#endif
4236 }
4237
4238 /* After the display initializations is where the stack checking
4239 probe should go. */
4240 if (flag_stack_check)
4241 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4242
4243 /* Make sure there is a line number after the function entry setup code. */
4244 force_next_line_note ();
4245}
4246\f
4247/* Undo the effects of init_dummy_function_start. */
4248void
4249expand_dummy_function_end (void)
4250{
4251 /* End any sequences that failed to be closed due to syntax errors. */
4252 while (in_sequence_p ())
4253 end_sequence ();
4254
4255 /* Outside function body, can't compute type's actual size
4256 until next function's body starts. */
4257
4258 free_after_parsing (cfun);
4259 free_after_compilation (cfun);
4260 cfun = 0;
4261}
4262
4263/* Call DOIT for each hard register used as a return value from
4264 the current function. */
4265
4266void
4267diddle_return_value (void (*doit) (rtx, void *), void *arg)
4268{
4269 rtx outgoing = current_function_return_rtx;
4270
4271 if (! outgoing)
4272 return;
4273
4274 if (REG_P (outgoing))
4275 (*doit) (outgoing, arg);
4276 else if (GET_CODE (outgoing) == PARALLEL)
4277 {
4278 int i;
4279
4280 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4281 {
4282 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4283
4284 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4285 (*doit) (x, arg);
4286 }
4287 }
4288}
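
/* Editorial sketch, not part of function.c: the callback shape of
   diddle_return_value, reduced to a hypothetical iterator over an array.
   Each element is handed to DOIT along with the opaque ARG, just as each
   hard return register is handed to the callback above.  */
static void
sketch_for_each (int *values, int n, void (*doit) (int, void *), void *arg)
{
  int i;
  for (i = 0; i < n; i++)
    doit (values[i], arg);
}
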
4289
4290static void
4291do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4292{
4292{
4293 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4294}
4295
4296void
4297clobber_return_register (void)
4298{
4299 diddle_return_value (do_clobber_return_reg, NULL);
4300
4301 /* In case we do use pseudo to return value, clobber it too. */
4302 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4303 {
4304 tree decl_result = DECL_RESULT (current_function_decl);
4305 rtx decl_rtl = DECL_RTL (decl_result);
4306 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4307 {
4308 do_clobber_return_reg (decl_rtl, NULL);
4309 }
4310 }
4311}
4312
4313static void
4314do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4315{
4316 emit_insn (gen_rtx_USE (VOIDmode, reg));
4317}
4318
4319static void
4320use_return_register (void)
4321{
4321{
4322 diddle_return_value (do_use_return_reg, NULL);
4323}
4324
4325/* Possibly warn about unused parameters. */
4326void
4327do_warn_unused_parameter (tree fn)
4328{
4329 tree decl;
4330
4331 for (decl = DECL_ARGUMENTS (fn);
4332 decl; decl = TREE_CHAIN (decl))
4333 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4334 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4335 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4336}
4337
4338static GTY(()) rtx initial_trampoline;
4339
4340/* Generate RTL for the end of the current function. */
4341
4342void
4343expand_function_end (void)
4344{
4345 rtx clobber_after;
4346
4347 /* If arg_pointer_save_area was referenced only from a nested
4348 function, we will not have initialized it yet. Do that now. */
4349 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4350 get_arg_pointer_save_area (cfun);
4351
4352 /* If we are doing stack checking and this function makes calls,
4353 do a stack probe at the start of the function to ensure we have enough
4354 space for another stack frame. */
4355 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4356 {
4357 rtx insn, seq;
4358
4359 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4360 if (CALL_P (insn))
4361 {
4362 start_sequence ();
4363 probe_stack_range (STACK_CHECK_PROTECT,
4364 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4365 seq = get_insns ();
4366 end_sequence ();
4367 emit_insn_before (seq, stack_check_probe_note);
4368 break;
4369 }
4370 }
4371
4372 /* Possibly warn about unused parameters.
4373 When frontend does unit-at-a-time, the warning is already
4374 issued at finalization time. */
4375 if (warn_unused_parameter
4376 && !lang_hooks.callgraph.expand_function)
4377 do_warn_unused_parameter (current_function_decl);
4378
4379 /* End any sequences that failed to be closed due to syntax errors. */
4380 while (in_sequence_p ())
4381 end_sequence ();
4382
4383 clear_pending_stack_adjust ();
4384 do_pending_stack_adjust ();
4385
4386 /* Mark the end of the function body.
4387 If control reaches this insn, the function can drop through
4388 without returning a value. */
4389 emit_note (NOTE_INSN_FUNCTION_END);
4390
4391 /* Must mark the last line number note in the function, so that the test
4392 coverage code can avoid counting the last line twice. This just tells
4393 the code to ignore the immediately following line note, since there
4394 already exists a copy of this note somewhere above. This line number
4395 note is still needed for debugging though, so we can't delete it. */
4396 if (flag_test_coverage)
4397 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4398
4399 /* Output a linenumber for the end of the function.
4400 SDB depends on this. */
4401 force_next_line_note ();
4402 emit_line_note (input_location);
4403
4404 /* Before the return label (if any), clobber the return
4405 registers so that they are not propagated live to the rest of
4406 the function. This can only happen with functions that drop
4407 through; if there had been a return statement, there would
4408 have either been a return rtx, or a jump to the return label.
4409
4410 We delay actual code generation until after the current_function_value_rtx
4411 is computed. */
4412 clobber_after = get_last_insn ();
4413
4414 /* Output the label for the actual return from the function. */
4415 emit_label (return_label);
4416
4417 if (USING_SJLJ_EXCEPTIONS)
4418 {
4419 /* Let except.c know where it should emit the call to unregister
4420 the function context for sjlj exceptions. */
4421 if (flag_exceptions)
4422 sjlj_emit_function_exit_after (get_last_insn ());
4423 }
4424 else
4425 {
4426 /* @@@ This is a kludge. We want to ensure that instructions that
4427 may trap are not moved into the epilogue by scheduling, because
4428 we don't always emit unwind information for the epilogue.
4429 However, not all machine descriptions define a blockage insn, so
4430 emit an ASM_INPUT to act as one. */
4431 if (flag_non_call_exceptions)
4432 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4433 }
4434
4435 /* If this is an implementation of throw, do what's necessary to
4436 communicate between __builtin_eh_return and the epilogue. */
4437 expand_eh_return ();
4438
4439 /* If scalar return value was computed in a pseudo-reg, or was a named
4440 return value that got dumped to the stack, copy that to the hard
4441 return register. */
4442 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4443 {
4444 tree decl_result = DECL_RESULT (current_function_decl);
4445 rtx decl_rtl = DECL_RTL (decl_result);
4446
4447 if (REG_P (decl_rtl)
4448 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4449 : DECL_REGISTER (decl_result))
4450 {
4451 rtx real_decl_rtl = current_function_return_rtx;
4452
4453 /* This should be set in assign_parms. */
4454 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4455
4456 /* If this is a BLKmode structure being returned in registers,
4457 then use the mode computed in expand_return. Note that if
4458 decl_rtl is memory, then its mode may have been changed,
4459 but that current_function_return_rtx has not. */
4460 if (GET_MODE (real_decl_rtl) == BLKmode)
4461 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4462
4463 /* If a non-BLKmode return value should be padded at the least
4464 significant end of the register, shift it left by the appropriate
4465 amount. BLKmode results are handled using the group load/store
4466 machinery. */
4467 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4468 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4469 {
4470 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4471 REGNO (real_decl_rtl)),
4472 decl_rtl);
4473 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4474 }
4475 /* If a named return value dumped decl_rtl to memory, then
4476 we may need to re-do the PROMOTE_MODE signed/unsigned
4477 extension. */
4478 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4479 {
4480 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4481
4482 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4483 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4484 &unsignedp, 1);
4485
4486 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4487 }
4488 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4489 {
4490 /* If expand_function_start has created a PARALLEL for decl_rtl,
4491 move the result to the real return registers. Otherwise, do
4492 a group load from decl_rtl for a named return. */
4493 if (GET_CODE (decl_rtl) == PARALLEL)
4494 emit_group_move (real_decl_rtl, decl_rtl);
4495 else
4496 emit_group_load (real_decl_rtl, decl_rtl,
4497 TREE_TYPE (decl_result),
4498 int_size_in_bytes (TREE_TYPE (decl_result)));
4499 }
4500 /* In the case of complex integer modes smaller than a word, we'll
4501 need to generate some non-trivial bitfield insertions. Do that
4502 on a pseudo and not the hard register. */
4503 else if (GET_CODE (decl_rtl) == CONCAT
4504 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4505 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4506 {
4507 int old_generating_concat_p;
4508 rtx tmp;
4509
4510 old_generating_concat_p = generating_concat_p;
4511 generating_concat_p = 0;
4512 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4513 generating_concat_p = old_generating_concat_p;
4514
4515 emit_move_insn (tmp, decl_rtl);
4516 emit_move_insn (real_decl_rtl, tmp);
4517 }
4518 else
4519 emit_move_insn (real_decl_rtl, decl_rtl);
4520 }
4521 }
4522
4523 /* If returning a structure, arrange to return the address of the value
4524 in a place where debuggers expect to find it.
4525
4526 If returning a structure PCC style,
4527 the caller also depends on this value.
4528 And current_function_returns_pcc_struct is not necessarily set. */
4529 if (current_function_returns_struct
4530 || current_function_returns_pcc_struct)
4531 {
4532 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4533 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4534 rtx outgoing;
4535
4536 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4537 type = TREE_TYPE (type);
4538 else
4539 value_address = XEXP (value_address, 0);
4540
4541 outgoing = targetm.calls.function_value (build_pointer_type (type),
4542 current_function_decl, true);
4543
4544 /* Mark this as a function return value so integrate will delete the
4545 assignment and USE below when inlining this function. */
4546 REG_FUNCTION_VALUE_P (outgoing) = 1;
4547
4548 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4549 value_address = convert_memory_address (GET_MODE (outgoing),
4550 value_address);
4551
4552 emit_move_insn (outgoing, value_address);
4553
4554 /* Show the return register used to hold the result (in this case the
4555 address of the result). */
4556 current_function_return_rtx = outgoing;
4557 }
4558
4559 /* Emit the actual code to clobber return register. */
4560 {
4561 rtx seq;
4562
4563 start_sequence ();
4564 clobber_return_register ();
4565 expand_naked_return ();
4566 seq = get_insns ();
4567 end_sequence ();
4568
4569 emit_insn_after (seq, clobber_after);
4570 }
4571
4572 /* Output the label for the naked return from the function. */
4573 emit_label (naked_return_label);
4574
4575 /* If stack protection is enabled for this function, check the guard. */
4576 if (cfun->stack_protect_guard)
4577 stack_protect_epilogue ();
4578
4579 /* If we had calls to alloca, and this machine needs
4580 an accurate stack pointer to exit the function,
4581 insert some code to save and restore the stack pointer. */
4582 if (! EXIT_IGNORE_STACK
4583 && current_function_calls_alloca)
4584 {
4585 rtx tem = 0;
4586
4587 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4588 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4589 }
4590
4591 /* ??? This should no longer be necessary since stupid is no longer with
4592 us, but there are some parts of the compiler (eg reload_combine, and
4593 sh mach_dep_reorg) that still try and compute their own lifetime info
4594 instead of using the general framework. */
4595 use_return_register ();
4596}
4597
4598rtx
4599get_arg_pointer_save_area (struct function *f)
4600{
4601 rtx ret = f->x_arg_pointer_save_area;
4602
4603 if (! ret)
4604 {
4605 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4606 f->x_arg_pointer_save_area = ret;
4607 }
4608
4609 if (f == cfun && ! f->arg_pointer_save_area_init)
4610 {
4611 rtx seq;
4612
4613 /* Save the arg pointer at the beginning of the function. The
4614 generated stack slot may not be a valid memory address, so we
4615 have to check it and fix it if necessary. */
4616 start_sequence ();
4617 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4618 seq = get_insns ();
4619 end_sequence ();
4620
4621 push_topmost_sequence ();
4622 emit_insn_after (seq, entry_of_function ());
4623 pop_topmost_sequence ();
4624 }
4625
4626 return ret;
4627}
4628\f
4629/* Extend a vector that records the INSN_UIDs of INSNS
4630 (a list of one or more insns). */
4631
4632static void
4633record_insns (rtx insns, VEC(int,heap) **vecp)
4634{
4635 rtx tmp;
4636
4637 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4638 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4639}
4640
4641/* Set the locator of the insn chain starting at INSN to LOC. */
4642static void
4643set_insn_locators (rtx insn, int loc)
4644{
4645 while (insn != NULL_RTX)
4646 {
4647 if (INSN_P (insn))
4648 INSN_LOCATOR (insn) = loc;
4649 insn = NEXT_INSN (insn);
4650 }
4651}
4652
4653/* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4654 be running after reorg, SEQUENCE rtl is possible. */
4655
4656static int
4657contains (rtx insn, VEC(int,heap) **vec)
4658{
4659 int i, j;
4660
4661 if (NONJUMP_INSN_P (insn)
4662 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4663 {
4664 int count = 0;
4665 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4666 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4667 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4668 == VEC_index (int, *vec, j))
4669 count++;
4670 return count;
4671 }
4672 else
4673 {
4674 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4675 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4676 return 1;
4677 }
4678 return 0;
4679}
4680
4681int
4682prologue_epilogue_contains (rtx insn)
4683{
4684 if (contains (insn, &prologue))
4685 return 1;
4686 if (contains (insn, &epilogue))
4687 return 1;
4688 return 0;
4689}
4690
4691int
4692sibcall_epilogue_contains (rtx insn)
4693{
4694 if (sibcall_epilogue)
4695 return contains (insn, &sibcall_epilogue);
4696 return 0;
4697}
4698
4699#ifdef HAVE_return
4700/* Insert gen_return at the end of block BB. This also means updating
4701 block_for_insn appropriately. */
4702
4703static void
fa8db1f7 4704emit_return_into_block (basic_block bb, rtx line_note)
69732dcb 4705{
a813c111 4706 emit_jump_insn_after (gen_return (), BB_END (bb));
86c82654 4707 if (line_note)
a813c111 4708 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
69732dcb 4709}
73ef99fb 4710#endif /* HAVE_return */
69732dcb 4711
3258e996
RK
4712#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4713
535a42b1
NS
4714/* These functions convert the epilogue into a variant that does not
4715 modify the stack pointer. This is used in cases where a function
4716 returns an object whose size is not known until it is computed.
4717 The called function leaves the object on the stack, leaves the
4718 stack depressed, and returns a pointer to the object.
4719
4720 What we need to do is track all modifications and references to the
4721 stack pointer, deleting the modifications and changing the
4722 references to point to the location the stack pointer would have
4723 pointed to had the modifications taken place.
4724
4725 These functions need to be portable so we need to make as few
4726 assumptions about the epilogue as we can. However, the epilogue
4727 basically contains three things: instructions to reset the stack
4728 pointer, instructions to reload registers, possibly including the
4729 frame pointer, and an instruction to return to the caller.
4730
4731 We must be sure of what a relevant epilogue insn is doing. We also
4732 make no attempt to validate the insns we make since if they are
4733 invalid, we probably can't do anything valid. The intent is that
4734 these routines get "smarter" as more and more machines start to use
4735 them and they try operating on different epilogues.
4736
4737 We use the following structure to track what the part of the
4738 epilogue that we've already processed has done. We keep two copies
4739 of the SP equivalence, one for use during the insn we are
4740 processing and one for use in the next insn. The difference is
4741 because one part of a PARALLEL may adjust SP and the other may use
4742 it. */
3258e996
RK
4743
4744struct epi_info
4745{
4746 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4747 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
3ef42a0c 4748 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
3258e996
RK
4749 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4750 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4751 should be set to once we no longer need
4752 its value. */
f285d67b
RK
4753 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4754 for registers. */
3258e996
RK
4755};
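
/* A worked example of the tracking above (schematic RTL, deliberately
   simplified): given an epilogue of the form

        (set (reg fp) (mem (plus (reg sp) (const_int 4))))
        (set (reg sp) (plus (reg sp) (const_int 8)))
        (jump_insn (return))

   processing the SP adjustment records new_sp_equiv_reg = sp (the
   incoming value) and new_sp_offset = 8 instead of emitting the insn;
   any later reference to SP is then rewritten as
   (plus (reg sp) (const_int 8)), so the stack pointer itself is never
   modified.  The old/new pairs exist because within a single PARALLEL
   one part may adjust SP while another part still uses the
   pre-adjustment value.  */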

static void handle_epilogue_set (rtx, struct epi_info *);
static void update_epilogue_consts (rtx, rtx, void *);
static void emit_equiv_load (struct epi_info *);

/* Modify INSNS, a list of one or more insns that is part of the epilogue,
   so that it makes no modifications to the stack pointer.  Return the new
   list of insns.  */

static rtx
keep_stack_depressed (rtx insns)
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
    info.const_equiv[j] = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
        {
          add_insn (insn);
          insn = next;
          continue;
        }

      /* If this insn references the register that SP is equivalent to and
         we have a pending load to that register, we must force out the load
         first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
          && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
        {
          emit_equiv_load (&info);
          info.sp_equiv_reg = 0;
        }

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;

      /* If this is a (RETURN) and the return address is on the stack,
         update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
        {
          rtx retaddr = INCOMING_RETURN_ADDR_RTX;
          rtx base = 0;
          HOST_WIDE_INT offset = 0;
          rtx jump_insn, jump_set;

          /* If the return address is in a register, we can emit the insn
             unchanged.  Otherwise, it must be a MEM and we see what the
             base register and offset are.  In any case, we have to emit any
             pending load to the equivalent reg of SP, if any.  */
          if (REG_P (retaddr))
            {
              emit_equiv_load (&info);
              add_insn (insn);
              insn = next;
              continue;
            }
          else
            {
              rtx ret_ptr;
              gcc_assert (MEM_P (retaddr));

              ret_ptr = XEXP (retaddr, 0);

              if (REG_P (ret_ptr))
                {
                  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
                  offset = 0;
                }
              else
                {
                  gcc_assert (GET_CODE (ret_ptr) == PLUS
                              && REG_P (XEXP (ret_ptr, 0))
                              && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
                  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
                  offset = INTVAL (XEXP (ret_ptr, 1));
                }
            }

          /* If the base of the location containing the return pointer
             is SP, we must update it with the replacement address.  Otherwise,
             just build the necessary MEM.  */
          retaddr = plus_constant (base, offset);
          if (base == stack_pointer_rtx)
            retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
                                            plus_constant (info.sp_equiv_reg,
                                                           info.sp_offset));

          retaddr = gen_rtx_MEM (Pmode, retaddr);
          MEM_NOTRAP_P (retaddr) = 1;

          /* If there is a pending load to the equivalent register for SP
             and we reference that register, we must load our address into
             a scratch register and then do that load.  */
          if (info.equiv_reg_src
              && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
            {
              unsigned int regno;
              rtx reg;

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (HARD_REGNO_MODE_OK (regno, Pmode)
                    && !fixed_regs[regno]
                    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
                    && !REGNO_REG_SET_P
                         (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
                    && !refers_to_regno_p (regno,
                                           regno + hard_regno_nregs[regno]
                                                                   [Pmode],
                                           info.equiv_reg_src, NULL)
                    && info.const_equiv[regno] == 0)
                  break;

              gcc_assert (regno < FIRST_PSEUDO_REGISTER);

              reg = gen_rtx_REG (Pmode, regno);
              emit_move_insn (reg, retaddr);
              retaddr = reg;
            }

          emit_equiv_load (&info);
          jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

          /* Show the SET in the above insn is a RETURN.  */
          jump_set = single_set (jump_insn);
          gcc_assert (jump_set);
          SET_IS_RETURN_P (jump_set) = 1;
        }

      /* If SP is not mentioned in the pattern and its equivalent register, if
         any, is not modified, just emit it.  Otherwise, if neither is set,
         replace the reference to SP and emit the insn.  If none of those are
         true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        {
          int changed;

          changed = validate_replace_rtx (stack_pointer_rtx,
                                          plus_constant (info.sp_equiv_reg,
                                                         info.sp_offset),
                                          insn);
          gcc_assert (changed);

          add_insn (insn);
        }
      else if (GET_CODE (PATTERN (insn)) == SET)
        handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        {
          for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
            if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
              handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
        }
      else
        add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      /* Now update any constants this insn sets.  */
      note_stores (PATTERN (insn), update_epilogue_consts, &info);
      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (rtx set, struct epi_info *p)
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from, which we must be able to determine.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      gcc_assert (SET_DEST (set) == stack_pointer_rtx);

      if (GET_CODE (SET_SRC (set)) == PLUS)
        {
          p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
          if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
            p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
          else
            {
              gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
                          && (REGNO (XEXP (SET_SRC (set), 1))
                              < FIRST_PSEUDO_REGISTER)
                          && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
              p->new_sp_offset
                = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
            }
        }
      else
        p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
        {
          p->new_sp_equiv_reg = p->sp_equiv_reg;
          p->new_sp_offset += p->sp_offset;
        }

      gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));

      return;
    }

  /* Next handle the case where we are setting SP's equivalent
     register.  We must not already have a value to set it to.  We
     could update, but there seems little point in handling that case.
     Note that we have to allow for the case where we are setting the
     register set in the previous part of a PARALLEL inside a single
     insn.  But use the old offset for any updates within this insn.
     We must also allow for the case where the register is being set
     in a different (usually wider) mode than Pmode.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      gcc_assert (!p->equiv_reg_src
                  && REG_P (p->new_sp_equiv_reg)
                  && REG_P (SET_DEST (set))
                  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
                      <= BITS_PER_WORD)
                  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
      p->equiv_reg_src
        = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                plus_constant (p->sp_equiv_reg,
                                               p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                            plus_constant (p->sp_equiv_reg,
                                                           p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
                                             plus_constant (p->sp_equiv_reg,
                                                            p->sp_offset));
      emit_insn (set);
    }
}

/* Update the tracking information for registers set to constants.  */

static void
update_epilogue_consts (rtx dest, rtx x, void *data)
{
  struct epi_info *p = (struct epi_info *) data;
  rtx new;

  if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    return;

  /* If we are either clobbering a register or doing a partial set,
     show we don't know the value.  */
  else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
    p->const_equiv[REGNO (dest)] = 0;

  /* If we are setting it to a constant, record that constant.  */
  else if (GET_CODE (SET_SRC (x)) == CONST_INT)
    p->const_equiv[REGNO (dest)] = SET_SRC (x);

  /* If this is a binary operation between a register we have been tracking
     and a constant, see if we can compute a new constant value.  */
  else if (ARITHMETIC_P (SET_SRC (x))
           && REG_P (XEXP (SET_SRC (x), 0))
           && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
           && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
           && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
           && 0 != (new = simplify_binary_operation
                    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
                     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
                     XEXP (SET_SRC (x), 1)))
           && GET_CODE (new) == CONST_INT)
    p->const_equiv[REGNO (dest)] = new;

  /* Otherwise, we can't do anything with this value.  */
  else
    p->const_equiv[REGNO (dest)] = 0;
}
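
/* A worked example of the constant tracking above (hypothetical hard
   register numbers): after (set (reg 3) (const_int 16)) we record
   const_equiv[3] = (const_int 16); a following
   (set (reg 3) (plus (reg 3) (const_int 8))) is folded by
   simplify_binary_operation to (const_int 24), which becomes the new
   entry.  handle_epilogue_set can then treat an SP adjustment by reg 3
   as an adjustment by the constant 24.  A CLOBBER or partial set of
   reg 3 resets the entry to zero.  */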

/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (struct epi_info *p)
{
  if (p->equiv_reg_src != 0)
    {
      rtx dest = p->sp_equiv_reg;

      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
        dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
                            REGNO (p->sp_equiv_reg));

      emit_move_insn (dest, p->equiv_reg_src);
      p->equiv_reg_src = 0;
    }
}
#endif
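
/* For context (an illustrative sketch, not any specific target): the
   HAVE_prologue/gen_prologue and HAVE_epilogue/gen_epilogue interfaces
   tested below exist when the target's machine description provides the
   optional named patterns, along the lines of

        (define_expand "prologue"
          [(const_int 0)]
          ""
          "{ my_expand_prologue (); DONE; }")

   where my_expand_prologue is a hypothetical target function that emits
   the actual stack-adjusting and register-saving RTL.  */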

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
         at the moment.  Function should always have at least one
         entry point.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */

      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (BB_HEAD (last) == label && LABEL_P (label))
        {
          edge_iterator ei2;
          rtx epilogue_line_note = NULL_RTX;

          /* Locate the line number associated with the closing brace,
             if we can find one.  */
          for (seq = get_last_insn ();
               seq && ! active_insn_p (seq);
               seq = PREV_INSN (seq))
            if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
              {
                epilogue_line_note = seq;
                break;
              }

          for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
            {
              basic_block bb = e->src;
              rtx jump;

              if (bb == ENTRY_BLOCK_PTR)
                {
                  ei_next (&ei2);
                  continue;
                }

              jump = BB_END (bb);
              if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
                {
                  ei_next (&ei2);
                  continue;
                }

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb, epilogue_line_note);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  if (! redirect_jump (jump, 0, 0))
                    {
                      ei_next (&ei2);
                      continue;
                    }

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (single_succ_p (bb))
                    {
                      ei_next (&ei2);
                      continue;
                    }
                }
              else
                {
                  ei_next (&ei2);
                  continue;
                }

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (BB_END (last));
          emit_return_into_block (last, epilogue_line_note);
          epilogue_end = BB_END (last);
          single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif
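
/* An illustration of the transformation above (schematic RTL): a
   predecessor block ending in

        (jump_insn (set (pc) (if_then_else (eq (reg cc) (const_int 0))
                                           (label_ref L) (pc))))

   where label L heads the empty exit fallthru block is rewritten by
   redirect_jump (jump, 0, 0) into a conditional return,

        (jump_insn (set (pc) (if_then_else (eq (reg cc) (const_int 0))
                                           (return) (pc))))

   after which the edge is redirected straight to EXIT_BLOCK_PTR.  */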

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
         it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
          && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
        seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn))
        {
          ei_next (&ei);
          continue;
        }

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  Do this before we
         actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_prologue
  /* This is probably all useless now that we use locators.  */
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
         line note after the prologue.  This means (1) that if
         there are line number notes before where we inserted the
         prologue we should move them, and (2) we should generate a
         note before the end of the first basic block, if there isn't
         one already there.

         ??? This behavior is completely broken when dealing with
         multiple entry functions.  We simply place the note always
         into the first basic block and let alternate entry points
         be missed.  */

      for (insn = prologue_end; insn; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
            {
              /* Note that we cannot reorder the first insn in the
                 chain, since rest_of_compilation relies on that
                 remaining constant.  */
              if (prev == NULL)
                break;
              reorder_insns (insn, insn, prologue_end);
            }
        }

      /* Find the last line number note in the first block.  */
      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
           insn != prologue_end && insn;
           insn = PREV_INSN (insn))
        if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
          break;

      /* If we didn't find one, make a copy of the first line number
         we run across.  */
      if (! insn)
        {
          for (insn = next_active_insn (prologue_end);
               insn;
               insn = PREV_INSN (insn))
            if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
              {
                emit_note_copy_after (insn, prologue_end);
                break;
              }
        }
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_LINE_NUMBER (insn) > 0
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif
}
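
/* After threading, the insn stream for a typical function looks
   schematically like

        prologue insns                  (UIDs recorded in `prologue')
        NOTE_INSN_PROLOGUE_END
        ... function body ...
        NOTE_INSN_EPILOGUE_BEG
        epilogue insns                  (UIDs recorded in `epilogue')

   Scheduling and reorg may later move the insns away from the notes;
   reposition_prologue_and_epilogue_notes below uses the recorded UIDs
   to put the two notes back beside the actual prologue and epilogue.  */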

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, &prologue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the prologue-end note if we haven't already, and
             move it to just after the last prologue insn.  */
          if (note == 0)
            {
              for (note = last; (note = NEXT_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if (contains (insn, &epilogue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the epilogue-begin note if we haven't already, and
             move it to just before the first epilogue insn.  */
          if (note == 0)
            {
              for (note = insn; (note = PREV_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
            }

          if (PREV_INSN (last) != note)
            reorder_insns (note, note, PREV_INSN (last));
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Resets the insn-to-block boundaries array (cfun->ib_boundaries_block).  */

void
reset_block_changes (void)
{
  cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
  VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
}

/* Record the boundary for BLOCK.  */
void
record_block_change (tree block)
{
  int i, n;
  tree last_block;

  if (!block)
    return;

  if (!cfun->ib_boundaries_block)
    return;

  last_block = VEC_pop (tree, cfun->ib_boundaries_block);
  n = get_max_uid ();
  for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
    VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);

  VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
}

/* Finishes the record of boundaries.  */
void
finalize_block_changes (void)
{
  record_block_change (DECL_INITIAL (current_function_decl));
}

/* For INSN, store in *BLOCK the lexical BLOCK it belongs to.  */
void
check_block_change (rtx insn, tree *block)
{
  unsigned uid = INSN_UID (insn);

  if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
    return;

  *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
}

/* Releases the ib_boundaries_block records.  */
void
free_block_changes (void)
{
  VEC_free (tree, gc, cfun->ib_boundaries_block);
}
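
/* Typical use of the block-boundary interface above (schematic; the
   real callers live in the RTL expanders): reset_block_changes () once
   at the start of a function, record_block_change (block) whenever
   expansion enters a new lexical BLOCK, and finalize_block_changes ()
   at the end.  Afterward, check_block_change (insn, &block) maps an
   insn back to the BLOCK that was active when the insn was generated,
   keyed by INSN_UID.  */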

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}
\f

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */
static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    used_types_insert_helper (t, cfun);
}
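
/* For example (hypothetical declaration): recording the type of a
   variable declared as "int **a[10]" strips the ARRAY_TYPE and both
   POINTER_TYPEs, so the entry actually hashed is the main variant of
   "int".  The table thus ends up with one entry per base type the
   function really uses, which debug-info generation can consult.  */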

struct tree_opt_pass pass_leaf_regs =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

#include "gt-function.h"