/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
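
/* Illustrative sketch, not part of the original file: the calling protocol
   described above, as a front end would follow it.  The two entry points
   are real functions defined later in this file; the surrounding fragment
   is hypothetical and kept out of compilation.  */
#if 0
  expand_function_start (fndecl);   /* before expanding the function body */
  /* ... generate RTL for the body of FNDECL ... */
  expand_function_end ();           /* after the body has been expanded */
#endif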

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
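
/* Illustrative sketch, not part of the original file: how the two rounding
   macros above behave for a hypothetical 16-byte alignment.  Everything is
   plain masking, so negative frame offsets round correctly too.  */
#if 0
#include <assert.h>
static void
example_rounding (void)
{
  assert (FLOOR_ROUND (37, 16) == 32);    /* round down to a multiple of 16 */
  assert (CEIL_ROUND (37, 16) == 48);     /* round up to a multiple of 16 */
  assert (FLOOR_ROUND (-37, 16) == -48);  /* masking avoids negative division */
}
#endif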

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
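
/* Illustrative sketch, not part of the original file: the nesting-level
   protocol described in the comment above, as a caller would use it.  The
   entry points are all defined later in this file; the wrapper function is
   hypothetical and kept out of compilation.  */
#if 0
static void
example_statement_temporaries (void)
{
  rtx tmp;

  push_temp_slots ();            /* open a new nesting level */
  tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit code that stores the statement's value into TMP ... */
  preserve_temp_slots (tmp);     /* keep TMP alive one level up */
  pop_temp_slots ();             /* free all other slots at this level */
}
#endif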
\f
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
  if (FRAME_GROWS_DOWNWARD)
    return -f->x_frame_offset;
  else
    return f->x_frame_offset;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    function->x_frame_offset -= size;

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        function->x_frame_offset
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        function->x_frame_offset
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    function->x_frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  /* Try to detect frame size overflows on native platforms.  */
#if BITS_PER_WORD >= 32
  if ((FRAME_GROWS_DOWNWARD
       ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
       : (unsigned HOST_WIDE_INT) function->x_frame_offset)
      > ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1))
        /* Leave room for the fixed part of the frame.  */
        - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", function->decl);
      /* Avoid duplicate error messages as much as possible.  */
      function->x_frame_offset = 0;
    }
#endif

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
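
/* Illustrative sketch, not part of the original file: typical requests a
   backend might make through assign_stack_local, showing the meaning of
   the ALIGN argument documented above.  Kept out of compilation.  */
#if 0
static void
example_stack_slots (void)
{
  /* A word-sized spill slot, aligned according to its mode (ALIGN == 0).  */
  rtx spill = assign_stack_local (word_mode, UNITS_PER_WORD, 0);

  /* A 64-byte BLKmode buffer; ALIGN == -1 uses BIGGEST_ALIGNMENT and also
     rounds the size up to a multiple of it.  */
  rtx buf = assign_stack_local (BLKmode, 64, -1);
}
#endif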

\f
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
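
/* Illustrative sketch, not part of the original file: two typical uses of
   assign_temp.  A small scalar type normally yields a pseudo register;
   forcing MEMORY_REQUIRED yields an addressable stack slot instead.  The
   wrapper function and its parameters are hypothetical.  */
#if 0
static void
example_assign_temp (tree int_type, tree struct_type)
{
  /* Scalar, register OK: usually returns a new pseudo.  */
  rtx r = assign_temp (int_type, 0, 0, 0);

  /* Aggregate, or a value whose address we need: stack memory.  */
  rtx m = assign_temp (struct_type, 0, 1, 0);
}
#endif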
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
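
/* Illustrative worked example, not part of the original file: the adjacency
   test used above.  Given two free BLKmode slots P and Q with
     P->base_offset == 0,  P->full_size == 16,
     Q->base_offset == 16, Q->full_size == 32,
   then P->base_offset + P->full_size == Q->base_offset, so Q is merged
   into P, leaving one slot with base_offset 0 and full_size 48.  */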
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
((ACCUMULATE_OUTGOING_ARGS						\
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	\
 + (STACK_POINTER_OFFSET))
#endif
#endif

\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
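
/* Illustrative sketch, not part of the original file: what the mapping in
   instantiate_new_reg amounts to for a frame-variable reference.  Kept out
   of compilation.  */
#if 0
static void
example_instantiate (void)
{
  HOST_WIDE_INT off;
  rtx hard = instantiate_new_reg (virtual_stack_vars_rtx, &off);
  /* HARD is now frame_pointer_rtx and OFF is var_offset, so an address
     (plus virtual_stack_vars_rtx (const_int 8)) rewrites to
     (plus frame_pointer_rtx (const_int off + 8)).  */
}
#endif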

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
        {
          *loc = plus_constant (new, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
        {
          new = plus_constant (new, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
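
/* Illustrative sketch, not part of the original file: driving the walker
   above over a single address by hand, the way the MEM case of
   instantiate_virtual_regs_in_insn does below.  Kept out of compilation.  */
#if 0
static rtx
example_rewrite_address (rtx addr)
{
  bool changed = false;
  for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
  /* CHANGED is true iff some virtual register inside ADDR was replaced
     by its hard-register-plus-offset equivalent.  */
  return addr;
}
#endif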

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new);
          if (x != new)
            emit_move_insn (new, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && GET_CODE (recog_data.operand[2]) == CONST_INT
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new = instantiate_new_reg (x, &offset);
          if (new == NULL)
            continue;
          if (offset == 0)
            x = new;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
                                         GEN_INT (offset), NULL_RTX,
                                         1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
                                   GET_MODE (new), SUBREG_BYTE (x));
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        x = force_reg (insn_data[insn_code].operand[i].mode, x);

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = recog_data.operand[(unsigned)recog_data.dup_num[i]];

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl (XEXP (x, 0));
      instantiate_decl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
434eba35
JJ
1601/* Helper for instantiate_decls called via walk_tree: Process all decls
1602 in the given DECL_VALUE_EXPR. */
1603
1604static tree
1605instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1606{
1607 tree t = *tp;
1608 if (! EXPR_P (t))
1609 {
1610 *walk_subtrees = 0;
1611 if (DECL_P (t) && DECL_RTL_SET_P (t))
1612 instantiate_decl (DECL_RTL (t));
1613 }
1614 return NULL;
1615}
1616
bbf9b913
RH
1617/* Subroutine of instantiate_decls: Process all decls in the given
1618 BLOCK node and all its subblocks. */
6f086dfc 1619
bbf9b913
RH
1620static void
1621instantiate_decls_1 (tree let)
1622{
1623 tree t;
6f086dfc 1624
bbf9b913 1625 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
434eba35
JJ
1626 {
1627 if (DECL_RTL_SET_P (t))
1628 instantiate_decl (DECL_RTL (t));
1629 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1630 {
1631 tree v = DECL_VALUE_EXPR (t);
1632 walk_tree (&v, instantiate_expr, NULL, NULL);
1633 }
1634 }
6f086dfc 1635
bbf9b913
RH
1636 /* Process all subblocks. */
1637 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1638 instantiate_decls_1 (t);
1639}
6f086dfc 1640
bbf9b913
RH
1641/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1642 all virtual registers in their DECL_RTL's. */
6f086dfc 1643
bbf9b913
RH
1644static void
1645instantiate_decls (tree fndecl)
1646{
1647 tree decl;
6f086dfc 1648
bbf9b913
RH
1649 /* Process all parameters of the function. */
1650 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1651 {
1652 instantiate_decl (DECL_RTL (decl));
1653 instantiate_decl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1654 if (DECL_HAS_VALUE_EXPR_P (decl))
1655 {
1656 tree v = DECL_VALUE_EXPR (decl);
1657 walk_tree (&v, instantiate_expr, NULL, NULL);
1658 }
bbf9b913 1659 }
4fd796bb 1660
bbf9b913
RH
1661 /* Now process all variables defined in the function or its subblocks. */
1662 instantiate_decls_1 (DECL_INITIAL (fndecl));
1663}
6f086dfc 1664
bbf9b913
RH
1665/* Pass through the INSNS of function FNDECL and convert virtual register
1666 references to hard register references. */
6f086dfc 1667
c2924966 1668static unsigned int
bbf9b913
RH
1669instantiate_virtual_regs (void)
1670{
1671 rtx insn;
6f086dfc 1672
bbf9b913
RH
1673 /* Compute the offsets to use for this function. */
1674 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1675 var_offset = STARTING_FRAME_OFFSET;
1676 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1677 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1678#ifdef FRAME_POINTER_CFA_OFFSET
1679 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1680#else
bbf9b913 1681 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1682#endif
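
  /* Illustration (a sketch, assuming a frame-pointer target): each
     offset above pairs a virtual register with its replacement, e.g.
     a use of virtual-stack-vars becomes frame_pointer_rtx plus
     var_offset, and a use of virtual-outgoing-args becomes
     stack_pointer_rtx plus out_arg_offset, with the additions folded
     into existing constant displacements where possible.  */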

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;

        instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}

struct tree_opt_pass pass_instantiate_virtual_regs =
{
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

\f
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (tree exp, tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        fntype = get_callee_fndecl (fntype);
        fntype = fntype ? TREE_TYPE (fntype) : 0;
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = 0;
        break;
      default:
        /* We don't expect other tree codes here.  */
        gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;
  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;
  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
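
/* Illustration (a sketch, assuming a typical register-return target):
   an "int" result lands in a call-clobbered register, so the function
   answers 0 for it, while a large "struct { int a[10]; }" makes the
   target's return_in_memory hook answer true and the result is 1.  */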
\f
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (tree decl)
{
  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}
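
/* Illustration (a sketch): "volatile int v;" fails the TREE_SIDE_EFFECTS
   test above and stays on the stack, while a plain "int i;" compiled
   with optimization enabled reaches the final return and gets a pseudo.  */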

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}

/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx conversion_insns;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}

/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions were
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);

          /* Build a second synthetic decl.  */
          decl = build_decl (PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);

          /* Splice it in; skip the new decl.  */
          TREE_CHAIN (decl) = TREE_CHAIN (p);
          TREE_CHAIN (p) = decl;
          p = decl;
        }
    }

  return args;
}
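
/* Illustration (a sketch, assuming the target's split_complex_arg hook
   accepts the type): for "void f (__complex__ double z)" the PARM_DECL
   for Z is rewritten to type double and a second synthetic double
   PARM_DECL is spliced in after it, so the list carries the real and
   imaginary parts as separate scalar parms.  */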

/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! current_function_returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}

/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!current_function_stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (TREE_CODE (passed_type) == UNION_TYPE
      && TYPE_TRANSPARENT_UNION (passed_type))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
                                    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}

/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}

/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

#ifdef FUNCTION_INCOMING_ARG
  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
                                      data->passed_type, data->named_arg);
#else
  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
                             data->passed_type, data->named_arg);
#endif

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
        {
          rtx tem;
#ifdef FUNCTION_INCOMING_ARG
          tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
                                       data->passed_type, true);
#else
          tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
                              data->passed_type, true);
#endif
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
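
/* Worked example (illustrative numbers): if the first stack argument
   had 4 of its bytes passed in a register (partial == 4) and
   STACK_BYTES is 16, pretend_args_size becomes CEIL_ROUND (4, 16)
   == 16, and the final adjustment above shifts both locate offsets by
   the remaining 12 bytes of rounding.  */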

/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we have no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}

/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = current_function_internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  set_mem_attributes (stack_parm, parm, 1);

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (GET_CODE (offset_rtx) == CONST_INT)
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
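
/* Worked example (illustrative numbers): for a CONST_INT offset of 4
   bytes and boundary == 64, align = (4 * 8) | 64 == 96, and
   96 & -96 == 32, so the slot can only be assumed 32-bit aligned.  */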

/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (cfun->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}

/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_CODE (data->entry_parm) == PARALLEL)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}

/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;
  rtx orig_entry_parm = entry_parm;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we have a non-block object that's nevertheless passed in parts,
     reconstitute it in register operations rather than on the stack.  */
  if (GET_CODE (entry_parm) == PARALLEL
      && data->nominal_mode != BLKmode)
    {
      rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);

      if ((XVECLEN (entry_parm, 0) > 1
           || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
          && use_register_for_decl (parm))
        {
          rtx parmreg = gen_reg_rtx (data->nominal_mode);

          push_to_sequence (all->conversion_insns);

          /* For values returned in multiple registers, handle possible
             incompatible calls to emit_group_store.

             For example, the following would be invalid, and would have to
             be fixed by the conditional below:

               emit_group_store ((reg:SF), (parallel:DF))
               emit_group_store ((reg:SI), (parallel:DI))

             An example of this are doubles in e500 v2:
               (parallel:DF (expr_list (reg:SI) (const_int 0))
                            (expr_list (reg:SI) (const_int 4))).  */
          if (data->nominal_mode != data->passed_mode)
            {
              rtx t = gen_reg_rtx (GET_MODE (entry_parm));
              emit_group_store (t, entry_parm, NULL_TREE,
                                GET_MODE_SIZE (GET_MODE (entry_parm)));
              convert_move (parmreg, t, 0);
            }
          else
            emit_group_store (parmreg, entry_parm, data->nominal_type,
                              int_size_in_bytes (data->nominal_type));

          all->conversion_insns = get_insns ();
          end_sequence ();

          SET_DECL_RTL (parm, parmreg);
          return;
        }
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence (all->conversion_insns);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->conversion_insns = get_insns ();
          end_sequence ();
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          enum machine_mode mode
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
              )
            {
              rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == downward)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                build_int_cst (NULL_TREE, by),
                                NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
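
              /* Worked example (illustrative numbers): for SIZE == 3
                 with 4-byte words, BY == (4 - 3) * 8 == 8, so the three
                 significant bytes end up at the most significant end of
                 the word before the store.  */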
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence (all->conversion_insns);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->conversion_insns = get_insns ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}

/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                       struct assign_parm_data_one *data)
{
  rtx parmreg;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  */

  /* This is not really promoting for a call.  However we need to be
     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  /* Copy the value into the register.  */
  if (data->nominal_mode != data->passed_mode
      || promoted_nominal_mode != data->promoted_mode)
    {
      int save_tree_used;

      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
         mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.

         If ENTRY_PARM is a hard register, it might be in a register
         not valid for operating in its mode (e.g., an odd-numbered
         register for a DFmode).  In that case, moves are the only
         thing valid, so we can't do a convert from there.  This
         occurs when the calling sequence allows such misaligned
         usages.

         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
         registers yet.  Therefore, we must first copy the parm to
         a pseudo reg here, and save the conversion until after all
         parameters have been moved.  */

      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence (all->conversion_insns);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
          && GET_MODE (tempreg) == data->nominal_mode
          && REG_P (SUBREG_REG (tempreg))
          && data->nominal_mode == data->passed_mode
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
          && GET_MODE_SIZE (GET_MODE (tempreg))
             < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
        {
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
        }

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg));
      TREE_USED (parm) = save_tree_used;
      all->conversion_insns = get_insns ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validize_mem (data->entry_parm));

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
          || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
         Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
        {
          rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

          push_to_sequence (all->conversion_insns);
          emit_move_insn (tempreg, DECL_RTL (parm));
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
          emit_move_insn (parmreg, tempreg);
          all->conversion_insns = get_insns ();
          end_sequence ();

          did_conversion = true;
        }
      else
        emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
         now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
                          XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
        {
          enum machine_mode submode
            = GET_MODE_INNER (GET_MODE (parmreg));
          int regnor = REGNO (XEXP (parmreg, 0));
          int regnoi = REGNO (XEXP (parmreg, 1));
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
                                          GET_MODE_SIZE (submode));

          /* Scan backwards for the set of the real and
             imaginary parts.  */
          for (sinsn = linsn; sinsn != 0;
               sinsn = prev_nonnote_insn (sinsn))
            {
              set = single_set (sinsn);
              if (set == 0)
                continue;

              if (SET_DEST (set) == regno_reg_rtx [regnoi])
                REG_NOTES (sinsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
                                       REG_NOTES (sinsn));
              else if (SET_DEST (set) == regno_reg_rtx [regnor])
                REG_NOTES (sinsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
                                       REG_NOTES (sinsn));
            }
        }
      else if ((set = single_set (linsn)) != 0
               && SET_DEST (set) == parmreg)
        REG_NOTES (linsn)
          = gen_rtx_EXPR_LIST (REG_EQUIV,
                               data->stack_parm, REG_NOTES (linsn));
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}

/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
                         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence (all->conversion_insns);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
        /* ??? This may need a big-endian conversion on sparc64.  */
        data->stack_parm
          = adjust_address (data->stack_parm, data->nominal_mode, 0);
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
        {
          data->stack_parm
            = assign_stack_local (GET_MODE (data->entry_parm),
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
                                  TYPE_ALIGN (data->passed_type));
          set_mem_attributes (data->stack_parm, parm, 1);
        }

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
        {
          /* Use a block move to handle potentially misaligned entry_parm.  */
          if (!to_conversion)
            push_to_sequence (all->conversion_insns);
          to_conversion = true;

          emit_block_move (dest, src,
                           GEN_INT (int_size_in_bytes (data->passed_type)),
                           BLOCK_OP_NORMAL);
        }
      else
        emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->conversion_insns = get_insns ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}

/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (fnargs);
          imag = DECL_RTL (TREE_CHAIN (fnargs));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size,
                                        TYPE_ALIGN (TREE_TYPE (parm)));
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence (all->conversion_insns);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->conversion_insns = get_insns ();
              end_sequence ();
            }
          else
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          SET_DECL_RTL (parm, tmp);

          real = DECL_INCOMING_RTL (fnargs);
          imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp);
          fnargs = TREE_CHAIN (fnargs);
        }
      else
        {
          SET_DECL_RTL (parm, DECL_RTL (fnargs));
          set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));

          /* Set MEM_EXPR to the original decl, i.e. to PARM,
             instead of the copy of decl, i.e. FNARGS.  */
          if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
            set_mem_expr (DECL_INCOMING_RTL (parm), parm);
        }

      fnargs = TREE_CHAIN (fnargs);
    }
}

/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree fnargs, parm;

  current_function_internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      if (current_function_stdarg && !TREE_CHAIN (parm))
        assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }

      /* Record permanently how this parm was passed.  */
      set_decl_incoming_rtl (parm, data.entry_parm);

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
                            data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
        assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
        assign_parm_setup_reg (&all, parm, &data);
      else
        assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
    assign_parms_unsplit_complex (&all, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.conversion_insns);

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        x = addr;
      else
        {
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }
      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  current_function_pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  current_function_args_size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
                                    REG_PARM_STACK_SPACE (fndecl));
#endif

  current_function_args_size = CEIL_ROUND (current_function_args_size,
                                           PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
                                   size_int (-all.stack_args_size.constant)),
                      NULL_RTX, VOIDmode, 0));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
                                                 current_function_args_size);

  /* For a stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

          real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
                                                        fndecl, true);
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that current_function_return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          current_function_return_rtx = real_decl_rtl;
        }
    }
}

/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (tree *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}

/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a list of
   statements to add to the beginning of the function, or NULL if nothing
   to do.  */

tree
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree fnargs, parm, stmts = NULL;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
                            data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (!TREE_CONSTANT (DECL_SIZE (parm)))
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CONSTANT (DECL_SIZE (parm)))
                {
                  local = create_tmp_var (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                }
              else
                {
                  tree ptr_type, addr, args;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_var (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
                  t = built_in_decls[BUILT_IN_ALLOCA];
                  t = build_function_call_expr (t, args);
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
                  gimplify_and_add (t, &stmts);
                }

              t = build2 (MODIFY_EXPR, void_type_node, local, parm);
              gimplify_and_add (t, &stmts);

              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
            }
        }
    }

  return stmts;
}
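
/* Illustrative sketch (not part of the original source): for a
   callee-copied reference parameter P of variable-sized type T, the
   loop above conceptually emits, at the start of the function body,

       T *P.addr = (T *) __builtin_alloca (sizeof (T));
       *P.addr = *P;

   and then makes P stand for *P.addr via SET_DECL_VALUE_EXPR, so the
   rest of the function operates on the local copy rather than on the
   caller's object.  */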
\f
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

rtx
promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (REG_P (DECL_INCOMING_RTL (arg))
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}

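/* Illustrative, target-dependent example (an assumption, not taken from
   this file): on a target whose PROMOTE_MODE widens HImode arguments to
   SImode, a `short' parameter arriving in register N has DECL_MODE
   HImode but SImode incoming rtl; promoted_input_arg (N, &mode,
   &unsignedp) then returns that incoming REG with *pmode == HImode and
   *punsignedp reflecting the signedness of `short'.  */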
\f
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specified boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
                     int partial, tree fndecl ATTRIBUTE_UNUSED,
                     struct args_size *initial_offset_ptr,
                     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  locate->where_pad = where_pad;
  locate->boundary = boundary;

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (boundary > PREFERRED_STACK_BOUNDARY)
    boundary = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < boundary)
    cfun->stack_alignment_needed = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                          initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
        && (!host_integerp (sizetree, 1)
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
                          &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
                           - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
                                   size_binop (MINUS_EXPR,
                                               ssize_int (0),
                                               initial_offset_ptr->var),
                                   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
                          &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}

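/* Worked example (illustrative): assume args grow upward,
   PARM_BOUNDARY == 32, BITS_PER_UNIT == 8, and a 6-byte BLKmode parm
   with where_pad != none arriving at initial offset 10.  With a 32-bit
   FUNCTION_ARG_BOUNDARY, pad_to_arg_alignment first moves the offset
   from 10 up to 12, so locate->slot_offset.constant == 12; round_up
   then grows the 6-byte size to 8, so locate->size.constant == 8.  */
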
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
                      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          tree sp_offset_tree = ssize_int (sp_offset);
          tree offset = size_binop (PLUS_EXPR,
                                    ARGS_SIZE_TREE (*offset_ptr),
                                    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
          tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
          tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}

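/* Illustrative arithmetic (assumed values): with STACK_POINTER_OFFSET == 4,
   boundary_in_bytes == 8, and args growing upward, a constant offset of 6
   is rounded relative to %sp + STACK_POINTER_OFFSET:
   -4 + CEIL_ROUND (6 + 4, 8) == -4 + 16 == 12.  */
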
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
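
/* Worked example (illustrative): padding a HImode (16-bit) parm below
   with PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8 adds
   ((16 + 31) / 32 * 32 / 8) - 2 == 4 - 2 == 2 bytes to the offset, so
   the 2-byte value ends up at the top of its 4-byte slot.  */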
\f
/* Walk the tree of blocks describing the binding levels within a function
   and warn about variables that might be killed by setjmp or vfork.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
setjmp_vars_warning (tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
        warning (0, "variable %q+D might be clobbered by %<longjmp%>"
                 " or %<vfork%>",
                 decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_vars_warning (sub);
}

/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning (void)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}

\f
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *block_stack;

  if (block == NULL_TREE)
    return;

  block_stack = VEC_alloc (tree, heap, 10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* Remove deleted blocks from the block fragment chains.  */
  reorder_fix_fragments (block);

  VEC_free (tree, heap, block_stack);
}

/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}

static void
reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        {
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);
                  tree origin;

                  origin = (BLOCK_FRAGMENT_ORIGIN (block)
                            ? BLOCK_FRAGMENT_ORIGIN (block)
                            : block);
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this; it
                 would cause infinite recursion.  */
              if (block != current_block)
                {
                  BLOCK_SUPERCONTEXT (block) = current_block;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = block;
                }
              VEC_safe_push (tree, heap, *p_block_stack, block);
            }
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
              BLOCK_SUBBLOCKS (current_block)
                = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
              current_block = BLOCK_SUPERCONTEXT (current_block);
            }
        }
    }
}

/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
   appears in the block tree, select one of the fragments to become
   the new origin block.  */

static void
reorder_fix_fragments (tree block)
{
  while (block)
    {
      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
      tree new_origin = NULL_TREE;

      if (dup_origin)
        {
          if (! TREE_ASM_WRITTEN (dup_origin))
            {
              new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);

              /* Find the first of the remaining fragments.  There must
                 be at least one -- the current block.  */
              while (! TREE_ASM_WRITTEN (new_origin))
                new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
              BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
            }
        }
      else if (! dup_origin)
        new_origin = block;

      /* Re-root the rest of the fragments to the new origin.  In the
         case that DUP_ORIGIN was null, that means BLOCK was the origin
         of a chain of fragments and we want to remove those fragments
         that didn't make it to the output.  */
      if (new_origin)
        {
          tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
          tree chain = *pp;

          while (chain)
            {
              if (TREE_ASM_WRITTEN (chain))
                {
                  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
                  *pp = chain;
                  pp = &BLOCK_FRAGMENT_CHAIN (chain);
                }
              chain = BLOCK_FRAGMENT_CHAIN (chain);
            }
          *pp = NULL_TREE;
        }

      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}

/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}

/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}

static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}

/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
\f
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  */

void
allocate_struct_function (tree fndecl)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared (sizeof (struct function));

  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;

  current_function_funcdef_no = funcdef_no++;

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_eh_for_function ();

  lang_hooks.function.init (cfun);
  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

  if (fndecl == NULL)
    return;

  DECL_STRUCT_FUNCTION (fndecl) = cfun;
  cfun->decl = fndecl;

  result = DECL_RESULT (fndecl);
  if (aggregate_value_p (result, fndecl))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));

  current_function_stdarg
    = (fntype
       && TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
           != void_type_node));

  /* Assume all registers in stdarg functions need to be saved.  */
  cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
  cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
}

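/* Illustrative note (not in the original source): the stdarg test above
   keys off the prototype's TYPE_ARG_TYPES list.  For `int f (int, ...)'
   the list does not end in void_type_node, so current_function_stdarg
   is set; for `int g (int)' it ends in void_type_node, and for the
   unprototyped `int h ()' TYPE_ARG_TYPES is 0, so in both of those
   cases it is not set.  */
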
/* Reset cfun, and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (tree fndecl)
{
  if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
    cfun = DECL_STRUCT_FUNCTION (fndecl);
  else
    allocate_struct_function (fndecl);
  init_emit ();
  init_varasm_status (cfun);
  init_expr ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}

/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */
void
init_dummy_function_start (void)
{
  prepare_function_start (NULL);
}

/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  prepare_function_start (subr);

  /* Prevent ever trying to delete the first instruction of a
     function.  Also tell final how to output a linenum before the
     function prologue.  Note linenums could be missing, e.g. when
     compiling a Java .class file.  */
  if (! DECL_IS_BUILTIN (subr))
    emit_line_note (DECL_SOURCE_LOCATION (subr));

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}

/* Make sure all values used by the optimization passes have sane
   defaults.  */
unsigned int
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  Make sure that these vectors are
     empty.  */
  gcc_assert (VEC_length (int, prologue) == 0);
  gcc_assert (VEC_length (int, epilogue) == 0);
  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
  return 0;
}

struct tree_opt_pass pass_init_function =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  init_function_for_compilation,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};


void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
\f
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     cfun->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}

/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test	0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     cfun->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
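
/* Conceptual sketch (illustrative; the guard and failure routine names
   come from the default target hooks and may differ per target):

       canary = __stack_chk_guard;        -- stack_protect_prologue
       ...function body...
       if (canary != __stack_chk_guard)   -- stack_protect_epilogue
         __stack_chk_fail ();

   The copy and comparison are done on stack slots so the guard value
   does not linger in a register.  */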
\f
/* Start the RTL for a new function, and set variables used for
   emitting RTL.  SUBR is the FUNCTION_DECL node.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  current_function_profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
         before the frame variable gets declared.  Help out...  */
      expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));

      t_save = build4 (ARRAY_REF, ptr_type_node,
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, virtual_stack_vars_rtx);
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  if (!NOTE_P (get_last_insn ()))
    emit_note (NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
\f
/* Undo the effects of init_dummy_function_start.  */
void
expand_dummy_function_end (void)
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use a pseudo to return the value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        {
          do_clobber_return_reg (decl_rtl, NULL);
        }
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}

/* Possibly warn about unused parameters.  */
void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}

static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
    get_arg_pointer_save_area (cfun);

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insn_before (seq, tail_recursion_reentry);
            break;
          }
    }

  /* Possibly warn about unused parameters.
     When the frontend does unit-at-a-time, the warning is already
     issued at finalization time.  */
  if (warn_unused_parameter
      && !lang_hooks.callgraph.expand_function)
    do_warn_unused_parameter (current_function_decl);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  emit_line_note (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation until after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (USING_SJLJ_EXCEPTIONS)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* @@@ This is a kludge.  We want to ensure that instructions that
         may trap are not moved into the epilogue by scheduling, because
         we don't always emit unwind information for the epilogue.
         However, not all machine descriptions define a blockage insn, so
         emit an ASM_INPUT to act as one.  */
      if (flag_non_call_exceptions)
        emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = current_function_return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that current_function_return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             extension.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

              if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
                promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
                              &unsignedp, 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show the return register used to hold the result (in this case
         the address of the result).  */
      current_function_return_rtx = outgoing;
    }

  /* Emit the actual code to clobber the return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);

  /* If stack protection is enabled for this function, check the guard.  */
  if (cfun->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && current_function_calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (e.g. reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
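
/* Illustrative note (an assumption about one possible target): when
   targetm.calls.return_in_msb is true for a small scalar result, the
   code above first moves the pseudo into the low part of the hard
   return register and then shift_return_value shifts it toward the
   most significant end, which is where such a target's ABI expects
   the value to sit.  */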

rtx
get_arg_pointer_save_area (struct function *f)
{
  rtx ret = f->x_arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
      f->x_arg_pointer_save_area = ret;
    }

  if (f == cfun && ! f->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
\f
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}

/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
              == VEC_index (int, *vec, j))
            count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
        if (INSN_UID (insn) == VEC_index (int, *vec, j))
          return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}

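/* Illustrative example (not from the original source): after reorg a
   delay-slot SEQUENCE may package a branch together with its delay
   insn; for such an insn, contains () checks the INSN_UID of every
   element of the SEQUENCE against the recorded vector and returns how
   many of them were recorded, rather than just 0 or 1.  */
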
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb, rtx line_note)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
  if (line_note)
    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */

4703#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4704
535a42b1
NS
4705/* These functions convert the epilogue into a variant that does not
4706 modify the stack pointer. This is used in cases where a function
4707 returns an object whose size is not known until it is computed.
4708 The called function leaves the object on the stack, leaves the
4709 stack depressed, and returns a pointer to the object.
4710
4711 What we need to do is track all modifications and references to the
4712 stack pointer, deleting the modifications and changing the
4713 references to point to the location the stack pointer would have
4714 pointed to had the modifications taken place.
4715
4716 These functions need to be portable so we need to make as few
4717 assumptions about the epilogue as we can. However, the epilogue
4718 basically contains three things: instructions to reset the stack
4719 pointer, instructions to reload registers, possibly including the
4720 frame pointer, and an instruction to return to the caller.
4721
4722 We must be sure of what a relevant epilogue insn is doing. We also
4723 make no attempt to validate the insns we make since if they are
4724 invalid, we probably can't do anything valid. The intent is that
4725 these routines get "smarter" as more and more machines start to use
4726 them and they try operating on different epilogues.
4727
4728 We use the following structure to track what the part of the
4729 epilogue that we've already processed has done. We keep two copies
4730 of the SP equivalence, one for use during the insn we are
4731 processing and one for use in the next insn. The difference is
4732 because one part of a PARALLEL may adjust SP and the other may use
4733 it. */
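
/* A worked sketch (hypothetical epilogue; the frame layout and the
   offset are assumptions, not taken from any real target): given

     (set (reg fp) (mem (reg sp)))
     (set (reg sp) (plus (reg sp) (const_int 8)))
     (return)    ;; return address assumed at (mem (reg sp))

   the code below emits the FP restore unchanged, records "SP = old
   SP + 8" instead of emitting the addition, rewrites the return
   address to (mem (plus (reg sp) (const_int 8))), and replaces the
   RETURN with an indirect jump through that address, so SP itself is
   never modified.  */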
3258e996
RK
4734
4735struct epi_info
4736{
4737 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4738 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
3ef42a0c 4739 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
3258e996
RK
4740 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4741 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4742 should be set to once we no longer need
4743 its value. */
f285d67b
RK
4744 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4745 for registers. */
3258e996
RK
4746};
4747
fa8db1f7 4748static void handle_epilogue_set (rtx, struct epi_info *);
80fcc7bc 4749static void update_epilogue_consts (rtx, rtx, void *);
fa8db1f7 4750static void emit_equiv_load (struct epi_info *);
7393c642 4751
2f937369
DM
 4752/* Modify INSNS, a list of one or more insns that is part of the epilogue,
 4753 so that it makes no modifications to the stack pointer. Return the new list of insns. */
7393c642 4754
3258e996 4755static rtx
fa8db1f7 4756keep_stack_depressed (rtx insns)
7393c642 4757{
2f937369 4758 int j;
3258e996 4759 struct epi_info info;
2f937369 4760 rtx insn, next;
7393c642 4761
f285d67b 4762 /* If the epilogue is just a single instruction, it must be OK as is. */
2f937369
DM
4763 if (NEXT_INSN (insns) == NULL_RTX)
4764 return insns;
7393c642 4765
3258e996
RK
4766 /* Otherwise, start a sequence, initialize the information we have, and
4767 process all the insns we were given. */
4768 start_sequence ();
4769
4770 info.sp_equiv_reg = stack_pointer_rtx;
4771 info.sp_offset = 0;
4772 info.equiv_reg_src = 0;
7393c642 4773
f285d67b
RK
4774 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4775 info.const_equiv[j] = 0;
4776
2f937369
DM
4777 insn = insns;
4778 next = NULL_RTX;
4779 while (insn != NULL_RTX)
7393c642 4780 {
2f937369 4781 next = NEXT_INSN (insn);
7393c642 4782
3258e996
RK
4783 if (!INSN_P (insn))
4784 {
4785 add_insn (insn);
2f937369 4786 insn = next;
3258e996
RK
4787 continue;
4788 }
7393c642 4789
3258e996
RK
4790 /* If this insn references the register that SP is equivalent to and
4791 we have a pending load to that register, we must force out the load
4792 first and then indicate we no longer know what SP's equivalent is. */
4793 if (info.equiv_reg_src != 0
4794 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7393c642 4795 {
3258e996
RK
4796 emit_equiv_load (&info);
4797 info.sp_equiv_reg = 0;
4798 }
7393c642 4799
3258e996
RK
4800 info.new_sp_equiv_reg = info.sp_equiv_reg;
4801 info.new_sp_offset = info.sp_offset;
7393c642 4802
3258e996
RK
4803 /* If this is a (RETURN) and the return address is on the stack,
4804 update the address and change to an indirect jump. */
4805 if (GET_CODE (PATTERN (insn)) == RETURN
4806 || (GET_CODE (PATTERN (insn)) == PARALLEL
4807 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4808 {
4809 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4810 rtx base = 0;
4811 HOST_WIDE_INT offset = 0;
4812 rtx jump_insn, jump_set;
4813
4814 /* If the return address is in a register, we can emit the insn
4815 unchanged. Otherwise, it must be a MEM and we see what the
4816 base register and offset are. In any case, we have to emit any
4817 pending load to the equivalent reg of SP, if any. */
f8cfc6aa 4818 if (REG_P (retaddr))
3258e996
RK
4819 {
4820 emit_equiv_load (&info);
4821 add_insn (insn);
2f937369 4822 insn = next;
3258e996
RK
4823 continue;
4824 }
0bccc606 4825 else
3258e996 4826 {
0bccc606
NS
4827 rtx ret_ptr;
4828 gcc_assert (MEM_P (retaddr));
4829
4830 ret_ptr = XEXP (retaddr, 0);
4831
4832 if (REG_P (ret_ptr))
4833 {
4834 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4835 offset = 0;
4836 }
4837 else
4838 {
4839 gcc_assert (GET_CODE (ret_ptr) == PLUS
4840 && REG_P (XEXP (ret_ptr, 0))
4841 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4842 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4843 offset = INTVAL (XEXP (ret_ptr, 1));
4844 }
3258e996 4845 }
3258e996
RK
4846
4847 /* If the base of the location containing the return pointer
4848 is SP, we must update it with the replacement address. Otherwise,
4849 just build the necessary MEM. */
4850 retaddr = plus_constant (base, offset);
4851 if (base == stack_pointer_rtx)
4852 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4853 plus_constant (info.sp_equiv_reg,
4854 info.sp_offset));
4855
4856 retaddr = gen_rtx_MEM (Pmode, retaddr);
be0c514c 4857 MEM_NOTRAP_P (retaddr) = 1;
3258e996
RK
4858
4859 /* If there is a pending load to the equivalent register for SP
4860 and we reference that register, we must load our address into
4861 a scratch register and then do that load. */
4862 if (info.equiv_reg_src
4863 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4864 {
4865 unsigned int regno;
4866 rtx reg;
4867
4868 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4869 if (HARD_REGNO_MODE_OK (regno, Pmode)
53b6fb26
RK
4870 && !fixed_regs[regno]
4871 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5e2d947c
JH
4872 && !REGNO_REG_SET_P
4873 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
b5ed05aa 4874 && !refers_to_regno_p (regno,
66fd46b6
JH
4875 regno + hard_regno_nregs[regno]
4876 [Pmode],
f285d67b
RK
4877 info.equiv_reg_src, NULL)
4878 && info.const_equiv[regno] == 0)
3258e996
RK
4879 break;
4880
0bccc606 4881 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7393c642 4882
3258e996
RK
4883 reg = gen_rtx_REG (Pmode, regno);
4884 emit_move_insn (reg, retaddr);
4885 retaddr = reg;
4886 }
4887
4888 emit_equiv_load (&info);
4889 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4890
 4891 /* Show that the SET in the above insn is a RETURN. */
4892 jump_set = single_set (jump_insn);
0bccc606
NS
4893 gcc_assert (jump_set);
4894 SET_IS_RETURN_P (jump_set) = 1;
7393c642 4895 }
3258e996
RK
4896
4897 /* If SP is not mentioned in the pattern and its equivalent register, if
4898 any, is not modified, just emit it. Otherwise, if neither is set,
4899 replace the reference to SP and emit the insn. If none of those are
4900 true, handle each SET individually. */
4901 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4902 && (info.sp_equiv_reg == stack_pointer_rtx
4903 || !reg_set_p (info.sp_equiv_reg, insn)))
4904 add_insn (insn);
4905 else if (! reg_set_p (stack_pointer_rtx, insn)
4906 && (info.sp_equiv_reg == stack_pointer_rtx
4907 || !reg_set_p (info.sp_equiv_reg, insn)))
7393c642 4908 {
0bccc606
NS
4909 int changed;
4910
4911 changed = validate_replace_rtx (stack_pointer_rtx,
4912 plus_constant (info.sp_equiv_reg,
4913 info.sp_offset),
4914 insn);
4915 gcc_assert (changed);
7393c642 4916
3258e996
RK
4917 add_insn (insn);
4918 }
4919 else if (GET_CODE (PATTERN (insn)) == SET)
4920 handle_epilogue_set (PATTERN (insn), &info);
4921 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4922 {
4923 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4924 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4925 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4926 }
4927 else
4928 add_insn (insn);
4929
4930 info.sp_equiv_reg = info.new_sp_equiv_reg;
4931 info.sp_offset = info.new_sp_offset;
2f937369 4932
f285d67b
RK
4933 /* Now update any constants this insn sets. */
4934 note_stores (PATTERN (insn), update_epilogue_consts, &info);
2f937369 4935 insn = next;
3258e996
RK
4936 }
4937
2f937369 4938 insns = get_insns ();
3258e996 4939 end_sequence ();
2f937369 4940 return insns;
3258e996
RK
4941}
4942
d6a7951f 4943/* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
3258e996 4944 structure that contains information about what we've seen so far. We
797a6ac1 4945 process this SET by either updating that data or by emitting one or
3258e996
RK
4946 more insns. */
4947
4948static void
fa8db1f7 4949handle_epilogue_set (rtx set, struct epi_info *p)
3258e996
RK
4950{
4951 /* First handle the case where we are setting SP. Record what it is being
 535a42b1 4952 set from, which we must be able to determine. */
3258e996
RK
4953 if (reg_set_p (stack_pointer_rtx, set))
4954 {
0bccc606 4955 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
3258e996 4956
f285d67b 4957 if (GET_CODE (SET_SRC (set)) == PLUS)
3258e996
RK
4958 {
4959 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
f285d67b
RK
4960 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4961 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
f285d67b 4962 else
0bccc606
NS
4963 {
4964 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4965 && (REGNO (XEXP (SET_SRC (set), 1))
4966 < FIRST_PSEUDO_REGISTER)
4967 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4968 p->new_sp_offset
4969 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4970 }
7393c642 4971 }
3258e996
RK
4972 else
4973 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4974
4975 /* If we are adjusting SP, we adjust from the old data. */
4976 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4977 {
4978 p->new_sp_equiv_reg = p->sp_equiv_reg;
4979 p->new_sp_offset += p->sp_offset;
4980 }
4981
0bccc606 4982 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
3258e996
RK
4983
4984 return;
4985 }
4986
535a42b1
NS
4987 /* Next handle the case where we are setting SP's equivalent
4988 register. We must not already have a value to set it to. We
4989 could update, but there seems little point in handling that case.
4990 Note that we have to allow for the case where we are setting the
4991 register set in the previous part of a PARALLEL inside a single
4992 insn. But use the old offset for any updates within this insn.
4993 We must allow for the case where the register is being set in a
 4994 different (usually wider) mode than Pmode. */
f189c7ca 4995 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
3258e996 4996 {
0bccc606
NS
4997 gcc_assert (!p->equiv_reg_src
4998 && REG_P (p->new_sp_equiv_reg)
4999 && REG_P (SET_DEST (set))
5000 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5001 <= BITS_PER_WORD)
5002 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5003 p->equiv_reg_src
5004 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5005 plus_constant (p->sp_equiv_reg,
5006 p->sp_offset));
3258e996
RK
5007 }
5008
5009 /* Otherwise, replace any references to SP in the insn to its new value
5010 and emit the insn. */
5011 else
5012 {
5013 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5014 plus_constant (p->sp_equiv_reg,
5015 p->sp_offset));
5016 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5017 plus_constant (p->sp_equiv_reg,
5018 p->sp_offset));
5019 emit_insn (set);
7393c642
RK
5020 }
5021}
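
/* Two illustrative SETs (register numbers are placeholders):

     (set (reg sp) (plus (reg sp) (const_int 16)))
       => only recorded: new_sp_offset is bumped by 16 and nothing
          is emitted;

     (set (reg r1) (mem (plus (reg sp) (const_int 4))))
       => SP is replaced by (plus sp_equiv_reg sp_offset) via
          simplify_replace_rtx and the rewritten insn is emitted.  */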
3258e996 5022
f285d67b
RK
5023/* Update the tracking information for registers set to constants. */
5024
5025static void
5026update_epilogue_consts (rtx dest, rtx x, void *data)
5027{
5028 struct epi_info *p = (struct epi_info *) data;
8fbc67c0 5029 rtx new;
f285d67b 5030
f8cfc6aa 5031 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
f285d67b 5032 return;
8fbc67c0
RK
5033
5034 /* If we are either clobbering a register or doing a partial set,
5035 show we don't know the value. */
5036 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
f285d67b 5037 p->const_equiv[REGNO (dest)] = 0;
8fbc67c0
RK
5038
5039 /* If we are setting it to a constant, record that constant. */
5040 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
f285d67b 5041 p->const_equiv[REGNO (dest)] = SET_SRC (x);
8fbc67c0
RK
5042
5043 /* If this is a binary operation between a register we have been tracking
5044 and a constant, see if we can compute a new constant value. */
ec8e098d 5045 else if (ARITHMETIC_P (SET_SRC (x))
f8cfc6aa 5046 && REG_P (XEXP (SET_SRC (x), 0))
8fbc67c0
RK
5047 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5048 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5049 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5050 && 0 != (new = simplify_binary_operation
5051 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5052 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5053 XEXP (SET_SRC (x), 1)))
5054 && GET_CODE (new) == CONST_INT)
5055 p->const_equiv[REGNO (dest)] = new;
5056
5057 /* Otherwise, we can't do anything with this value. */
5058 else
5059 p->const_equiv[REGNO (dest)] = 0;
f285d67b
RK
5060}
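
/* An illustrative trace (values made up): after processing

     (set (reg r3) (const_int 40))
     (set (reg r3) (plus (reg r3) (const_int 8)))

   const_equiv[r3] holds (const_int 48), computed by
   simplify_binary_operation; a CLOBBER of r3, or a SET covering only
   part of it, resets const_equiv[r3] to 0.  */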
5061
3258e996
RK
5062/* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5063
5064static void
fa8db1f7 5065emit_equiv_load (struct epi_info *p)
3258e996
RK
5066{
5067 if (p->equiv_reg_src != 0)
f285d67b
RK
5068 {
5069 rtx dest = p->sp_equiv_reg;
5070
5071 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5072 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5073 REGNO (p->sp_equiv_reg));
3258e996 5074
f285d67b
RK
5075 emit_move_insn (dest, p->equiv_reg_src);
5076 p->equiv_reg_src = 0;
5077 }
3258e996 5078}
7393c642
RK
5079#endif
5080
9faa82d8 5081/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58
TW
5082 this into place with notes indicating where the prologue ends and where
5083 the epilogue begins. Update the basic block information when possible. */
5084
5085void
fa8db1f7 5086thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
bdac5f58 5087{
ca1117cc 5088 int inserted = 0;
19d3c25c 5089 edge e;
91ea4f8d 5090#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
19d3c25c 5091 rtx seq;
91ea4f8d 5092#endif
ca1117cc
RH
5093#ifdef HAVE_prologue
5094 rtx prologue_end = NULL_RTX;
5095#endif
86c82654
RH
5096#if defined (HAVE_epilogue) || defined(HAVE_return)
5097 rtx epilogue_end = NULL_RTX;
5098#endif
628f6a4e 5099 edge_iterator ei;
e881bb1b 5100
bdac5f58
TW
5101#ifdef HAVE_prologue
5102 if (HAVE_prologue)
5103 {
e881bb1b 5104 start_sequence ();
718fe406 5105 seq = gen_prologue ();
e881bb1b 5106 emit_insn (seq);
bdac5f58
TW
5107
5108 /* Retain a map of the prologue insns. */
0a1c58a2 5109 record_insns (seq, &prologue);
2e040219 5110 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
9185a8d5 5111
2f937369 5112 seq = get_insns ();
e881bb1b 5113 end_sequence ();
0435312e 5114 set_insn_locators (seq, prologue_locator);
e881bb1b 5115
d6a7951f 5116 /* Can't deal with multiple successors of the entry block
75540af0
JH
5117 at the moment. Function should always have at least one
5118 entry point. */
c5cbcccf 5119 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
e881bb1b 5120
c5cbcccf 5121 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
75540af0 5122 inserted = 1;
bdac5f58 5123 }
bdac5f58 5124#endif
bdac5f58 5125
19d3c25c
RH
5126 /* If the exit block has no non-fake predecessors, we don't need
5127 an epilogue. */
628f6a4e 5128 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
19d3c25c
RH
5129 if ((e->flags & EDGE_FAKE) == 0)
5130 break;
5131 if (e == NULL)
5132 goto epilogue_done;
5133
69732dcb
RH
5134#ifdef HAVE_return
5135 if (optimize && HAVE_return)
5136 {
5137 /* If we're allowed to generate a simple return instruction,
5138 then by definition we don't need a full epilogue. Examine
718fe406
KH
5139 the block that falls through to EXIT. If it does not
5140 contain any code, examine its predecessors and try to
69732dcb
RH
5141 emit (conditional) return instructions. */
5142
5143 basic_block last;
69732dcb
RH
5144 rtx label;
5145
628f6a4e 5146 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
69732dcb
RH
5147 if (e->flags & EDGE_FALLTHRU)
5148 break;
5149 if (e == NULL)
5150 goto epilogue_done;
5151 last = e->src;
5152
5153 /* Verify that there are no active instructions in the last block. */
a813c111 5154 label = BB_END (last);
4b4bf941 5155 while (label && !LABEL_P (label))
69732dcb
RH
5156 {
5157 if (active_insn_p (label))
5158 break;
5159 label = PREV_INSN (label);
5160 }
5161
4b4bf941 5162 if (BB_HEAD (last) == label && LABEL_P (label))
69732dcb 5163 {
628f6a4e 5164 edge_iterator ei2;
718fe406 5165 rtx epilogue_line_note = NULL_RTX;
86c82654
RH
5166
5167 /* Locate the line number associated with the closing brace,
5168 if we can find one. */
5169 for (seq = get_last_insn ();
5170 seq && ! active_insn_p (seq);
5171 seq = PREV_INSN (seq))
4b4bf941 5172 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
86c82654
RH
5173 {
5174 epilogue_line_note = seq;
5175 break;
5176 }
5177
628f6a4e 5178 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
69732dcb
RH
5179 {
5180 basic_block bb = e->src;
5181 rtx jump;
5182
69732dcb 5183 if (bb == ENTRY_BLOCK_PTR)
628f6a4e
BE
5184 {
5185 ei_next (&ei2);
5186 continue;
5187 }
69732dcb 5188
a813c111 5189 jump = BB_END (bb);
4b4bf941 5190 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
628f6a4e
BE
5191 {
5192 ei_next (&ei2);
5193 continue;
5194 }
69732dcb
RH
5195
5196 /* If we have an unconditional jump, we can replace that
5197 with a simple return instruction. */
5198 if (simplejump_p (jump))
5199 {
86c82654 5200 emit_return_into_block (bb, epilogue_line_note);
53c17031 5201 delete_insn (jump);
69732dcb
RH
5202 }
5203
5204 /* If we have a conditional jump, we can try to replace
5205 that with a conditional return instruction. */
5206 else if (condjump_p (jump))
5207 {
47009d11 5208 if (! redirect_jump (jump, 0, 0))
628f6a4e
BE
5209 {
5210 ei_next (&ei2);
5211 continue;
5212 }
718fe406 5213
3a75e42e
CP
5214 /* If this block has only one successor, it both jumps
5215 and falls through to the fallthru block, so we can't
5216 delete the edge. */
c5cbcccf 5217 if (single_succ_p (bb))
628f6a4e
BE
5218 {
5219 ei_next (&ei2);
5220 continue;
5221 }
69732dcb
RH
5222 }
5223 else
628f6a4e
BE
5224 {
5225 ei_next (&ei2);
5226 continue;
5227 }
69732dcb
RH
5228
5229 /* Fix up the CFG for the successful change we just made. */
86c82654 5230 redirect_edge_succ (e, EXIT_BLOCK_PTR);
69732dcb 5231 }
69732dcb 5232
2dd8bc01
GK
5233 /* Emit a return insn for the exit fallthru block. Whether
5234 this is still reachable will be determined later. */
69732dcb 5235
a813c111 5236 emit_barrier_after (BB_END (last));
86c82654 5237 emit_return_into_block (last, epilogue_line_note);
a813c111 5238 epilogue_end = BB_END (last);
c5cbcccf 5239 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
718fe406 5240 goto epilogue_done;
2dd8bc01 5241 }
69732dcb
RH
5242 }
5243#endif
623a66fa
R
5244 /* Find the edge that falls through to EXIT. Other edges may exist
5245 due to RETURN instructions, but those don't need epilogues.
5246 There really shouldn't be a mixture -- either all should have
5247 been converted or none, however... */
5248
628f6a4e 5249 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
623a66fa
R
5250 if (e->flags & EDGE_FALLTHRU)
5251 break;
5252 if (e == NULL)
5253 goto epilogue_done;
5254
bdac5f58
TW
5255#ifdef HAVE_epilogue
5256 if (HAVE_epilogue)
5257 {
19d3c25c 5258 start_sequence ();
2e040219 5259 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
a78bdb38 5260
19d3c25c 5261 seq = gen_epilogue ();
7393c642 5262
3258e996
RK
5263#ifdef INCOMING_RETURN_ADDR_RTX
5264 /* If this function returns with the stack depressed and we can support
5265 it, massage the epilogue to actually do that. */
43db0363
RK
5266 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5267 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
3258e996
RK
5268 seq = keep_stack_depressed (seq);
5269#endif
7393c642 5270
19d3c25c 5271 emit_jump_insn (seq);
bdac5f58 5272
19d3c25c 5273 /* Retain a map of the epilogue insns. */
0a1c58a2 5274 record_insns (seq, &epilogue);
0435312e 5275 set_insn_locators (seq, epilogue_locator);
bdac5f58 5276
2f937369 5277 seq = get_insns ();
718fe406 5278 end_sequence ();
e881bb1b 5279
19d3c25c 5280 insert_insn_on_edge (seq, e);
ca1117cc 5281 inserted = 1;
bdac5f58 5282 }
623a66fa 5283 else
bdac5f58 5284#endif
623a66fa
R
5285 {
5286 basic_block cur_bb;
5287
5288 if (! next_active_insn (BB_END (e->src)))
5289 goto epilogue_done;
5290 /* We have a fall-through edge to the exit block, the source is not
5291 at the end of the function, and there will be an assembler epilogue
5292 at the end of the function.
5293 We can't use force_nonfallthru here, because that would try to
5294 use return. Inserting a jump 'by hand' is extremely messy, so
5295 we take advantage of cfg_layout_finalize using
5296 fixup_fallthru_exit_predecessor. */
35b6b437 5297 cfg_layout_initialize (0);
623a66fa 5298 FOR_EACH_BB (cur_bb)
24bd1a0b
DB
5299 if (cur_bb->index >= NUM_FIXED_BLOCKS
5300 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 5301 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
5302 cfg_layout_finalize ();
5303 }
19d3c25c 5304epilogue_done:
e881bb1b 5305
ca1117cc 5306 if (inserted)
e881bb1b 5307 commit_edge_insertions ();
0a1c58a2
JL
5308
5309#ifdef HAVE_sibcall_epilogue
5310 /* Emit sibling epilogues before any sibling call sites. */
628f6a4e 5311 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
0a1c58a2
JL
5312 {
5313 basic_block bb = e->src;
a813c111 5314 rtx insn = BB_END (bb);
0a1c58a2 5315
4b4bf941 5316 if (!CALL_P (insn)
0a1c58a2 5317 || ! SIBLING_CALL_P (insn))
628f6a4e
BE
5318 {
5319 ei_next (&ei);
5320 continue;
5321 }
0a1c58a2
JL
5322
5323 start_sequence ();
0af5c896
RE
5324 emit_insn (gen_sibcall_epilogue ());
5325 seq = get_insns ();
0a1c58a2
JL
5326 end_sequence ();
5327
2f937369
DM
5328 /* Retain a map of the epilogue insns. Used in life analysis to
5329 avoid getting rid of sibcall epilogue insns. Do this before we
5330 actually emit the sequence. */
5331 record_insns (seq, &sibcall_epilogue);
0435312e 5332 set_insn_locators (seq, epilogue_locator);
2f937369 5333
5e35992a 5334 emit_insn_before (seq, insn);
628f6a4e 5335 ei_next (&ei);
0a1c58a2
JL
5336 }
5337#endif
ca1117cc
RH
5338
5339#ifdef HAVE_prologue
589fe865 5340 /* This is probably all useless now that we use locators. */
ca1117cc
RH
5341 if (prologue_end)
5342 {
5343 rtx insn, prev;
5344
5345 /* GDB handles `break f' by setting a breakpoint on the first
 30196c1f 5346 line note after the prologue. This means (1) that if
ca1117cc 5347 there are line number notes before where we inserted the
30196c1f
RH
5348 prologue we should move them, and (2) we should generate a
5349 note before the end of the first basic block, if there isn't
016030fe
JH
5350 one already there.
5351
8d9afc4e 5352 ??? This behavior is completely broken when dealing with
016030fe
JH
 5353 multiple entry functions. We simply always place the note
 5354 into the first basic block and let alternate entry points
 5355 be missed.
5356 */
ca1117cc 5357
718fe406 5358 for (insn = prologue_end; insn; insn = prev)
ca1117cc
RH
5359 {
5360 prev = PREV_INSN (insn);
4b4bf941 5361 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
ca1117cc
RH
5362 {
5363 /* Note that we cannot reorder the first insn in the
5364 chain, since rest_of_compilation relies on that
30196c1f 5365 remaining constant. */
ca1117cc 5366 if (prev == NULL)
30196c1f
RH
5367 break;
5368 reorder_insns (insn, insn, prologue_end);
ca1117cc
RH
5369 }
5370 }
5371
30196c1f 5372 /* Find the last line number note in the first block. */
a813c111 5373 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
016030fe 5374 insn != prologue_end && insn;
30196c1f 5375 insn = PREV_INSN (insn))
4b4bf941 5376 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
30196c1f
RH
5377 break;
5378
5379 /* If we didn't find one, make a copy of the first line number
5380 we run across. */
5381 if (! insn)
ca1117cc 5382 {
30196c1f
RH
5383 for (insn = next_active_insn (prologue_end);
5384 insn;
5385 insn = PREV_INSN (insn))
4b4bf941 5386 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
30196c1f 5387 {
5f2fc772 5388 emit_note_copy_after (insn, prologue_end);
30196c1f
RH
5389 break;
5390 }
ca1117cc
RH
5391 }
5392 }
5393#endif
86c82654
RH
5394#ifdef HAVE_epilogue
5395 if (epilogue_end)
5396 {
5397 rtx insn, next;
5398
5399 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 5400 There is no need, however, to be quite so anal about the existence
84c1fa24
UW
5401 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5402 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5403 info generation. */
718fe406 5404 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
5405 {
5406 next = NEXT_INSN (insn);
4b4bf941 5407 if (NOTE_P (insn)
84c1fa24
UW
5408 && (NOTE_LINE_NUMBER (insn) > 0
5409 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5410 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
86c82654
RH
5411 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5412 }
5413 }
5414#endif
bdac5f58
TW
5415}
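
/* The edge-insertion idiom used above, shown in isolation (a sketch;
   E stands for the CFG edge being instrumented):

     start_sequence ();
     emit_jump_insn (gen_epilogue ());
     seq = get_insns ();
     end_sequence ();
     insert_insn_on_edge (seq, e);
     ...
     commit_edge_insertions ();

   Insns queued on an edge are spliced into the insn stream only by
   commit_edge_insertions, which may split the edge and create a new
   basic block to hold them.  */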
5416
5417/* Reposition the prologue-end and epilogue-begin notes after instruction
5418 scheduling and delayed branch scheduling. */
5419
5420void
fa8db1f7 5421reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
bdac5f58
TW
5422{
5423#if defined (HAVE_prologue) || defined (HAVE_epilogue)
9f53e965 5424 rtx insn, last, note;
0a1c58a2
JL
5425 int len;
5426
f995dcfe 5427 if ((len = VEC_length (int, prologue)) > 0)
bdac5f58 5428 {
9f53e965 5429 last = 0, note = 0;
bdac5f58 5430
0a1c58a2
JL
5431 /* Scan from the beginning until we reach the last prologue insn.
5432 We apparently can't depend on basic_block_{head,end} after
5433 reorg has run. */
9f53e965 5434 for (insn = f; insn; insn = NEXT_INSN (insn))
bdac5f58 5435 {
4b4bf941 5436 if (NOTE_P (insn))
9392c110 5437 {
0a1c58a2
JL
5438 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5439 note = insn;
5440 }
f995dcfe 5441 else if (contains (insn, &prologue))
0a1c58a2 5442 {
9f53e965
RH
5443 last = insn;
5444 if (--len == 0)
5445 break;
5446 }
5447 }
797a6ac1 5448
9f53e965
RH
5449 if (last)
5450 {
9f53e965
RH
5451 /* Find the prologue-end note if we haven't already, and
5452 move it to just after the last prologue insn. */
5453 if (note == 0)
5454 {
5455 for (note = last; (note = NEXT_INSN (note));)
4b4bf941 5456 if (NOTE_P (note)
9f53e965
RH
5457 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5458 break;
5459 }
c93b03c2 5460
9f53e965 5461 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 5462 if (LABEL_P (last))
9f53e965
RH
5463 last = NEXT_INSN (last);
5464 reorder_insns (note, note, last);
bdac5f58 5465 }
0a1c58a2
JL
5466 }
5467
f995dcfe 5468 if ((len = VEC_length (int, epilogue)) > 0)
0a1c58a2 5469 {
9f53e965 5470 last = 0, note = 0;
bdac5f58 5471
0a1c58a2
JL
5472 /* Scan from the end until we reach the first epilogue insn.
5473 We apparently can't depend on basic_block_{head,end} after
5474 reorg has run. */
9f53e965 5475 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
bdac5f58 5476 {
4b4bf941 5477 if (NOTE_P (insn))
9392c110 5478 {
0a1c58a2
JL
5479 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5480 note = insn;
5481 }
f995dcfe 5482 else if (contains (insn, &epilogue))
0a1c58a2 5483 {
9f53e965
RH
5484 last = insn;
5485 if (--len == 0)
5486 break;
5487 }
5488 }
c93b03c2 5489
9f53e965
RH
5490 if (last)
5491 {
5492 /* Find the epilogue-begin note if we haven't already, and
5493 move it to just before the first epilogue insn. */
5494 if (note == 0)
5495 {
5496 for (note = insn; (note = PREV_INSN (note));)
4b4bf941 5497 if (NOTE_P (note)
9f53e965
RH
5498 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5499 break;
9392c110 5500 }
9f53e965
RH
5501
5502 if (PREV_INSN (last) != note)
5503 reorder_insns (note, note, PREV_INSN (last));
bdac5f58
TW
5504 }
5505 }
5506#endif /* HAVE_prologue or HAVE_epilogue */
5507}
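
/* Schematic example: after scheduling, the stream may look like

     NOTE_INSN_PROLOGUE_END
     insn-A      ;; a recorded prologue insn moved past the note

   and the first loop above moves the note to just after insn-A, so
   the prologue-end note again follows the last prologue insn.  */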
87ff9c8e 5508
6de9cd9a
DN
5509/* Resets insn_block_boundaries array. */
5510
5511void
5512reset_block_changes (void)
5513{
5514 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5515 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5516}
5517
5518/* Record the boundary for BLOCK. */
5519void
5520record_block_change (tree block)
5521{
5522 int i, n;
5523 tree last_block;
5524
5525 if (!block)
5526 return;
5527
ee184c4d
RK
 5528 if (!cfun->ib_boundaries_block)
5529 return;
5530
6de9cd9a
DN
5531 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5532 VARRAY_POP (cfun->ib_boundaries_block);
5533 n = get_max_uid ();
5534 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5535 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5536
5537 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5538}
5539
 5540/* Finish recording the block boundaries. */
5541void finalize_block_changes (void)
5542{
5543 record_block_change (DECL_INITIAL (current_function_decl));
5544}
5545
 5546/* For INSN, set *BLOCK to the BLOCK it belongs to. */
5547void
5548check_block_change (rtx insn, tree *block)
5549{
5550 unsigned uid = INSN_UID (insn);
5551
5552 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5553 return;
5554
5555 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5556}
5557
5558/* Releases the ib_boundaries_block records. */
5559void
5560free_block_changes (void)
5561{
5562 cfun->ib_boundaries_block = NULL;
5563}
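
/* Typical lifetime of the ib_boundaries_block varray (a sketch of
   the expected calling sequence, not a fixed contract):

     reset_block_changes ();               -- seed with one NULL_TREE
     ... expand statements, calling record_block_change (block)
         whenever the current scope changes ...
     finalize_block_changes ();            -- close with DECL_INITIAL
     check_block_change (insn, &block);    -- map INSN_UID to BLOCK
     free_block_changes ();

   record_block_change back-fills an entry for every insn UID created
   since the previous call, so lookups indexed by INSN_UID stay
   dense.  */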
5564
faed5cc3
SB
5565/* Returns the name of the current function. */
5566const char *
5567current_function_name (void)
5568{
ae2bcd98 5569 return lang_hooks.decl_printable_name (cfun->decl, 2);
faed5cc3 5570}
ef330312
PB
5571\f
5572
c2924966 5573static unsigned int
ef330312
PB
5574rest_of_handle_check_leaf_regs (void)
5575{
5576#ifdef LEAF_REGISTERS
5577 current_function_uses_only_leaf_regs
5578 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5579#endif
c2924966 5580 return 0;
ef330312
PB
5581}
5582
5583struct tree_opt_pass pass_leaf_regs =
5584{
5585 NULL, /* name */
5586 NULL, /* gate */
5587 rest_of_handle_check_leaf_regs, /* execute */
5588 NULL, /* sub */
5589 NULL, /* next */
5590 0, /* static_pass_number */
5591 0, /* tv_id */
5592 0, /* properties_required */
5593 0, /* properties_provided */
5594 0, /* properties_destroyed */
5595 0, /* todo_flags_start */
5596 0, /* todo_flags_finish */
5597 0 /* letter */
5598};
5599
faed5cc3 5600
e2500fed 5601#include "gt-function.h"