/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
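
/* Illustrative use of the two macros above, assuming (as stated) that the
   alignment is a power of two.  This block is only an example and is not
   compiled.  */
#if 0
static void
alignment_rounding_example (void)
{
  /* CEIL_ROUND rounds up to the next multiple of the alignment:
     CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (16, 8) == 16.  */
  HOST_WIDE_INT rounded_up = CEIL_ROUND (13, 8);

  /* FLOOR_ROUND rounds down by masking low-order bits, which also works
     for negative values: FLOOR_ROUND (-13, 8) == -16.  */
  HOST_WIDE_INT rounded_down = FLOOR_ROUND (-13, 8);

  gcc_assert (rounded_up == 16 && rounded_down == -16);
}
#endif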

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
\f
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                        (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                       (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
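
/* Example of the interface above (a sketch only, not called anywhere):
   allocate an 8-byte DImode slot with the mode's natural alignment and a
   32-byte BLKmode buffer aligned to BIGGEST_ALIGNMENT.  */
#if 0
static void
assign_stack_local_example (void)
{
  /* ALIGN == 0: derive the alignment from MODE.  */
  rtx di_slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

  /* ALIGN == -1: use BIGGEST_ALIGNMENT and round the size up to it.  */
  rtx buf_slot = assign_stack_local (BLKmode, 32, -1);

  gcc_assert (MEM_P (di_slot) && MEM_P (buf_slot));
}
#endif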

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
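
/* Sketch of the usual calling pattern (illustrative only): request a
   scratch slot for a statement, use it, then release non-kept slots with
   free_temp_slots once the statement has been expanded.  */
#if 0
static void
assign_stack_temp_example (void)
{
  /* KEEP == 0: the slot may be recycled by the next free_temp_slots.  */
  rtx temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

  emit_move_insn (temp, const0_rtx);
  free_temp_slots ();
}
#endif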
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate an object of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable %qD is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
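
/* Sketch of assign_temp usage (illustrative only; TYPE stands for any
   complete type tree).  For a small scalar type the first call normally
   hands back a pseudo register, while MEMORY_REQUIRED forces an
   addressable stack slot.  */
#if 0
static void
assign_temp_example (tree type)
{
  rtx reg_or_mem = assign_temp (type, 0, 0, 0);  /* a register is OK */
  rtx mem_only = assign_temp (type, 0, 1, 0);    /* must be stack memory */

  gcc_assert (MEM_P (mem_only));
  (void) reg_or_mem;
}
#endif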
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless running
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS, see if there is a register in common between them.
     If so, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     was taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

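/* Sketch of the nesting protocol implemented above (illustrative only):
   push a level around a construct, free statement-local temporaries as
   each statement finishes, and pop the level when the construct is done.  */
#if 0
static void
temp_slot_level_example (void)
{
  push_temp_slots ();

  {
    rtx scratch = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
    (void) scratch;
    free_temp_slots ();   /* releases non-kept slots at this level */
  }

  pop_temp_slots ();      /* frees whatever is still in use at this level */
}
#endif
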
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)                                          \
((ACCUMULATE_OUTGOING_ARGS                                                    \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)                                          \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)         \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
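
/* For example, a frame access written in terms of the virtual registers,
   such as
       (mem:SI (plus (reg virtual-stack-vars) (const_int 8)))
   is rewritten during instantiation into
       (mem:SI (plus (reg frame-pointer) (const_int var_offset + 8)))
   where var_offset is STARTING_FRAME_OFFSET for the current function.  */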
1247
bbf9b913
RH
1248/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1249 Instantiate any virtual registers present inside of *LOC. The expression
1250 is simplified, as much as possible, but is not to be considered "valid"
1251 in any sense implied by the target. If any change is made, set CHANGED
1252 to true. */
6f086dfc 1253
bbf9b913
RH
1254static int
1255instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
6f086dfc 1256{
bbf9b913
RH
1257 HOST_WIDE_INT offset;
1258 bool *changed = (bool *) data;
1259 rtx x, new;
6f086dfc 1260
bbf9b913
RH
1261 x = *loc;
1262 if (x == 0)
1263 return 0;
1264
1265 switch (GET_CODE (x))
6f086dfc 1266 {
bbf9b913
RH
1267 case REG:
1268 new = instantiate_new_reg (x, &offset);
1269 if (new)
1270 {
1271 *loc = plus_constant (new, offset);
1272 if (changed)
1273 *changed = true;
1274 }
1275 return -1;
1276
1277 case PLUS:
1278 new = instantiate_new_reg (XEXP (x, 0), &offset);
1279 if (new)
1280 {
1281 new = plus_constant (new, offset);
1282 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1283 if (changed)
1284 *changed = true;
1285 return -1;
1286 }
e5e809f4 1287
bbf9b913
RH
1288 /* FIXME -- from old code */
1289 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1290 we can commute the PLUS and SUBREG because pointers into the
1291 frame are well-behaved. */
1292 break;
ce717ce4 1293
bbf9b913
RH
1294 default:
1295 break;
6f086dfc
RS
1296 }
1297
bbf9b913 1298 return 0;
6f086dfc
RS
1299}
1300
bbf9b913
RH
1301/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1302 matches the predicate for insn CODE operand OPERAND. */
6f086dfc 1303
bbf9b913
RH
1304static int
1305safe_insn_predicate (int code, int operand, rtx x)
6f086dfc 1306{
bbf9b913 1307 const struct insn_operand_data *op_data;
6f086dfc 1308
bbf9b913
RH
1309 if (code < 0)
1310 return true;
6f086dfc 1311
bbf9b913
RH
1312 op_data = &insn_data[code].operand[operand];
1313 if (op_data->predicate == NULL)
1314 return true;
5a73491b 1315
bbf9b913
RH
1316 return op_data->predicate (x, op_data->mode);
1317}
5a73491b 1318
bbf9b913
RH
1319/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1320 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1321
1322static void
bbf9b913 1323instantiate_virtual_regs_in_insn (rtx insn)
5a73491b 1324{
bbf9b913
RH
1325 HOST_WIDE_INT offset;
1326 int insn_code, i;
9325973e 1327 bool any_change = false;
bbf9b913 1328 rtx set, new, x, seq;
32e66afd 1329
bbf9b913
RH
1330 /* There are some special cases to be handled first. */
1331 set = single_set (insn);
1332 if (set)
32e66afd 1333 {
bbf9b913
RH
1334 /* We're allowed to assign to a virtual register. This is interpreted
1335 to mean that the underlying register gets assigned the inverse
1336 transformation. This is used, for example, in the handling of
1337 non-local gotos. */
1338 new = instantiate_new_reg (SET_DEST (set), &offset);
1339 if (new)
1340 {
1341 start_sequence ();
32e66afd 1342
bbf9b913
RH
1343 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1344 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1345 GEN_INT (-offset));
1346 x = force_operand (x, new);
1347 if (x != new)
1348 emit_move_insn (new, x);
5a73491b 1349
bbf9b913
RH
1350 seq = get_insns ();
1351 end_sequence ();
5a73491b 1352
bbf9b913
RH
1353 emit_insn_before (seq, insn);
1354 delete_insn (insn);
1355 return;
1356 }
5a73491b 1357
bbf9b913
RH
1358 /* Handle a straight copy from a virtual register by generating a
1359 new add insn. The difference between this and falling through
1360 to the generic case is avoiding a new pseudo and eliminating a
1361 move insn in the initial rtl stream. */
1362 new = instantiate_new_reg (SET_SRC (set), &offset);
1363 if (new && offset != 0
1364 && REG_P (SET_DEST (set))
1365 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1366 {
1367 start_sequence ();
5a73491b 1368
bbf9b913
RH
1369 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1370 new, GEN_INT (offset), SET_DEST (set),
1371 1, OPTAB_LIB_WIDEN);
1372 if (x != SET_DEST (set))
1373 emit_move_insn (SET_DEST (set), x);
770ae6cc 1374
bbf9b913
RH
1375 seq = get_insns ();
1376 end_sequence ();
87ce34d6 1377
bbf9b913
RH
1378 emit_insn_before (seq, insn);
1379 delete_insn (insn);
87ce34d6 1380 return;
bbf9b913 1381 }
5a73491b 1382
bbf9b913 1383 extract_insn (insn);
9325973e 1384 insn_code = INSN_CODE (insn);
5a73491b 1385
bbf9b913
RH
1386 /* Handle a plus involving a virtual register by determining if the
1387 operands remain valid if they're modified in place. */
1388 if (GET_CODE (SET_SRC (set)) == PLUS
1389 && recog_data.n_operands >= 3
1390 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1391 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1392 && GET_CODE (recog_data.operand[2]) == CONST_INT
1393 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1394 {
1395 offset += INTVAL (recog_data.operand[2]);
5a73491b 1396
bbf9b913 1397 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1398 if (offset == 0
1399 && REG_P (SET_DEST (set))
1400 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1401 {
1402 start_sequence ();
1403 emit_move_insn (SET_DEST (set), new);
1404 seq = get_insns ();
1405 end_sequence ();
d1405722 1406
bbf9b913
RH
1407 emit_insn_before (seq, insn);
1408 delete_insn (insn);
1409 return;
1410 }
d1405722 1411
bbf9b913 1412 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1413
1414 /* Using validate_change and apply_change_group here leaves
1415 recog_data in an invalid state. Since we know exactly what
1416 we want to check, do those two by hand. */
1417 if (safe_insn_predicate (insn_code, 1, new)
1418 && safe_insn_predicate (insn_code, 2, x))
1419 {
1420 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1421 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1422 any_change = true;
9325973e
RH
1423
1424 /* Fall through into the regular operand fixup loop in
1425 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1426 }
1427 }
1428 }
d1405722 1429 else
9325973e
RH
1430 {
1431 extract_insn (insn);
1432 insn_code = INSN_CODE (insn);
1433 }
5dc96d60 1434
bbf9b913
RH
1435 /* In the general case, we expect virtual registers to appear only in
1436 operands, and then only as either bare registers or inside memories. */
1437 for (i = 0; i < recog_data.n_operands; ++i)
1438 {
1439 x = recog_data.operand[i];
1440 switch (GET_CODE (x))
1441 {
1442 case MEM:
1443 {
1444 rtx addr = XEXP (x, 0);
1445 bool changed = false;
1446
1447 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1448 if (!changed)
1449 continue;
1450
1451 start_sequence ();
1452 x = replace_equiv_address (x, addr);
1453 seq = get_insns ();
1454 end_sequence ();
1455 if (seq)
1456 emit_insn_before (seq, insn);
1457 }
1458 break;
1459
1460 case REG:
1461 new = instantiate_new_reg (x, &offset);
1462 if (new == NULL)
1463 continue;
1464 if (offset == 0)
1465 x = new;
1466 else
1467 {
1468 start_sequence ();
6f086dfc 1469
bbf9b913
RH
1470 /* Careful, special mode predicates may have stuff in
1471 insn_data[insn_code].operand[i].mode that isn't useful
1472 to us for computing a new value. */
1473 /* ??? Recognize address_operand and/or "p" constraints
1474 to see if (plus new offset) is a valid before we put
1475 this through expand_simple_binop. */
1476 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1477 GEN_INT (offset), NULL_RTX,
1478 1, OPTAB_LIB_WIDEN);
1479 seq = get_insns ();
1480 end_sequence ();
1481 emit_insn_before (seq, insn);
1482 }
1483 break;
6f086dfc 1484
bbf9b913
RH
1485 case SUBREG:
1486 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1487 if (new == NULL)
1488 continue;
1489 if (offset != 0)
1490 {
1491 start_sequence ();
1492 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1493 GEN_INT (offset), NULL_RTX,
1494 1, OPTAB_LIB_WIDEN);
1495 seq = get_insns ();
1496 end_sequence ();
1497 emit_insn_before (seq, insn);
1498 }
fbdd0b09
RH
1499 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1500 GET_MODE (new), SUBREG_BYTE (x));
bbf9b913 1501 break;
6f086dfc 1502
bbf9b913
RH
1503 default:
1504 continue;
1505 }
6f086dfc 1506
bbf9b913
RH
1507 /* At this point, X contains the new value for the operand.
1508 Validate the new value vs the insn predicate. Note that
1509 asm insns will have insn_code -1 here. */
1510 if (!safe_insn_predicate (insn_code, i, x))
1511 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6f086dfc 1512
bbf9b913
RH
1513 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1514 any_change = true;
1515 }
6f086dfc 1516
bbf9b913
RH
1517 if (any_change)
1518 {
1519 /* Propagate operand changes into the duplicates. */
1520 for (i = 0; i < recog_data.n_dups; ++i)
1521 *recog_data.dup_loc[i]
1522 = recog_data.operand[(unsigned)recog_data.dup_num[i]];
5dc96d60 1523
bbf9b913
RH
1524 /* Force re-recognition of the instruction for validation. */
1525 INSN_CODE (insn) = -1;
1526 }
6f086dfc 1527
bbf9b913 1528 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1529 {
bbf9b913 1530 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1531 {
bbf9b913
RH
1532 error_for_asm (insn, "impossible constraint in %<asm%>");
1533 delete_insn (insn);
1534 }
1535 }
1536 else
1537 {
1538 if (recog_memoized (insn) < 0)
1539 fatal_insn_not_found (insn);
1540 }
1541}
14a774a9 1542
bbf9b913
RH
1543/* Subroutine of instantiate_decls. Given RTL representing a decl,
1544 do any instantiation required. */
14a774a9 1545
bbf9b913
RH
1546static void
1547instantiate_decl (rtx x)
1548{
1549 rtx addr;
6f086dfc 1550
bbf9b913
RH
1551 if (x == 0)
1552 return;
6f086dfc 1553
bbf9b913
RH
1554 /* If this is a CONCAT, recurse for the pieces. */
1555 if (GET_CODE (x) == CONCAT)
1556 {
1557 instantiate_decl (XEXP (x, 0));
1558 instantiate_decl (XEXP (x, 1));
1559 return;
1560 }
6f086dfc 1561
bbf9b913
RH
1562 /* If this is not a MEM, no need to do anything. Similarly if the
1563 address is a constant or a register that is not a virtual register. */
1564 if (!MEM_P (x))
1565 return;
6f086dfc 1566
bbf9b913
RH
1567 addr = XEXP (x, 0);
1568 if (CONSTANT_P (addr)
1569 || (REG_P (addr)
1570 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1571 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1572 return;
6f086dfc 1573
bbf9b913
RH
1574 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1575}
6f086dfc 1576
bbf9b913
RH
1577/* Subroutine of instantiate_decls: Process all decls in the given
1578 BLOCK node and all its subblocks. */
6f086dfc 1579
bbf9b913
RH
1580static void
1581instantiate_decls_1 (tree let)
1582{
1583 tree t;
6f086dfc 1584
bbf9b913
RH
1585 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1586 if (DECL_RTL_SET_P (t))
1587 instantiate_decl (DECL_RTL (t));
6f086dfc 1588
bbf9b913
RH
1589 /* Process all subblocks. */
1590 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1591 instantiate_decls_1 (t);
1592}
6f086dfc 1593
bbf9b913
RH
1594/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1595 all virtual registers in their DECL_RTL's. */
6f086dfc 1596
bbf9b913
RH
1597static void
1598instantiate_decls (tree fndecl)
1599{
1600 tree decl;
6f086dfc 1601
bbf9b913
RH
1602 /* Process all parameters of the function. */
1603 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1604 {
1605 instantiate_decl (DECL_RTL (decl));
1606 instantiate_decl (DECL_INCOMING_RTL (decl));
1607 }
4fd796bb 1608
bbf9b913
RH
1609 /* Now process all variables defined in the function or its subblocks. */
1610 instantiate_decls_1 (DECL_INITIAL (fndecl));
1611}
6f086dfc 1612
bbf9b913
RH
1613/* Pass through the insns of the current function and convert virtual
1614 register references to hard register references. */
6f086dfc 1615
bbf9b913
RH
1616void
1617instantiate_virtual_regs (void)
1618{
1619 rtx insn;
6f086dfc 1620
bbf9b913
RH
1621 /* Compute the offsets to use for this function. */
1622 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1623 var_offset = STARTING_FRAME_OFFSET;
1624 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1625 out_arg_offset = STACK_POINTER_OFFSET;
1626 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
e9a25f70 1627
bbf9b913
RH
1628 /* Initialize recognition, indicating that volatile is OK. */
1629 init_recog ();
6f086dfc 1630
bbf9b913
RH
1631 /* Scan through all the insns, instantiating every virtual register still
1632 present. */
1633 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1634 if (INSN_P (insn))
6f086dfc 1635 {
bbf9b913
RH
1636 /* These patterns in the instruction stream can never be recognized.
1637 Fortunately, they shouldn't contain virtual registers either. */
1638 if (GET_CODE (PATTERN (insn)) == USE
1639 || GET_CODE (PATTERN (insn)) == CLOBBER
1640 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1641 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1642 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1643 continue;
1644
1645 instantiate_virtual_regs_in_insn (insn);
1646
1647 if (INSN_DELETED_P (insn))
1648 continue;
1649
1650 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1651
1652 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1653 if (GET_CODE (insn) == CALL_INSN)
1654 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1655 instantiate_virtual_regs_in_rtx, NULL);
6f086dfc 1656 }
6f086dfc 1657
bbf9b913
RH
1658 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1659 instantiate_decls (current_function_decl);
1660
1661 /* Indicate that, from now on, assign_stack_local should use
1662 frame_pointer_rtx. */
1663 virtuals_instantiated = 1;
6f086dfc
RS
1664}
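/* For illustration (the exact offsets are target-dependent): on a
   target where STARTING_FRAME_OFFSET is 0 and FIRST_PARM_OFFSET is 8,
   an address such as (plus (reg virtual-stack-vars) (const_int 4))
   is rewritten above into (plus (reg frame-pointer) (const_int 4)),
   and a use of (reg virtual-incoming-args) becomes
   (plus (reg arg-pointer) (const_int 8)).  */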
1665\f
d181c154
RS
1666/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1667 This means a type for which function calls must pass an address to the
1668 function or get an address back from the function.
1669 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
1670
1671int
61f71b34 1672aggregate_value_p (tree exp, tree fntype)
6f086dfc 1673{
9d790a4f
RS
1674 int i, regno, nregs;
1675 rtx reg;
2f939d94
TP
1676
1677 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
d181c154 1678
61f71b34
DD
1679 if (fntype)
1680 switch (TREE_CODE (fntype))
1681 {
1682 case CALL_EXPR:
1683 fntype = get_callee_fndecl (fntype);
1684 fntype = fntype ? TREE_TYPE (fntype) : 0;
1685 break;
1686 case FUNCTION_DECL:
1687 fntype = TREE_TYPE (fntype);
1688 break;
1689 case FUNCTION_TYPE:
1690 case METHOD_TYPE:
1691 break;
1692 case IDENTIFIER_NODE:
1693 fntype = 0;
1694 break;
1695 default:
1696 /* We don't expect other tree codes here. */
0bccc606 1697 gcc_unreachable ();
61f71b34
DD
1698 }
1699
d7bf8ada
MM
1700 if (TREE_CODE (type) == VOID_TYPE)
1701 return 0;
cc77ae10
JM
1702 /* If the front end has decided that this needs to be passed by
1703 reference, do so. */
1704 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1705 && DECL_BY_REFERENCE (exp))
1706 return 1;
61f71b34 1707 if (targetm.calls.return_in_memory (type, fntype))
6f086dfc 1708 return 1;
956d6950 1709 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
1710 and thus can't be returned in registers. */
1711 if (TREE_ADDRESSABLE (type))
1712 return 1;
05e3bdb9 1713 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 1714 return 1;
9d790a4f
RS
1715 /* Make sure we have suitable call-clobbered regs to return
1716 the value in; if not, we must return it in memory. */
4dc07bd7 1717 reg = hard_function_value (type, 0, 0);
e71f7aa5
JW
1718
1719 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1720 it is OK. */
f8cfc6aa 1721 if (!REG_P (reg))
e71f7aa5
JW
1722 return 0;
1723
9d790a4f 1724 regno = REGNO (reg);
66fd46b6 1725 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
1726 for (i = 0; i < nregs; i++)
1727 if (! call_used_regs[regno + i])
1728 return 1;
6f086dfc
RS
1729 return 0;
1730}
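/* Illustrative example (the outcome is target-dependent): for
     struct big { char buf[64]; } f (void);
   the 64-byte struct does not normally fit in call-clobbered return
   registers, so aggregate_value_p returns 1 and callers pass a hidden
   address for the result; a plain  int g (void);  yields 0 and the
   value comes back in a register.  */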
1731\f
8fff4fc1
RH
1732/* Return true if we should assign DECL a pseudo register; false if it
1733 should live on the local stack. */
1734
1735bool
1736use_register_for_decl (tree decl)
1737{
1738 /* Honor volatile. */
1739 if (TREE_SIDE_EFFECTS (decl))
1740 return false;
1741
1742 /* Honor addressability. */
1743 if (TREE_ADDRESSABLE (decl))
1744 return false;
1745
1746 /* Only register-like things go in registers. */
1747 if (DECL_MODE (decl) == BLKmode)
1748 return false;
1749
1750 /* If -ffloat-store specified, don't put explicit float variables
1751 into registers. */
1752 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1753 propagates values across these stores, and it probably shouldn't. */
1754 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1755 return false;
1756
78e0d62b
RH
1757 /* If we're not interested in tracking debugging information for
1758 this decl, then we can certainly put it in a register. */
1759 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
1760 return true;
1761
8fff4fc1
RH
1762 return (optimize || DECL_REGISTER (decl));
1763}
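/* For illustration: a local  int i  whose address is never taken and
   which is not volatile passes every test above, so it is given a
   pseudo register when optimizing (or when declared 'register').
   Taking &i, declaring it volatile, or giving it a BLKmode type such
   as a large struct forces it to live on the stack instead.  */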
1764
0976078c
RH
1765/* Return true if TYPE should be passed by invisible reference. */
1766
1767bool
8cd5a4e0
RH
1768pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1769 tree type, bool named_arg)
0976078c
RH
1770{
1771 if (type)
1772 {
1773 /* If this type contains non-trivial constructors, then it is
1774 forbidden for the middle-end to create any new copies. */
1775 if (TREE_ADDRESSABLE (type))
1776 return true;
1777
d58247a3
RH
1778 /* GCC post 3.4 passes *all* variable sized types by reference. */
1779 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c
RH
1780 return true;
1781 }
1782
8cd5a4e0 1783 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
0976078c
RH
1784}
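/* Illustrative example: a C++ class type with a non-trivial copy
   constructor is typically marked TREE_ADDRESSABLE by the front end,
   so the check above forces it to be passed by invisible reference;
   likewise any type whose size is not a compile-time constant is
   passed by reference regardless of what the target hook says.  */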
1785
6cdd5672
RH
1786/* Return true if TYPE, which is passed by reference, should be callee
1787 copied instead of caller copied. */
1788
1789bool
1790reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1791 tree type, bool named_arg)
1792{
1793 if (type && TREE_ADDRESSABLE (type))
1794 return false;
1795 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1796}
1797
6071dc7f
RH
1798/* Structures to communicate between the subroutines of assign_parms.
1799 The first holds data persistent across all parameters, the second
1800 is cleared out for each parameter. */
6f086dfc 1801
6071dc7f 1802struct assign_parm_data_all
6f086dfc 1803{
6f086dfc 1804 CUMULATIVE_ARGS args_so_far;
6f086dfc 1805 struct args_size stack_args_size;
6071dc7f
RH
1806 tree function_result_decl;
1807 tree orig_fnargs;
1808 rtx conversion_insns;
1809 HOST_WIDE_INT pretend_args_size;
1810 HOST_WIDE_INT extra_pretend_bytes;
1811 int reg_parm_stack_space;
1812};
6f086dfc 1813
6071dc7f
RH
1814struct assign_parm_data_one
1815{
1816 tree nominal_type;
1817 tree passed_type;
1818 rtx entry_parm;
1819 rtx stack_parm;
1820 enum machine_mode nominal_mode;
1821 enum machine_mode passed_mode;
1822 enum machine_mode promoted_mode;
1823 struct locate_and_pad_arg_data locate;
1824 int partial;
1825 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
1826 BOOL_BITFIELD passed_pointer : 1;
1827 BOOL_BITFIELD on_stack : 1;
1828 BOOL_BITFIELD loaded_in_reg : 1;
1829};
ebb904cb 1830
6071dc7f 1831/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 1832
6071dc7f
RH
1833static void
1834assign_parms_initialize_all (struct assign_parm_data_all *all)
1835{
1836 tree fntype;
6f086dfc 1837
6071dc7f
RH
1838 memset (all, 0, sizeof (*all));
1839
1840 fntype = TREE_TYPE (current_function_decl);
1841
1842#ifdef INIT_CUMULATIVE_INCOMING_ARGS
1843 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1844#else
1845 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1846 current_function_decl, -1);
1847#endif
1848
1849#ifdef REG_PARM_STACK_SPACE
1850 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1851#endif
1852}
6f086dfc 1853
6071dc7f
RH
1854/* If ARGS contains entries with complex types, split the entry into two
1855 entries of the component type. Return a new list if substitutions are
1856 needed, else the old list. */
1857
1858static tree
1859split_complex_args (tree args)
1860{
1861 tree p;
1862
1863 /* Before allocating memory, check for the common case of no complex. */
1864 for (p = args; p; p = TREE_CHAIN (p))
1865 {
1866 tree type = TREE_TYPE (p);
1867 if (TREE_CODE (type) == COMPLEX_TYPE
1868 && targetm.calls.split_complex_arg (type))
1869 goto found;
1870 }
1871 return args;
1872
1873 found:
1874 args = copy_list (args);
1875
1876 for (p = args; p; p = TREE_CHAIN (p))
1877 {
1878 tree type = TREE_TYPE (p);
1879 if (TREE_CODE (type) == COMPLEX_TYPE
1880 && targetm.calls.split_complex_arg (type))
1881 {
1882 tree decl;
1883 tree subtype = TREE_TYPE (type);
6ccd356e 1884 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
1885
1886 /* Rewrite the PARM_DECL's type with its component. */
1887 TREE_TYPE (p) = subtype;
1888 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1889 DECL_MODE (p) = VOIDmode;
1890 DECL_SIZE (p) = NULL;
1891 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
1892 /* If this arg must go in memory, put it in a pseudo here.
1893 We can't allow it to go in memory as per normal parms,
1894 because the usual place might not have the imag part
1895 adjacent to the real part. */
1896 DECL_ARTIFICIAL (p) = addressable;
1897 DECL_IGNORED_P (p) = addressable;
1898 TREE_ADDRESSABLE (p) = 0;
6071dc7f
RH
1899 layout_decl (p, 0);
1900
1901 /* Build a second synthetic decl. */
1902 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1903 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
1904 DECL_ARTIFICIAL (decl) = addressable;
1905 DECL_IGNORED_P (decl) = addressable;
6071dc7f
RH
1906 layout_decl (decl, 0);
1907
1908 /* Splice it in; skip the new decl. */
1909 TREE_CHAIN (decl) = TREE_CHAIN (p);
1910 TREE_CHAIN (p) = decl;
1911 p = decl;
1912 }
1913 }
1914
1915 return args;
1916}
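/* For illustration: when the target splits complex arguments, a
   parameter declared  _Complex double z  is rewritten above into two
   consecutive DFmode PARM_DECLs holding the real and imaginary parts;
   assign_parms_unsplit_complex later stitches the pieces back into a
   CONCAT for the original decl.  */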
1917
1918/* A subroutine of assign_parms. Adjust the parameter list to incorporate
1919 the hidden struct return argument, and (abi willing) complex args.
1920 Return the new parameter list. */
1921
1922static tree
1923assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1924{
1925 tree fndecl = current_function_decl;
1926 tree fntype = TREE_TYPE (fndecl);
1927 tree fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
1928
1929 /* If struct value address is treated as the first argument, make it so. */
61f71b34 1930 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
6f086dfc 1931 && ! current_function_returns_pcc_struct
61f71b34 1932 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 1933 {
f9f29478 1934 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 1935 tree decl;
6f086dfc 1936
6071dc7f
RH
1937 decl = build_decl (PARM_DECL, NULL_TREE, type);
1938 DECL_ARG_TYPE (decl) = type;
1939 DECL_ARTIFICIAL (decl) = 1;
78e0d62b 1940 DECL_IGNORED_P (decl) = 1;
6f086dfc 1941
6071dc7f
RH
1942 TREE_CHAIN (decl) = fnargs;
1943 fnargs = decl;
1944 all->function_result_decl = decl;
6f086dfc 1945 }
718fe406 1946
6071dc7f 1947 all->orig_fnargs = fnargs;
ded9bf77 1948
42ba5130
RH
1949 /* If the target wants to split complex arguments into scalars, do so. */
1950 if (targetm.calls.split_complex_arg)
ded9bf77
AH
1951 fnargs = split_complex_args (fnargs);
1952
6071dc7f
RH
1953 return fnargs;
1954}
e7949876 1955
6071dc7f
RH
1956/* A subroutine of assign_parms. Examine PARM and pull out type and mode
1957 data for the parameter. Incorporate ABI specifics such as pass-by-
1958 reference and type promotion. */
6f086dfc 1959
6071dc7f
RH
1960static void
1961assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
1962 struct assign_parm_data_one *data)
1963{
1964 tree nominal_type, passed_type;
1965 enum machine_mode nominal_mode, passed_mode, promoted_mode;
6f086dfc 1966
6071dc7f
RH
1967 memset (data, 0, sizeof (*data));
1968
8117c488
NS
1969 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
1970 if (!current_function_stdarg)
1971 data->named_arg = 1; /* No variadic parms. */
1972 else if (TREE_CHAIN (parm))
1973 data->named_arg = 1; /* Not the last non-variadic parm. */
1974 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
1975 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 1976 else
8117c488 1977 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
1978
1979 nominal_type = TREE_TYPE (parm);
1980 passed_type = DECL_ARG_TYPE (parm);
1981
1982 /* Look out for errors propagating this far. Also, if the parameter's
1983 type is void then its value doesn't matter. */
1984 if (TREE_TYPE (parm) == error_mark_node
1985 /* This can happen after weird syntax errors
1986 or if an enum type is defined among the parms. */
1987 || TREE_CODE (parm) != PARM_DECL
1988 || passed_type == NULL
1989 || VOID_TYPE_P (nominal_type))
1990 {
1991 nominal_type = passed_type = void_type_node;
1992 nominal_mode = passed_mode = promoted_mode = VOIDmode;
1993 goto egress;
1994 }
108b7d3d 1995
6071dc7f
RH
1996 /* Find mode of arg as it is passed, and mode of arg as it should be
1997 during execution of this function. */
1998 passed_mode = TYPE_MODE (passed_type);
1999 nominal_mode = TYPE_MODE (nominal_type);
2000
2001 /* If the parm is to be passed as a transparent union, use the type of
2002 the first field for the tests below. We have already verified that
2003 the modes are the same. */
2004 if (DECL_TRANSPARENT_UNION (parm)
2005 || (TREE_CODE (passed_type) == UNION_TYPE
2006 && TYPE_TRANSPARENT_UNION (passed_type)))
2007 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2008
0976078c
RH
2009 /* See if this arg was passed by invisible reference. */
2010 if (pass_by_reference (&all->args_so_far, passed_mode,
2011 passed_type, data->named_arg))
6071dc7f
RH
2012 {
2013 passed_type = nominal_type = build_pointer_type (passed_type);
2014 data->passed_pointer = true;
2015 passed_mode = nominal_mode = Pmode;
2016 }
6f086dfc 2017
6071dc7f
RH
2018 /* Find mode as it is passed by the ABI. */
2019 promoted_mode = passed_mode;
2020 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2021 {
2022 int unsignedp = TYPE_UNSIGNED (passed_type);
2023 promoted_mode = promote_mode (passed_type, promoted_mode,
2024 &unsignedp, 1);
2025 }
6f086dfc 2026
6071dc7f
RH
2027 egress:
2028 data->nominal_type = nominal_type;
2029 data->passed_type = passed_type;
2030 data->nominal_mode = nominal_mode;
2031 data->passed_mode = passed_mode;
2032 data->promoted_mode = promoted_mode;
2033}
16bae307 2034
6071dc7f 2035/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2036
6071dc7f
RH
2037static void
2038assign_parms_setup_varargs (struct assign_parm_data_all *all,
2039 struct assign_parm_data_one *data, bool no_rtl)
2040{
2041 int varargs_pretend_bytes = 0;
2042
2043 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2044 data->promoted_mode,
2045 data->passed_type,
2046 &varargs_pretend_bytes, no_rtl);
2047
2048 /* If the back-end has requested extra stack space, record how much is
2049 needed. Do not change pretend_args_size otherwise since it may be
2050 nonzero from an earlier partial argument. */
2051 if (varargs_pretend_bytes > 0)
2052 all->pretend_args_size = varargs_pretend_bytes;
2053}
a53e14c0 2054
6071dc7f
RH
2055/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2056 the incoming location of the current parameter. */
2057
2058static void
2059assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2060 struct assign_parm_data_one *data)
2061{
2062 HOST_WIDE_INT pretend_bytes = 0;
2063 rtx entry_parm;
2064 bool in_regs;
2065
2066 if (data->promoted_mode == VOIDmode)
2067 {
2068 data->entry_parm = data->stack_parm = const0_rtx;
2069 return;
2070 }
a53e14c0 2071
6f086dfc 2072#ifdef FUNCTION_INCOMING_ARG
6071dc7f
RH
2073 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2074 data->passed_type, data->named_arg);
6f086dfc 2075#else
6071dc7f
RH
2076 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2077 data->passed_type, data->named_arg);
6f086dfc
RS
2078#endif
2079
6071dc7f
RH
2080 if (entry_parm == 0)
2081 data->promoted_mode = data->passed_mode;
6f086dfc 2082
6071dc7f
RH
2083 /* Determine parm's home in the stack, in case it arrives in the stack
2084 or we should pretend it did. Compute the stack position and rtx where
2085 the argument arrives and its size.
6f086dfc 2086
6071dc7f
RH
2087 There is one complexity here: If this was a parameter that would
2088 have been passed in registers, but wasn't only because it is
2089 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2090 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2091 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2092 as it was the previous time. */
2093 in_regs = entry_parm != 0;
6f086dfc 2094#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2095 in_regs = true;
e7949876 2096#endif
6071dc7f
RH
2097 if (!in_regs && !data->named_arg)
2098 {
2099 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
e7949876 2100 {
6071dc7f 2101 rtx tem;
6f086dfc 2102#ifdef FUNCTION_INCOMING_ARG
6071dc7f
RH
2103 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2104 data->passed_type, true);
6f086dfc 2105#else
6071dc7f
RH
2106 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2107 data->passed_type, true);
6f086dfc 2108#endif
6071dc7f 2109 in_regs = tem != NULL;
e7949876 2110 }
6071dc7f 2111 }
e7949876 2112
6071dc7f
RH
2113 /* If this parameter was passed both in registers and in the stack, use
2114 the copy on the stack. */
fe984136
RH
2115 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2116 data->passed_type))
6071dc7f 2117 entry_parm = 0;
e7949876 2118
6071dc7f
RH
2119 if (entry_parm)
2120 {
2121 int partial;
2122
78a52f11
RH
2123 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2124 data->promoted_mode,
2125 data->passed_type,
2126 data->named_arg);
6071dc7f
RH
2127 data->partial = partial;
2128
2129 /* The caller might already have allocated stack space for the
2130 register parameters. */
2131 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2132 {
6071dc7f
RH
2133 /* Part of this argument is passed in registers and part
2134 is passed on the stack. Ask the prologue code to extend
2135 the stack part so that we can recreate the full value.
2136
2137 PRETEND_BYTES is the size of the registers we need to store.
2138 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2139 stack space that the prologue should allocate.
2140
2141 Internally, gcc assumes that the argument pointer is aligned
2142 to STACK_BOUNDARY bits. This is used both for alignment
2143 optimizations (see init_emit) and to locate arguments that are
2144 aligned to more than PARM_BOUNDARY bits. We must preserve this
2145 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2146 a stack boundary. */
2147
2148 /* We assume at most one partial arg, and it must be the first
2149 argument on the stack. */
0bccc606 2150 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2151
78a52f11 2152 pretend_bytes = partial;
6071dc7f
RH
2153 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2154
2155 /* We want to align relative to the actual stack pointer, so
2156 don't include this in the stack size until later. */
2157 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2158 }
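/* For illustration: if 12 bytes of this argument arrived in registers
   (pretend_bytes == 12) and STACK_BYTES is 16, CEIL_ROUND rounds the
   pretend size up to 16, so the argument pointer keeps its
   STACK_BOUNDARY alignment.  */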
6071dc7f 2159 }
e7949876 2160
6071dc7f
RH
2161 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2162 entry_parm ? data->partial : 0, current_function_decl,
2163 &all->stack_args_size, &data->locate);
6f086dfc 2164
6071dc7f
RH
2165 /* Adjust offsets to include the pretend args. */
2166 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2167 data->locate.slot_offset.constant += pretend_bytes;
2168 data->locate.offset.constant += pretend_bytes;
ebca59c3 2169
6071dc7f
RH
2170 data->entry_parm = entry_parm;
2171}
6f086dfc 2172
6071dc7f
RH
2173/* A subroutine of assign_parms. If there is actually space on the stack
2174 for this parm, count it in stack_args_size and return true. */
6f086dfc 2175
6071dc7f
RH
2176static bool
2177assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2178 struct assign_parm_data_one *data)
2179{
2e6ae27f 2180 /* Trivially true if we've no incoming register. */
6071dc7f
RH
2181 if (data->entry_parm == NULL)
2182 ;
2183 /* Also true if we're partially in registers and partially not,
2184 since we've arranged to drop the entire argument on the stack. */
2185 else if (data->partial != 0)
2186 ;
2187 /* Also true if the target says that it's passed in both registers
2188 and on the stack. */
2189 else if (GET_CODE (data->entry_parm) == PARALLEL
2190 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2191 ;
2192 /* Also true if the target says that there's stack allocated for
2193 all register parameters. */
2194 else if (all->reg_parm_stack_space > 0)
2195 ;
2196 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2197 else
2198 return false;
6f086dfc 2199
6071dc7f
RH
2200 all->stack_args_size.constant += data->locate.size.constant;
2201 if (data->locate.size.var)
2202 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2203
6071dc7f
RH
2204 return true;
2205}
0d1416c6 2206
6071dc7f
RH
2207/* A subroutine of assign_parms. Given that this parameter is allocated
2208 stack space by the ABI, find it. */
6f086dfc 2209
6071dc7f
RH
2210static void
2211assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2212{
2213 rtx offset_rtx, stack_parm;
2214 unsigned int align, boundary;
6f086dfc 2215
6071dc7f
RH
2216 /* If we're passing this arg using a reg, make its stack home the
2217 aligned stack slot. */
2218 if (data->entry_parm)
2219 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2220 else
2221 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2222
2223 stack_parm = current_function_internal_arg_pointer;
2224 if (offset_rtx != const0_rtx)
2225 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2226 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2227
2228 set_mem_attributes (stack_parm, parm, 1);
2229
bfc45551
AM
2230 boundary = data->locate.boundary;
2231 align = BITS_PER_UNIT;
6071dc7f
RH
2232
2233 /* If we're padding upward, we know that the alignment of the slot
2234 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2235 intentionally forcing upward padding. Otherwise we have to come
2236 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2237 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f
RH
2238 align = boundary;
2239 else if (GET_CODE (offset_rtx) == CONST_INT)
2240 {
2241 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2242 align = align & -align;
2243 }
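/* The two statements above make a conservative alignment guess:
   OR-ing the bit offset with BOUNDARY and then taking align & -align
   isolates the lowest set bit.  For illustration, a 24-byte offset
   (192 bits) OR-ed with a 64-bit boundary gives 192, whose lowest set
   bit is 64, so the slot is known to be at least 64-bit aligned.  */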
bfc45551 2244 set_mem_align (stack_parm, align);
6071dc7f
RH
2245
2246 if (data->entry_parm)
2247 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2248
2249 data->stack_parm = stack_parm;
2250}
2251
2252/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2253 always valid and contiguous. */
2254
2255static void
2256assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2257{
2258 rtx entry_parm = data->entry_parm;
2259 rtx stack_parm = data->stack_parm;
2260
2261 /* If this parm was passed part in regs and part in memory, pretend it
2262 arrived entirely in memory by pushing the register-part onto the stack.
2263 In the special case of a DImode or DFmode that is split, we could put
2264 it together in a pseudoreg directly, but for now that's not worth
2265 bothering with. */
2266 if (data->partial != 0)
2267 {
2268 /* Handle calls that pass values in multiple non-contiguous
2269 locations. The Irix 6 ABI has examples of this. */
2270 if (GET_CODE (entry_parm) == PARALLEL)
2271 emit_group_store (validize_mem (stack_parm), entry_parm,
2272 data->passed_type,
2273 int_size_in_bytes (data->passed_type));
6f086dfc 2274 else
78a52f11
RH
2275 {
2276 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2277 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2278 data->partial / UNITS_PER_WORD);
2279 }
6f086dfc 2280
6071dc7f
RH
2281 entry_parm = stack_parm;
2282 }
6f086dfc 2283
6071dc7f
RH
2284 /* If we didn't decide this parm came in a register, by default it came
2285 on the stack. */
2286 else if (entry_parm == NULL)
2287 entry_parm = stack_parm;
2288
2289 /* When an argument is passed in multiple locations, we can't make use
2290 of this information, but we can save some copying if the whole argument
2291 is passed in a single register. */
2292 else if (GET_CODE (entry_parm) == PARALLEL
2293 && data->nominal_mode != BLKmode
2294 && data->passed_mode != BLKmode)
2295 {
2296 size_t i, len = XVECLEN (entry_parm, 0);
2297
2298 for (i = 0; i < len; i++)
2299 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2300 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2301 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2302 == data->passed_mode)
2303 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2304 {
2305 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2306 break;
2307 }
2308 }
e68a6ce1 2309
6071dc7f
RH
2310 data->entry_parm = entry_parm;
2311}
6f086dfc 2312
6071dc7f
RH
2313/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2314 always valid and properly aligned. */
6f086dfc 2315
6071dc7f
RH
2316static void
2317assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2318{
2319 rtx stack_parm = data->stack_parm;
2320
2321 /* If we can't trust the parm stack slot to be aligned enough for its
2322 ultimate type, don't use that slot after entry. We'll make another
2323 stack slot, if we need one. */
bfc45551
AM
2324 if (stack_parm
2325 && ((STRICT_ALIGNMENT
2326 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2327 || (data->nominal_type
2328 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2329 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2330 stack_parm = NULL;
2331
2332 /* If parm was passed in memory, and we need to convert it on entry,
2333 don't store it back in that same slot. */
2334 else if (data->entry_parm == stack_parm
2335 && data->nominal_mode != BLKmode
2336 && data->nominal_mode != data->passed_mode)
2337 stack_parm = NULL;
2338
2339 data->stack_parm = stack_parm;
2340}
a0506b54 2341
6071dc7f
RH
2342/* A subroutine of assign_parms. Return true if the current parameter
2343 should be stored as a BLKmode in the current frame. */
2344
2345static bool
2346assign_parm_setup_block_p (struct assign_parm_data_one *data)
2347{
2348 if (data->nominal_mode == BLKmode)
2349 return true;
2350 if (GET_CODE (data->entry_parm) == PARALLEL)
2351 return true;
531547e9 2352
6e985040 2353#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2354 /* Only assign_parm_setup_block knows how to deal with register arguments
2355 that are padded at the least significant end. */
2356 if (REG_P (data->entry_parm)
2357 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2358 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2359 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2360 return true;
6e985040 2361#endif
6071dc7f
RH
2362
2363 return false;
2364}
2365
2366/* A subroutine of assign_parms. Arrange for the parameter to be
2367 present and valid in DATA->STACK_RTL. */
2368
2369static void
27e29549
RH
2370assign_parm_setup_block (struct assign_parm_data_all *all,
2371 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2372{
2373 rtx entry_parm = data->entry_parm;
2374 rtx stack_parm = data->stack_parm;
bfc45551
AM
2375 HOST_WIDE_INT size;
2376 HOST_WIDE_INT size_stored;
17284759 2377 rtx orig_entry_parm = entry_parm;
6071dc7f 2378
27e29549
RH
2379 if (GET_CODE (entry_parm) == PARALLEL)
2380 entry_parm = emit_group_move_into_temps (entry_parm);
2381
6071dc7f
RH
2382 /* If we've a non-block object that's nevertheless passed in parts,
2383 reconstitute it in register operations rather than on the stack. */
2384 if (GET_CODE (entry_parm) == PARALLEL
640019aa 2385 && data->nominal_mode != BLKmode)
6071dc7f 2386 {
17284759 2387 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
6071dc7f 2388
640019aa
AH
2389 if ((XVECLEN (entry_parm, 0) > 1
2390 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2391 && use_register_for_decl (parm))
2392 {
2393 rtx parmreg = gen_reg_rtx (data->nominal_mode);
27e29549 2394
640019aa 2395 push_to_sequence (all->conversion_insns);
4af46a32 2396
640019aa
AH
2397 /* For values returned in multiple registers, handle possible
2398 incompatible calls to emit_group_store.
4af46a32 2399
640019aa
AH
2400 For example, the following would be invalid, and would have to
2401 be fixed by the conditional below:
4af46a32 2402
640019aa
AH
2403 emit_group_store ((reg:SF), (parallel:DF))
2404 emit_group_store ((reg:SI), (parallel:DI))
2405
2406 An example of this are doubles in e500 v2:
2407 (parallel:DF (expr_list (reg:SI) (const_int 0))
2408 (expr_list (reg:SI) (const_int 4))). */
2409 if (data->nominal_mode != data->passed_mode)
2410 {
2411 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2412 emit_group_store (t, entry_parm, NULL_TREE,
2413 GET_MODE_SIZE (GET_MODE (entry_parm)));
2414 convert_move (parmreg, t, 0);
2415 }
2416 else
2417 emit_group_store (parmreg, entry_parm, data->nominal_type,
2418 int_size_in_bytes (data->nominal_type));
27e29549 2419
640019aa
AH
2420 all->conversion_insns = get_insns ();
2421 end_sequence ();
27e29549 2422
640019aa
AH
2423 SET_DECL_RTL (parm, parmreg);
2424 return;
2425 }
6071dc7f
RH
2426 }
2427
bfc45551
AM
2428 size = int_size_in_bytes (data->passed_type);
2429 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2430 if (stack_parm == 0)
2431 {
a561d88b 2432 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2433 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2434 DECL_ALIGN (parm));
bfc45551
AM
2435 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2436 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2437 set_mem_attributes (stack_parm, parm, 1);
2438 }
2439
6071dc7f
RH
2440 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2441 calls that pass values in multiple non-contiguous locations. */
2442 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2443 {
6071dc7f
RH
2444 rtx mem;
2445
2446 /* Note that we will be storing an integral number of words.
2447 So we have to be careful to ensure that we allocate an
bfc45551 2448 integral number of words. We do this above when we call
6071dc7f
RH
2449 assign_stack_local if space was not allocated in the argument
2450 list. If it was, this will not work if PARM_BOUNDARY is not
2451 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2452 if it becomes a problem. Exception is when BLKmode arrives
2453 with arguments not conforming to word_mode. */
2454
bfc45551
AM
2455 if (data->stack_parm == 0)
2456 ;
6071dc7f
RH
2457 else if (GET_CODE (entry_parm) == PARALLEL)
2458 ;
0bccc606
NS
2459 else
2460 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2461
6071dc7f 2462 mem = validize_mem (stack_parm);
c6b97fac 2463
6071dc7f
RH
2464 /* Handle values in multiple non-contiguous locations. */
2465 if (GET_CODE (entry_parm) == PARALLEL)
27e29549
RH
2466 {
2467 push_to_sequence (all->conversion_insns);
2468 emit_group_store (mem, entry_parm, data->passed_type, size);
2469 all->conversion_insns = get_insns ();
2470 end_sequence ();
2471 }
c6b97fac 2472
6071dc7f
RH
2473 else if (size == 0)
2474 ;
5c07bd7a 2475
6071dc7f
RH
2476 /* If SIZE is that of a mode no bigger than a word, just use
2477 that mode's store operation. */
2478 else if (size <= UNITS_PER_WORD)
2479 {
2480 enum machine_mode mode
2481 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2482
6071dc7f 2483 if (mode != BLKmode
6e985040 2484#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2485 && (size == UNITS_PER_WORD
2486 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2487 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2488#endif
6071dc7f
RH
2489 )
2490 {
2491 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2492 emit_move_insn (change_address (mem, mode, 0), reg);
2493 }
c6b97fac 2494
6071dc7f
RH
2495 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2496 machine must be aligned to the left before storing
2497 to memory. Note that the previous test doesn't
2498 handle all cases (e.g. SIZE == 3). */
2499 else if (size != UNITS_PER_WORD
6e985040 2500#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2501 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2502 == downward)
6e985040 2503#else
6071dc7f 2504 && BYTES_BIG_ENDIAN
6e985040 2505#endif
6071dc7f
RH
2506 )
2507 {
2508 rtx tem, x;
2509 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2510 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2511
09b52670 2512 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
7d60be94 2513 build_int_cst (NULL_TREE, by),
4a90aeeb 2514 NULL_RTX, 1);
6071dc7f
RH
2515 tem = change_address (mem, word_mode, 0);
2516 emit_move_insn (tem, x);
6f086dfc 2517 }
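/* For illustration: for a 3-byte argument with 4-byte words, BY is 8
   bits, so the value is shifted up by one byte before the word_mode
   store, leaving the three significant bytes at the most significant
   end of the word, which is where the memory image of the block must
   begin on a big-endian machine.  */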
6071dc7f 2518 else
27e29549 2519 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2520 size_stored / UNITS_PER_WORD);
6f086dfc 2521 }
6071dc7f 2522 else
27e29549 2523 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2524 size_stored / UNITS_PER_WORD);
2525 }
bfc45551
AM
2526 else if (data->stack_parm == 0)
2527 {
2528 push_to_sequence (all->conversion_insns);
2529 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2530 BLOCK_OP_NORMAL);
2531 all->conversion_insns = get_insns ();
2532 end_sequence ();
2533 }
6071dc7f 2534
bfc45551 2535 data->stack_parm = stack_parm;
6071dc7f
RH
2536 SET_DECL_RTL (parm, stack_parm);
2537}
2538
2539/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2540 parameter. Get it there. Perform all ABI specified conversions. */
2541
2542static void
2543assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2544 struct assign_parm_data_one *data)
2545{
2546 rtx parmreg;
2547 enum machine_mode promoted_nominal_mode;
2548 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2549 bool did_conversion = false;
2550
2551 /* Store the parm in a pseudoregister during the function, but we may
2552 need to do it in a wider mode. */
2553
2554 promoted_nominal_mode
2555 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2556
2557 parmreg = gen_reg_rtx (promoted_nominal_mode);
2558
2559 if (!DECL_ARTIFICIAL (parm))
2560 mark_user_reg (parmreg);
2561
2562 /* If this was an item that we received a pointer to,
2563 set DECL_RTL appropriately. */
2564 if (data->passed_pointer)
2565 {
2566 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2567 set_mem_attributes (x, parm, 1);
2568 SET_DECL_RTL (parm, x);
2569 }
2570 else
389fdba0 2571 SET_DECL_RTL (parm, parmreg);
6071dc7f
RH
2572
2573 /* Copy the value into the register. */
2574 if (data->nominal_mode != data->passed_mode
2575 || promoted_nominal_mode != data->promoted_mode)
2576 {
2577 int save_tree_used;
2578
2579 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2580 mode, by the caller. We now have to convert it to
2581 NOMINAL_MODE, if different. However, PARMREG may be in
2582 a different mode than NOMINAL_MODE if it is being stored
2583 promoted.
2584
2585 If ENTRY_PARM is a hard register, it might be in a register
2586 not valid for operating in its mode (e.g., an odd-numbered
2587 register for a DFmode). In that case, moves are the only
2588 thing valid, so we can't do a convert from there. This
2589 occurs when the calling sequence allows such misaligned
2590 usages.
2591
2592 In addition, the conversion may involve a call, which could
2593 clobber parameters which haven't been copied to pseudo
2594 registers yet. Therefore, we must first copy the parm to
2595 a pseudo reg here, and save the conversion until after all
2596 parameters have been moved. */
2597
2598 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2599
2600 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2601
2602 push_to_sequence (all->conversion_insns);
2603 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2604
2605 if (GET_CODE (tempreg) == SUBREG
2606 && GET_MODE (tempreg) == data->nominal_mode
2607 && REG_P (SUBREG_REG (tempreg))
2608 && data->nominal_mode == data->passed_mode
2609 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2610 && GET_MODE_SIZE (GET_MODE (tempreg))
2611 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 2612 {
6071dc7f
RH
2613 /* The argument is already sign/zero extended, so note it
2614 into the subreg. */
2615 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2616 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2617 }
00d8a4c1 2618
6071dc7f
RH
2619 /* TREE_USED gets set erroneously during expand_assignment. */
2620 save_tree_used = TREE_USED (parm);
e836a5a2 2621 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
6071dc7f
RH
2622 TREE_USED (parm) = save_tree_used;
2623 all->conversion_insns = get_insns ();
2624 end_sequence ();
00d8a4c1 2625
6071dc7f
RH
2626 did_conversion = true;
2627 }
2628 else
2629 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2630
2631 /* If we were passed a pointer but the actual value can safely live
2632 in a register, put it in one. */
2633 if (data->passed_pointer
2634 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2635 /* If by-reference argument was promoted, demote it. */
2636 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2637 || use_register_for_decl (parm)))
2638 {
2639 /* We can't use nominal_mode, because it will have been set to
2640 Pmode above. We must use the actual mode of the parm. */
2641 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2642 mark_user_reg (parmreg);
cd5b3469 2643
6071dc7f
RH
2644 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2645 {
2646 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2647 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2648
2649 push_to_sequence (all->conversion_insns);
2650 emit_move_insn (tempreg, DECL_RTL (parm));
2651 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2652 emit_move_insn (parmreg, tempreg);
27e29549 2653 all->conversion_insns = get_insns ();
6071dc7f 2654 end_sequence ();
6f086dfc 2655
6071dc7f
RH
2656 did_conversion = true;
2657 }
2658 else
2659 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 2660
6071dc7f 2661 SET_DECL_RTL (parm, parmreg);
797a6ac1 2662
6071dc7f
RH
2663 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2664 now the parm. */
2665 data->stack_parm = NULL;
2666 }
ddef6bc7 2667
6071dc7f
RH
2668 /* Mark the register as eliminable if we did no conversion and it was
2669 copied from memory at a fixed offset, and the arg pointer was not
2670 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2671 offset formed an invalid address, such memory-equivalences as we
2672 make here would screw up life analysis for it. */
2673 if (data->nominal_mode == data->passed_mode
2674 && !did_conversion
2675 && data->stack_parm != 0
2676 && MEM_P (data->stack_parm)
2677 && data->locate.offset.var == 0
2678 && reg_mentioned_p (virtual_incoming_args_rtx,
2679 XEXP (data->stack_parm, 0)))
2680 {
2681 rtx linsn = get_last_insn ();
2682 rtx sinsn, set;
a03caf76 2683
6071dc7f
RH
2684 /* Mark complex types separately. */
2685 if (GET_CODE (parmreg) == CONCAT)
2686 {
2687 enum machine_mode submode
2688 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
2689 int regnor = REGNO (XEXP (parmreg, 0));
2690 int regnoi = REGNO (XEXP (parmreg, 1));
2691 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2692 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2693 GET_MODE_SIZE (submode));
6071dc7f
RH
2694
2695 /* Scan backwards for the set of the real and
2696 imaginary parts. */
2697 for (sinsn = linsn; sinsn != 0;
2698 sinsn = prev_nonnote_insn (sinsn))
2699 {
2700 set = single_set (sinsn);
2701 if (set == 0)
2702 continue;
2703
2704 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2705 REG_NOTES (sinsn)
2706 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2707 REG_NOTES (sinsn));
2708 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2709 REG_NOTES (sinsn)
2710 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2711 REG_NOTES (sinsn));
a03caf76 2712 }
6071dc7f
RH
2713 }
2714 else if ((set = single_set (linsn)) != 0
2715 && SET_DEST (set) == parmreg)
2716 REG_NOTES (linsn)
2717 = gen_rtx_EXPR_LIST (REG_EQUIV,
2718 data->stack_parm, REG_NOTES (linsn));
2719 }
2720
2721 /* For pointer data type, suggest pointer register. */
2722 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2723 mark_reg_pointer (parmreg,
2724 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2725}
2726
2727/* A subroutine of assign_parms. Allocate stack space to hold the current
2728 parameter. Get it there. Perform all ABI specified conversions. */
2729
2730static void
2731assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2732 struct assign_parm_data_one *data)
2733{
2734 /* Value must be stored in the stack slot STACK_PARM during function
2735 execution. */
bfc45551 2736 bool to_conversion = false;
6071dc7f
RH
2737
2738 if (data->promoted_mode != data->nominal_mode)
2739 {
2740 /* Conversion is required. */
2741 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 2742
6071dc7f
RH
2743 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2744
2745 push_to_sequence (all->conversion_insns);
bfc45551
AM
2746 to_conversion = true;
2747
6071dc7f
RH
2748 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2749 TYPE_UNSIGNED (TREE_TYPE (parm)));
2750
2751 if (data->stack_parm)
2752 /* ??? This may need a big-endian conversion on sparc64. */
2753 data->stack_parm
2754 = adjust_address (data->stack_parm, data->nominal_mode, 0);
6071dc7f
RH
2755 }
2756
2757 if (data->entry_parm != data->stack_parm)
2758 {
bfc45551
AM
2759 rtx src, dest;
2760
6071dc7f
RH
2761 if (data->stack_parm == 0)
2762 {
2763 data->stack_parm
2764 = assign_stack_local (GET_MODE (data->entry_parm),
2765 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
bfc45551 2766 TYPE_ALIGN (data->passed_type));
6071dc7f 2767 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 2768 }
6071dc7f 2769
bfc45551
AM
2770 dest = validize_mem (data->stack_parm);
2771 src = validize_mem (data->entry_parm);
2772
2773 if (MEM_P (src))
6f086dfc 2774 {
bfc45551
AM
2775 /* Use a block move to handle potentially misaligned entry_parm. */
2776 if (!to_conversion)
2777 push_to_sequence (all->conversion_insns);
2778 to_conversion = true;
2779
2780 emit_block_move (dest, src,
2781 GEN_INT (int_size_in_bytes (data->passed_type)),
2782 BLOCK_OP_NORMAL);
6071dc7f
RH
2783 }
2784 else
bfc45551
AM
2785 emit_move_insn (dest, src);
2786 }
2787
2788 if (to_conversion)
2789 {
2790 all->conversion_insns = get_insns ();
2791 end_sequence ();
6071dc7f 2792 }
6f086dfc 2793
6071dc7f
RH
2794 SET_DECL_RTL (parm, data->stack_parm);
2795}
3412b298 2796
6071dc7f
RH
2797/* A subroutine of assign_parms. If the ABI splits complex arguments, then
2798 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 2799
6071dc7f 2800static void
6ccd356e 2801assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
6071dc7f
RH
2802{
2803 tree parm;
6ccd356e 2804 tree orig_fnargs = all->orig_fnargs;
f4ef873c 2805
6071dc7f
RH
2806 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2807 {
2808 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2809 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2810 {
2811 rtx tmp, real, imag;
2812 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 2813
6071dc7f
RH
2814 real = DECL_RTL (fnargs);
2815 imag = DECL_RTL (TREE_CHAIN (fnargs));
2816 if (inner != GET_MODE (real))
6f086dfc 2817 {
6071dc7f
RH
2818 real = gen_lowpart_SUBREG (inner, real);
2819 imag = gen_lowpart_SUBREG (inner, imag);
2820 }
6ccd356e
AM
2821
2822 if (TREE_ADDRESSABLE (parm))
2823 {
2824 rtx rmem, imem;
2825 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2826
2827 /* split_complex_arg put the real and imag parts in
2828 pseudos. Move them to memory. */
bfc45551
AM
2829 tmp = assign_stack_local (DECL_MODE (parm), size,
2830 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
2831 set_mem_attributes (tmp, parm, 1);
2832 rmem = adjust_address_nv (tmp, inner, 0);
2833 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2834 push_to_sequence (all->conversion_insns);
2835 emit_move_insn (rmem, real);
2836 emit_move_insn (imem, imag);
2837 all->conversion_insns = get_insns ();
2838 end_sequence ();
2839 }
2840 else
2841 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 2842 SET_DECL_RTL (parm, tmp);
7e41ffa2 2843
6071dc7f
RH
2844 real = DECL_INCOMING_RTL (fnargs);
2845 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2846 if (inner != GET_MODE (real))
2847 {
2848 real = gen_lowpart_SUBREG (inner, real);
2849 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 2850 }
6071dc7f
RH
2851 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2852 set_decl_incoming_rtl (parm, tmp);
2853 fnargs = TREE_CHAIN (fnargs);
2854 }
2855 else
2856 {
2857 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2858 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
6f086dfc 2859
6071dc7f
RH
2860 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2861 instead of the copy of decl, i.e. FNARGS. */
2862 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2863 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
6f086dfc 2864 }
6071dc7f
RH
2865
2866 fnargs = TREE_CHAIN (fnargs);
6f086dfc 2867 }
6071dc7f
RH
2868}
2869
2870/* Assign RTL expressions to the function's parameters. This may involve
2871 copying them into registers and using those registers as the DECL_RTL. */
2872
6fe79279 2873static void
6071dc7f
RH
2874assign_parms (tree fndecl)
2875{
2876 struct assign_parm_data_all all;
2877 tree fnargs, parm;
2878 rtx internal_arg_pointer;
6f086dfc 2879
6071dc7f
RH
2880 /* If the reg that the virtual arg pointer will be translated into is
2881 not a fixed reg or is the stack pointer, make a copy of the virtual
2882 arg pointer, and address parms via the copy. The frame pointer is
2883 considered fixed even though it is not marked as such.
2884
2885 The second time through, simply use ap to avoid generating rtx. */
2886
2887 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2888 || ! (fixed_regs[ARG_POINTER_REGNUM]
2889 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
2890 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2891 else
2892 internal_arg_pointer = virtual_incoming_args_rtx;
2893 current_function_internal_arg_pointer = internal_arg_pointer;
2894
2895 assign_parms_initialize_all (&all);
2896 fnargs = assign_parms_augmented_arg_list (&all);
2897
2898 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
ded9bf77 2899 {
6071dc7f
RH
2900 struct assign_parm_data_one data;
2901
2902 /* Extract the type of PARM; adjust it according to ABI. */
2903 assign_parm_find_data_types (&all, parm, &data);
2904
2905 /* Early out for errors and void parameters. */
2906 if (data.passed_mode == VOIDmode)
ded9bf77 2907 {
6071dc7f
RH
2908 SET_DECL_RTL (parm, const0_rtx);
2909 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2910 continue;
2911 }
196c42cd 2912
8117c488
NS
2913 if (current_function_stdarg && !TREE_CHAIN (parm))
2914 assign_parms_setup_varargs (&all, &data, false);
196c42cd 2915
6071dc7f
RH
2916 /* Find out where the parameter arrives in this function. */
2917 assign_parm_find_entry_rtl (&all, &data);
2918
2919 /* Find out where stack space for this parameter might be. */
2920 if (assign_parm_is_stack_parm (&all, &data))
2921 {
2922 assign_parm_find_stack_rtl (parm, &data);
2923 assign_parm_adjust_entry_rtl (&data);
ded9bf77 2924 }
6071dc7f
RH
2925
2926 /* Record permanently how this parm was passed. */
2927 set_decl_incoming_rtl (parm, data.entry_parm);
2928
2929 /* Update info on where next arg arrives in registers. */
2930 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2931 data.passed_type, data.named_arg);
2932
2933 assign_parm_adjust_stack_rtl (&data);
2934
2935 if (assign_parm_setup_block_p (&data))
27e29549 2936 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
2937 else if (data.passed_pointer || use_register_for_decl (parm))
2938 assign_parm_setup_reg (&all, parm, &data);
2939 else
2940 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
2941 }
2942
6071dc7f 2943 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
6ccd356e 2944 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 2945
3412b298
JW
2946 /* Output all parameter conversion instructions (possibly including calls)
2947 now that all parameters have been copied out of hard registers. */
6071dc7f 2948 emit_insn (all.conversion_insns);
3412b298 2949
b36a8cc2
OH
2950 /* If we are receiving a struct value address as the first argument, set up
2951 the RTL for the function result. As this might require code to convert
2952 the transmitted address to Pmode, we do this here to ensure that possible
2953 preliminary conversions of the address have been emitted already. */
6071dc7f 2954 if (all.function_result_decl)
b36a8cc2 2955 {
6071dc7f
RH
2956 tree result = DECL_RESULT (current_function_decl);
2957 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 2958 rtx x;
fa8db1f7 2959
cc77ae10
JM
2960 if (DECL_BY_REFERENCE (result))
2961 x = addr;
2962 else
2963 {
2964 addr = convert_memory_address (Pmode, addr);
2965 x = gen_rtx_MEM (DECL_MODE (result), addr);
2966 set_mem_attributes (x, result, 1);
2967 }
b36a8cc2
OH
2968 SET_DECL_RTL (result, x);
2969 }
2970
53c428d0 2971 /* We have aligned all the args, so add space for the pretend args. */
6071dc7f
RH
2972 current_function_pretend_args_size = all.pretend_args_size;
2973 all.stack_args_size.constant += all.extra_pretend_bytes;
2974 current_function_args_size = all.stack_args_size.constant;
6f086dfc
RS
2975
2976 /* Adjust function incoming argument size for alignment and
2977 minimum length. */
2978
2979#ifdef REG_PARM_STACK_SPACE
2980 current_function_args_size = MAX (current_function_args_size,
2981 REG_PARM_STACK_SPACE (fndecl));
6f90e075 2982#endif
6f086dfc 2983
4433e339
RH
2984 current_function_args_size
2985 = ((current_function_args_size + STACK_BYTES - 1)
2986 / STACK_BYTES) * STACK_BYTES;
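/* For illustration: 20 bytes of incoming arguments with a STACK_BYTES
   of 16 rounds current_function_args_size up to 32.  */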
4433e339 2987
6f086dfc
RS
2988#ifdef ARGS_GROW_DOWNWARD
2989 current_function_arg_offset_rtx
477eff96 2990 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
2991 : expand_expr (size_diffop (all.stack_args_size.var,
2992 size_int (-all.stack_args_size.constant)),
a57263bc 2993 NULL_RTX, VOIDmode, 0));
6f086dfc 2994#else
6071dc7f 2995 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
2996#endif
2997
2998 /* See how many bytes, if any, of its args a function should try to pop
2999 on return. */
3000
64e6d9cc 3001 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
6f086dfc
RS
3002 current_function_args_size);
3003
3b69d50e
RK
3004 /* For a stdarg.h function, save info about
3005 regs and stack space used by the named args. */
6f086dfc 3006
6071dc7f 3007 current_function_args_info = all.args_so_far;
6f086dfc
RS
3008
3009 /* Set the rtx used for the function return value. Put this in its
3010 own variable so any optimizers that need this information don't have
3011 to include tree.h. Do this here so it gets done when an inlined
3012 function gets output. */
3013
19e7881c
MM
3014 current_function_return_rtx
3015 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3016 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3017
3018 /* If scalar return value was computed in a pseudo-reg, or was a named
3019 return value that got dumped to the stack, copy that to the hard
3020 return register. */
3021 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3022 {
3023 tree decl_result = DECL_RESULT (fndecl);
3024 rtx decl_rtl = DECL_RTL (decl_result);
3025
3026 if (REG_P (decl_rtl)
3027 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3028 : DECL_REGISTER (decl_result))
3029 {
3030 rtx real_decl_rtl;
3031
3032#ifdef FUNCTION_OUTGOING_VALUE
3033 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3034 fndecl);
3035#else
3036 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3037 fndecl);
3038#endif
3039 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3040 /* The delay slot scheduler assumes that current_function_return_rtx
3041 holds the hard register containing the return value, not a
3042 temporary pseudo. */
3043 current_function_return_rtx = real_decl_rtl;
3044 }
3045 }
6f086dfc 3046}
4744afba
RH
3047
3048/* A subroutine of gimplify_parameters, invoked via walk_tree.
3049 For all seen types, gimplify their sizes. */
3050
3051static tree
3052gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3053{
3054 tree t = *tp;
3055
3056 *walk_subtrees = 0;
3057 if (TYPE_P (t))
3058 {
3059 if (POINTER_TYPE_P (t))
3060 *walk_subtrees = 1;
ad50bc8d
RH
3061 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3062 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba
RH
3063 {
3064 gimplify_type_sizes (t, (tree *) data);
3065 *walk_subtrees = 1;
3066 }
3067 }
3068
3069 return NULL;
3070}
3071
3072/* Gimplify the parameter list for current_function_decl. This involves
3073 evaluating SAVE_EXPRs of variable sized parameters and generating code
3074 to implement callee-copies reference parameters. Returns a list of
3075 statements to add to the beginning of the function, or NULL if nothing
3076 to do. */
3077
3078tree
3079gimplify_parameters (void)
3080{
3081 struct assign_parm_data_all all;
3082 tree fnargs, parm, stmts = NULL;
3083
3084 assign_parms_initialize_all (&all);
3085 fnargs = assign_parms_augmented_arg_list (&all);
3086
3087 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3088 {
3089 struct assign_parm_data_one data;
3090
3091 /* Extract the type of PARM; adjust it according to ABI. */
3092 assign_parm_find_data_types (&all, parm, &data);
3093
3094 /* Early out for errors and void parameters. */
3095 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3096 continue;
3097
3098 /* Update info on where next arg arrives in registers. */
3099 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3100 data.passed_type, data.named_arg);
3101
3102 /* ??? Once upon a time variable_size stuffed parameter list
3103 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3104 turned out to be less than manageable in the gimple world.
3105 Now we have to hunt them down ourselves. */
3106 walk_tree_without_duplicates (&data.passed_type,
3107 gimplify_parm_type, &stmts);
3108
3109 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3110 {
3111 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3112 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3113 }
3114
3115 if (data.passed_pointer)
3116 {
3117 tree type = TREE_TYPE (data.passed_type);
3118 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3119 type, data.named_arg))
3120 {
3121 tree local, t;
3122
3123 /* For constant sized objects, this is trivial; for
3124 variable-sized objects, we have to play games. */
3125 if (TREE_CONSTANT (DECL_SIZE (parm)))
3126 {
3127 local = create_tmp_var (type, get_name (parm));
3128 DECL_IGNORED_P (local) = 0;
3129 }
3130 else
3131 {
3132 tree ptr_type, addr, args;
3133
3134 ptr_type = build_pointer_type (type);
3135 addr = create_tmp_var (ptr_type, get_name (parm));
3136 DECL_IGNORED_P (addr) = 0;
3137 local = build_fold_indirect_ref (addr);
3138
3139 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3140 t = built_in_decls[BUILT_IN_ALLOCA];
3141 t = build_function_call_expr (t, args);
3142 t = fold_convert (ptr_type, t);
3143 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3144 gimplify_and_add (t, &stmts);
3145 }
3146
3147 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3148 gimplify_and_add (t, &stmts);
3149
833b3afe
DB
3150 SET_DECL_VALUE_EXPR (parm, local);
3151 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3152 }
3153 }
3154 }
3155
3156 return stmts;
3157}
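/* Illustrative note (a sketch, not compiler code): a GNU C parameter
   involving a variable-length array type, e.g.

       void
       scale (int n, double a[n][n])
       {
         a[0][0] *= 2.0;
       }

   has a TYPE_SIZE that is a SAVE_EXPR rather than an INTEGER_CST, so the
   walk above is where such sizes get gimplified into statements that are
   prepended to the function body.  */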
6f086dfc 3158\f
75dc3319
RK
3159/* Indicate whether REGNO is an incoming argument to the current function
3160 that was promoted to a wider mode. If so, return the RTX for the
3161 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3162 that REGNO is promoted from and whether the promotion was signed or
3163 unsigned. */
3164
75dc3319 3165rtx
fa8db1f7 3166promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
75dc3319
RK
3167{
3168 tree arg;
3169
3170 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3171 arg = TREE_CHAIN (arg))
f8cfc6aa 3172 if (REG_P (DECL_INCOMING_RTL (arg))
621061f4
RK
3173 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3174 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
75dc3319
RK
3175 {
3176 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
8df83eae 3177 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
75dc3319 3178
a5a52dbc 3179 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
75dc3319
RK
3180 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3181 && mode != DECL_MODE (arg))
3182 {
3183 *pmode = DECL_MODE (arg);
3184 *punsignedp = unsignedp;
3185 return DECL_INCOMING_RTL (arg);
3186 }
3187 }
3188
3189 return 0;
3190}
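/* Illustrative use (a hypothetical caller, assuming a target whose
   PROMOTE_MODE widens sub-word arguments):

       enum machine_mode mode;
       int unsignedp;
       rtx incoming = promoted_input_arg (3, &mode, &unsignedp);

   If hard register 3 carries a "short" argument that arrived widened to
   SImode, INCOMING is its DECL_INCOMING_RTL, MODE is the narrow declared
   mode (HImode) and UNSIGNEDP reflects the signedness used for the
   widening; otherwise INCOMING is zero.  */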
3191
75dc3319 3192\f
6f086dfc
RS
3193/* Compute the size and offset from the start of the stacked arguments for a
3194 parm passed in mode PASSED_MODE and with type TYPE.
3195
3196 INITIAL_OFFSET_PTR points to the current offset into the stacked
3197 arguments.
3198
e7949876
AM
3199 The starting offset and size for this parm are returned in
3200 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3201 nonzero, the offset is that of the stack slot, which is returned in
3202 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3203 padding required from the initial offset ptr to the stack slot.
6f086dfc 3204
cc2902df 3205 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3206 never be set if REG_PARM_STACK_SPACE is not defined.
3207
3208 FNDECL is the function in which the argument was defined.
3209
3210 There are two types of rounding that are done. The first, controlled by
3211 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3212 list to be aligned to the specified boundary (in bits). This rounding
3213 affects the initial and starting offsets, but not the argument size.
3214
3215 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3216 optionally rounds the size of the parm to PARM_BOUNDARY. The
3217 initial offset is not affected by this rounding, while the size always
3218 is and the starting offset may be. */
3219
e7949876
AM
3220/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3221 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3222 callers pass in the total size of args so far as
e7949876 3223 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3224
6f086dfc 3225void
fa8db1f7
AJ
3226locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3227 int partial, tree fndecl ATTRIBUTE_UNUSED,
3228 struct args_size *initial_offset_ptr,
3229 struct locate_and_pad_arg_data *locate)
6f086dfc 3230{
e7949876
AM
3231 tree sizetree;
3232 enum direction where_pad;
3233 int boundary;
3234 int reg_parm_stack_space = 0;
3235 int part_size_in_regs;
6f086dfc
RS
3236
3237#ifdef REG_PARM_STACK_SPACE
e7949876 3238 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
e7949876 3239
6f086dfc
RS
3240 /* If we have found a stack parm before we reach the end of the
3241 area reserved for registers, skip that area. */
3242 if (! in_regs)
3243 {
6f086dfc
RS
3244 if (reg_parm_stack_space > 0)
3245 {
3246 if (initial_offset_ptr->var)
3247 {
3248 initial_offset_ptr->var
3249 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3250 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3251 initial_offset_ptr->constant = 0;
3252 }
3253 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3254 initial_offset_ptr->constant = reg_parm_stack_space;
3255 }
3256 }
3257#endif /* REG_PARM_STACK_SPACE */
3258
78a52f11 3259 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
3260
3261 sizetree
3262 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3263 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3264 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
6e985040 3265 locate->where_pad = where_pad;
bfc45551 3266 locate->boundary = boundary;
6f086dfc
RS
3267
3268#ifdef ARGS_GROW_DOWNWARD
e7949876 3269 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 3270 if (initial_offset_ptr->var)
e7949876
AM
3271 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3272 initial_offset_ptr->var);
9dff28ab 3273
e7949876
AM
3274 {
3275 tree s2 = sizetree;
3276 if (where_pad != none
3277 && (!host_integerp (sizetree, 1)
3278 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3279 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3280 SUB_PARM_SIZE (locate->slot_offset, s2);
3281 }
3282
3283 locate->slot_offset.constant += part_size_in_regs;
9dff28ab
JDA
3284
3285 if (!in_regs
3286#ifdef REG_PARM_STACK_SPACE
3287 || REG_PARM_STACK_SPACE (fndecl) > 0
3288#endif
3289 )
e7949876
AM
3290 pad_to_arg_alignment (&locate->slot_offset, boundary,
3291 &locate->alignment_pad);
9dff28ab 3292
e7949876
AM
3293 locate->size.constant = (-initial_offset_ptr->constant
3294 - locate->slot_offset.constant);
6f086dfc 3295 if (initial_offset_ptr->var)
e7949876
AM
3296 locate->size.var = size_binop (MINUS_EXPR,
3297 size_binop (MINUS_EXPR,
3298 ssize_int (0),
3299 initial_offset_ptr->var),
3300 locate->slot_offset.var);
3301
3302 /* Pad_below needs the pre-rounded size to know how much to pad
3303 below. */
3304 locate->offset = locate->slot_offset;
3305 if (where_pad == downward)
3306 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 3307
6f086dfc 3308#else /* !ARGS_GROW_DOWNWARD */
832ea3b3
FS
3309 if (!in_regs
3310#ifdef REG_PARM_STACK_SPACE
3311 || REG_PARM_STACK_SPACE (fndecl) > 0
3312#endif
3313 )
e7949876
AM
3314 pad_to_arg_alignment (initial_offset_ptr, boundary,
3315 &locate->alignment_pad);
3316 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
3317
3318#ifdef PUSH_ROUNDING
3319 if (passed_mode != BLKmode)
3320 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3321#endif
3322
d4b0a7a0
DE
3323 /* Pad_below needs the pre-rounded size to know how much to pad below,
3324 so this must be done before rounding up. */
e7949876
AM
3325 locate->offset = locate->slot_offset;
3326 if (where_pad == downward)
3327 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 3328
6f086dfc 3329 if (where_pad != none
1468899d
RK
3330 && (!host_integerp (sizetree, 1)
3331 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
6f086dfc
RS
3332 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3333
e7949876
AM
3334 ADD_PARM_SIZE (locate->size, sizetree);
3335
3336 locate->size.constant -= part_size_in_regs;
6f086dfc
RS
3337#endif /* ARGS_GROW_DOWNWARD */
3338}
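/* Worked example (illustrative only): with args growing upward,
   PARM_BOUNDARY == 32, a 32-bit FUNCTION_ARG_BOUNDARY and no
   REG_PARM_STACK_SPACE, a "char" stack parameter arriving when
   INITIAL_OFFSET_PTR->constant == 6 is handled as follows:
   pad_to_arg_alignment rounds the offset up to 8, so
   LOCATE->SLOT_OFFSET.constant == 8, and the 1-byte size is rounded up
   to 4, so LOCATE->SIZE.constant == 4, with the byte placed at the low
   or high end of the slot according to FUNCTION_ARG_PADDING.  */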
3339
e16c591a
RS
3340/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3341 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3342
6f086dfc 3343static void
fa8db1f7
AJ
3344pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3345 struct args_size *alignment_pad)
6f086dfc 3346{
a544cfd2
KG
3347 tree save_var = NULL_TREE;
3348 HOST_WIDE_INT save_constant = 0;
a751cd5b 3349 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
3350 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3351
3352#ifdef SPARC_STACK_BOUNDARY_HACK
3353 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3354 higher than the real alignment of %sp. However, when it does this,
3355 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3356 This is a temporary hack until the sparc port is fixed. */
3357 if (SPARC_STACK_BOUNDARY_HACK)
3358 sp_offset = 0;
3359#endif
4fc026cd 3360
9399d5c6 3361 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
4fc026cd
CM
3362 {
3363 save_var = offset_ptr->var;
3364 save_constant = offset_ptr->constant;
3365 }
3366
3367 alignment_pad->var = NULL_TREE;
3368 alignment_pad->constant = 0;
4fc026cd 3369
6f086dfc
RS
3370 if (boundary > BITS_PER_UNIT)
3371 {
3372 if (offset_ptr->var)
3373 {
a594a19c
GK
3374 tree sp_offset_tree = ssize_int (sp_offset);
3375 tree offset = size_binop (PLUS_EXPR,
3376 ARGS_SIZE_TREE (*offset_ptr),
3377 sp_offset_tree);
6f086dfc 3378#ifdef ARGS_GROW_DOWNWARD
a594a19c 3379 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 3380#else
a594a19c 3381 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 3382#endif
a594a19c
GK
3383
3384 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
3385 /* ARGS_SIZE_TREE includes constant term. */
3386 offset_ptr->constant = 0;
dd3f0101
KH
3387 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3388 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 3389 save_var);
6f086dfc
RS
3390 }
3391 else
718fe406 3392 {
a594a19c 3393 offset_ptr->constant = -sp_offset +
6f086dfc 3394#ifdef ARGS_GROW_DOWNWARD
a594a19c 3395 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3396#else
a594a19c 3397 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3398#endif
718fe406
KH
3399 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3400 alignment_pad->constant = offset_ptr->constant - save_constant;
3401 }
6f086dfc
RS
3402 }
3403}
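/* The constant case above amounts to (a sketch, ignoring
   STACK_POINTER_OFFSET):

       offset_ptr->constant = CEIL_ROUND (offset_ptr->constant,
                                          boundary / BITS_PER_UNIT);

   (or FLOOR_ROUND when ARGS_GROW_DOWNWARD), e.g. an offset of 6 with a
   64-bit boundary becomes 8.  */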
3404
3405static void
fa8db1f7 3406pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
6f086dfc
RS
3407{
3408 if (passed_mode != BLKmode)
3409 {
3410 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3411 offset_ptr->constant
3412 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3413 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3414 - GET_MODE_SIZE (passed_mode));
3415 }
3416 else
3417 {
3418 if (TREE_CODE (sizetree) != INTEGER_CST
3419 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3420 {
3421 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3422 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3423 /* Add it in. */
3424 ADD_PARM_SIZE (*offset_ptr, s2);
3425 SUB_PARM_SIZE (*offset_ptr, sizetree);
3426 }
3427 }
3428}
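/* For instance (illustrative): an HImode parameter with
   PARM_BOUNDARY == 32 occupies 2 of its 4 rounded-up bytes, so the
   offset is advanced by 4 - 2 = 2 and the value ends up at the high
   end of its slot.  */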
6f086dfc
RS
3429\f
3430/* Walk the tree of blocks describing the binding levels within a function
6de9cd9a 3431 and warn about variables that might be killed by setjmp or vfork.
6f086dfc
RS
3432 This is done after calling flow_analysis and before global_alloc
3433 clobbers the pseudo-regs to hard regs. */
3434
3435void
6de9cd9a 3436setjmp_vars_warning (tree block)
6f086dfc 3437{
b3694847 3438 tree decl, sub;
6de9cd9a 3439
6f086dfc
RS
3440 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3441 {
6de9cd9a 3442 if (TREE_CODE (decl) == VAR_DECL
bc41842b 3443 && DECL_RTL_SET_P (decl)
f8cfc6aa 3444 && REG_P (DECL_RTL (decl))
6f086dfc 3445 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
d4ee4d25 3446 warning (0, "%Jvariable %qD might be clobbered by %<longjmp%>"
971801ff 3447 " or %<vfork%>",
ddd2d57e 3448 decl, decl);
6f086dfc 3449 }
6de9cd9a 3450
6f086dfc 3451 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
6de9cd9a 3452 setjmp_vars_warning (sub);
6f086dfc
RS
3453}
3454
6de9cd9a 3455/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
3456 but for arguments instead of local variables. */
3457
3458void
fa8db1f7 3459setjmp_args_warning (void)
6f086dfc 3460{
b3694847 3461 tree decl;
6f086dfc
RS
3462 for (decl = DECL_ARGUMENTS (current_function_decl);
3463 decl; decl = TREE_CHAIN (decl))
3464 if (DECL_RTL (decl) != 0
f8cfc6aa 3465 && REG_P (DECL_RTL (decl))
6f086dfc 3466 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
d4ee4d25 3467 warning (0, "%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
ddd2d57e 3468 decl, decl);
6f086dfc
RS
3469}
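/* Example of user code that can draw these warnings (illustrative):

       #include <setjmp.h>

       jmp_buf env;

       int
       f (int arg)
       {
         int x = arg;            (may live in a call-clobbered register)
         if (setjmp (env) == 0)
           x = arg + 1;          (lost if a later longjmp returns here)
         return x;
       }

   Declaring X "volatile" avoids the problem by forcing it to memory.  */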
3470
6f086dfc 3471\f
a20612aa
RH
3472/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3473 and create duplicate blocks. */
3474/* ??? Need an option to either create block fragments or to create
3475 abstract origin duplicates of a source block. It really depends
3476 on what optimization has been performed. */
467456d0 3477
116eebd6 3478void
fa8db1f7 3479reorder_blocks (void)
467456d0 3480{
116eebd6 3481 tree block = DECL_INITIAL (current_function_decl);
2c217442 3482 VEC(tree,heap) *block_stack;
467456d0 3483
1a4450c7 3484 if (block == NULL_TREE)
116eebd6 3485 return;
fc289cd1 3486
2c217442 3487 block_stack = VEC_alloc (tree, heap, 10);
18c038b9 3488
a20612aa 3489 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 3490 clear_block_marks (block);
a20612aa 3491
116eebd6
MM
3492 /* Prune the old trees away, so that they don't get in the way. */
3493 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3494 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 3495
a20612aa 3496 /* Recreate the block tree from the note nesting. */
116eebd6 3497 reorder_blocks_1 (get_insns (), block, &block_stack);
718fe406 3498 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
18c038b9 3499
a20612aa
RH
3500 /* Remove deleted blocks from the block fragment chains. */
3501 reorder_fix_fragments (block);
2c217442
KH
3502
3503 VEC_free (tree, heap, block_stack);
467456d0
RS
3504}
3505
a20612aa 3506/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 3507
6de9cd9a
DN
3508void
3509clear_block_marks (tree block)
cc1fe44f 3510{
a20612aa 3511 while (block)
cc1fe44f 3512 {
a20612aa 3513 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 3514 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 3515 block = BLOCK_CHAIN (block);
cc1fe44f
DD
3516 }
3517}
3518
0a1c58a2 3519static void
2c217442 3520reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
0a1c58a2
JL
3521{
3522 rtx insn;
3523
3524 for (insn = insns; insn; insn = NEXT_INSN (insn))
3525 {
4b4bf941 3526 if (NOTE_P (insn))
0a1c58a2
JL
3527 {
3528 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3529 {
3530 tree block = NOTE_BLOCK (insn);
a20612aa
RH
3531
3532 /* If we have seen this block before, that means it now
3533 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
3534 if (TREE_ASM_WRITTEN (block))
3535 {
a20612aa
RH
3536 tree new_block = copy_node (block);
3537 tree origin;
3538
3539 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3540 ? BLOCK_FRAGMENT_ORIGIN (block)
3541 : block);
3542 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3543 BLOCK_FRAGMENT_CHAIN (new_block)
3544 = BLOCK_FRAGMENT_CHAIN (origin);
3545 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3546
3547 NOTE_BLOCK (insn) = new_block;
3548 block = new_block;
0a1c58a2 3549 }
a20612aa 3550
0a1c58a2
JL
3551 BLOCK_SUBBLOCKS (block) = 0;
3552 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
3553 /* When there's only one block for the entire function,
3554 current_block == block and we mustn't do this; it
3555 will cause infinite recursion. */
3556 if (block != current_block)
3557 {
3558 BLOCK_SUPERCONTEXT (block) = current_block;
3559 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3560 BLOCK_SUBBLOCKS (current_block) = block;
3561 current_block = block;
3562 }
2c217442 3563 VEC_safe_push (tree, heap, *p_block_stack, block);
0a1c58a2
JL
3564 }
3565 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3566 {
2c217442 3567 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
0a1c58a2
JL
3568 BLOCK_SUBBLOCKS (current_block)
3569 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3570 current_block = BLOCK_SUPERCONTEXT (current_block);
3571 }
3572 }
0a1c58a2
JL
3573 }
3574}
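/* Illustrative note: a single source-level { ... } scope can end up
   split across disjoint address ranges (for example by cross-jumping or
   by hot/cold block reordering).  Its NOTE_INSN_BLOCK_BEG/END notes then
   name the same BLOCK more than once, and the copy_node call above turns
   the later occurrences into BLOCK_FRAGMENT_* entries so the debug info
   can describe every range.  */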
3575
a20612aa
RH
3576/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3577 appears in the block tree, select one of the fragments to become
3578 the new origin block. */
3579
3580static void
fa8db1f7 3581reorder_fix_fragments (tree block)
a20612aa
RH
3582{
3583 while (block)
3584 {
3585 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3586 tree new_origin = NULL_TREE;
3587
3588 if (dup_origin)
3589 {
3590 if (! TREE_ASM_WRITTEN (dup_origin))
3591 {
3592 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
797a6ac1 3593
a20612aa
RH
3594 /* Find the first of the remaining fragments. There must
3595 be at least one -- the current block. */
3596 while (! TREE_ASM_WRITTEN (new_origin))
3597 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3598 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3599 }
3600 }
3601 else if (! dup_origin)
3602 new_origin = block;
3603
3604 /* Re-root the rest of the fragments to the new origin. In the
3605 case that DUP_ORIGIN was null, that means BLOCK was the origin
3606 of a chain of fragments and we want to remove those fragments
3607 that didn't make it to the output. */
3608 if (new_origin)
3609 {
3610 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3611 tree chain = *pp;
3612
3613 while (chain)
3614 {
3615 if (TREE_ASM_WRITTEN (chain))
3616 {
3617 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3618 *pp = chain;
3619 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3620 }
3621 chain = BLOCK_FRAGMENT_CHAIN (chain);
3622 }
3623 *pp = NULL_TREE;
3624 }
3625
3626 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3627 block = BLOCK_CHAIN (block);
3628 }
3629}
3630
467456d0
RS
3631/* Reverse the order of elements in the chain T of blocks,
3632 and return the new head of the chain (old last element). */
3633
6de9cd9a 3634tree
fa8db1f7 3635blocks_nreverse (tree t)
467456d0 3636{
b3694847 3637 tree prev = 0, decl, next;
467456d0
RS
3638 for (decl = t; decl; decl = next)
3639 {
3640 next = BLOCK_CHAIN (decl);
3641 BLOCK_CHAIN (decl) = prev;
3642 prev = decl;
3643 }
3644 return prev;
3645}
3646
18c038b9
MM
3647/* Count the subblocks of the list starting with BLOCK. If VECTOR is
3648 non-NULL, list them all into VECTOR, in a depth-first preorder
3649 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 3650 blocks. */
467456d0
RS
3651
3652static int
fa8db1f7 3653all_blocks (tree block, tree *vector)
467456d0 3654{
b2a59b15
MS
3655 int n_blocks = 0;
3656
a84efb51
JO
3657 while (block)
3658 {
3659 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 3660
a84efb51
JO
3661 /* Record this block. */
3662 if (vector)
3663 vector[n_blocks] = block;
b2a59b15 3664
a84efb51 3665 ++n_blocks;
718fe406 3666
a84efb51
JO
3667 /* Record the subblocks, and their subblocks... */
3668 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3669 vector ? vector + n_blocks : 0);
3670 block = BLOCK_CHAIN (block);
3671 }
467456d0
RS
3672
3673 return n_blocks;
3674}
18c038b9
MM
3675
3676/* Return a vector containing all the blocks rooted at BLOCK. The
3677 number of elements in the vector is stored in N_BLOCKS_P. The
3678 vector is dynamically allocated; it is the caller's responsibility
3679 to call `free' on the pointer returned. */
718fe406 3680
18c038b9 3681static tree *
fa8db1f7 3682get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
3683{
3684 tree *block_vector;
3685
3686 *n_blocks_p = all_blocks (block, NULL);
703ad42b 3687 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
18c038b9
MM
3688 all_blocks (block, block_vector);
3689
3690 return block_vector;
3691}
3692
f83b236e 3693static GTY(()) int next_block_index = 2;
18c038b9
MM
3694
3695/* Set BLOCK_NUMBER for all the blocks in FN. */
3696
3697void
fa8db1f7 3698number_blocks (tree fn)
18c038b9
MM
3699{
3700 int i;
3701 int n_blocks;
3702 tree *block_vector;
3703
3704 /* For SDB and XCOFF debugging output, we start numbering the blocks
3705 from 1 within each function, rather than keeping a running
3706 count. */
3707#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
3708 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3709 next_block_index = 1;
18c038b9
MM
3710#endif
3711
3712 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3713
3714 /* The top-level BLOCK isn't numbered at all. */
3715 for (i = 1; i < n_blocks; ++i)
3716 /* We number the blocks from two. */
3717 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3718
3719 free (block_vector);
3720
3721 return;
3722}
df8992f8
RH
3723
3724/* If VAR is present in a subblock of BLOCK, return the subblock. */
3725
3726tree
fa8db1f7 3727debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
3728{
3729 tree t;
3730
3731 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3732 if (t == var)
3733 return block;
3734
3735 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3736 {
3737 tree ret = debug_find_var_in_block_tree (var, t);
3738 if (ret)
3739 return ret;
3740 }
3741
3742 return NULL_TREE;
3743}
467456d0 3744\f
3a70d621
RH
3745/* Allocate a function structure for FNDECL and set its contents
3746 to the defaults. */
7a80cf9a 3747
3a70d621
RH
3748void
3749allocate_struct_function (tree fndecl)
6f086dfc 3750{
3a70d621 3751 tree result;
6de9cd9a 3752 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 3753
3a70d621 3754 cfun = ggc_alloc_cleared (sizeof (struct function));
b384405b 3755
3a70d621
RH
3756 cfun->stack_alignment_needed = STACK_BOUNDARY;
3757 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6f086dfc 3758
3a70d621 3759 current_function_funcdef_no = funcdef_no++;
6f086dfc 3760
3a70d621 3761 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6f086dfc 3762
3a70d621 3763 init_eh_for_function ();
6f086dfc 3764
ae2bcd98 3765 lang_hooks.function.init (cfun);
3a70d621
RH
3766 if (init_machine_status)
3767 cfun->machine = (*init_machine_status) ();
e2ecd91c 3768
3a70d621
RH
3769 if (fndecl == NULL)
3770 return;
a0871656 3771
1da326c3 3772 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3a70d621 3773 cfun->decl = fndecl;
6f086dfc 3774
3a70d621 3775 result = DECL_RESULT (fndecl);
61f71b34 3776 if (aggregate_value_p (result, fndecl))
3a70d621
RH
3777 {
3778#ifdef PCC_STATIC_STRUCT_RETURN
3779 current_function_returns_pcc_struct = 1;
3780#endif
3781 current_function_returns_struct = 1;
3782 }
6f086dfc 3783
3a70d621 3784 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
6f086dfc 3785
6de9cd9a
DN
3786 current_function_stdarg
3787 = (fntype
3788 && TYPE_ARG_TYPES (fntype) != 0
3789 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3790 != void_type_node));
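  /* For example (illustrative): for "int printf (const char *, ...)" the
     TYPE_ARG_TYPES list does not end in void_type_node, so the function
     is treated as stdarg; for "int puts (const char *)" it does, and the
     function is not.  */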
9d30f3c1
JJ
3791
3792 /* Assume all registers in stdarg functions need to be saved. */
3793 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3794 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3a70d621 3795}
6f086dfc 3796
3a70d621 3797/* Reset cfun, and other non-struct-function variables to defaults as
2067c116 3798 appropriate for emitting rtl at the start of a function. */
6f086dfc 3799
3a70d621
RH
3800static void
3801prepare_function_start (tree fndecl)
3802{
1da326c3
SB
3803 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3804 cfun = DECL_STRUCT_FUNCTION (fndecl);
3a70d621
RH
3805 else
3806 allocate_struct_function (fndecl);
0de456a5
JH
3807 init_emit ();
3808 init_varasm_status (cfun);
3809 init_expr ();
6f086dfc 3810
3a70d621 3811 cse_not_expected = ! optimize;
6f086dfc 3812
3a70d621
RH
3813 /* Caller save not needed yet. */
3814 caller_save_needed = 0;
6f086dfc 3815
3a70d621
RH
3816 /* We haven't done register allocation yet. */
3817 reg_renumber = 0;
6f086dfc 3818
b384405b
BS
3819 /* Indicate that we have not instantiated virtual registers yet. */
3820 virtuals_instantiated = 0;
3821
1b3d8f8a
GK
3822 /* Indicate that we want CONCATs now. */
3823 generating_concat_p = 1;
3824
b384405b
BS
3825 /* Indicate we have no need of a frame pointer yet. */
3826 frame_pointer_needed = 0;
b384405b
BS
3827}
3828
3829/* Initialize the rtl expansion mechanism so that we can do simple things
3830 like generate sequences. This is used to provide a context during global
3831 initialization of some passes. */
3832void
fa8db1f7 3833init_dummy_function_start (void)
b384405b 3834{
3a70d621 3835 prepare_function_start (NULL);
b384405b
BS
3836}
3837
3838/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3839 and initialize static variables for generating RTL for the statements
3840 of the function. */
3841
3842void
fa8db1f7 3843init_function_start (tree subr)
b384405b 3844{
3a70d621 3845 prepare_function_start (subr);
b384405b 3846
ee6b0296
NS
3847 /* Prevent ever trying to delete the first instruction of a
3848 function. Also tell final how to output a linenum before the
3849 function prologue. Note linenums could be missing, e.g. when
3850 compiling a Java .class file. */
3c20847b 3851 if (! DECL_IS_BUILTIN (subr))
f31686a3 3852 emit_line_note (DECL_SOURCE_LOCATION (subr));
6f086dfc
RS
3853
3854 /* Make sure first insn is a note even if we don't want linenums.
3855 This makes sure the first insn will never be deleted.
3856 Also, final expects a note to appear there. */
2e040219 3857 emit_note (NOTE_INSN_DELETED);
6f086dfc 3858
6f086dfc
RS
3859 /* Warn if this value is an aggregate type,
3860 regardless of which calling convention we are using for it. */
ccf08a6e
DD
3861 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3862 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 3863}
5c7675e9 3864
49ad7cfa
BS
3865/* Make sure all values used by the optimization passes have sane
3866 defaults. */
3867void
fa8db1f7 3868init_function_for_compilation (void)
49ad7cfa
BS
3869{
3870 reg_renumber = 0;
0a1c58a2 3871
f995dcfe
KH
3872 /* No prologue/epilogue insns yet. Make sure that these vectors are
3873 empty. */
3874 gcc_assert (VEC_length (int, prologue) == 0);
3875 gcc_assert (VEC_length (int, epilogue) == 0);
3876 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
6f086dfc
RS
3877}
3878
6f086dfc 3879void
fa8db1f7 3880expand_main_function (void)
6f086dfc 3881{
1d482056
RH
3882#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3883 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
3884 {
3885 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
8a723db2 3886 rtx tmp, seq;
1d482056 3887
8a723db2 3888 start_sequence ();
ef89d648 3889 /* Forcibly align the stack. */
1d482056 3890#ifdef STACK_GROWS_DOWNWARD
ef89d648
ZW
3891 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
3892 stack_pointer_rtx, 1, OPTAB_WIDEN);
1d482056 3893#else
ef89d648
ZW
3894 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3895 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
3896 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
3897 stack_pointer_rtx, 1, OPTAB_WIDEN);
1d482056
RH
3898#endif
3899 if (tmp != stack_pointer_rtx)
3900 emit_move_insn (stack_pointer_rtx, tmp);
797a6ac1 3901
1d482056
RH
3902 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3903 tmp = force_reg (Pmode, const0_rtx);
3904 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
2f937369 3905 seq = get_insns ();
8a723db2
DD
3906 end_sequence ();
3907
3908 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
3909 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
3910 break;
3911 if (tmp)
3912 emit_insn_before (seq, tmp);
3913 else
3914 emit_insn (seq);
1d482056
RH
3915 }
3916#endif
3917
3a57c6cb
MM
3918#if (defined(INVOKE__main) \
3919 || (!defined(HAS_INIT_SECTION) \
3920 && !defined(INIT_SECTION_ASM_OP) \
3921 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
68d28100 3922 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
1d482056 3923#endif
6f086dfc
RS
3924}
3925\f
3926/* Start the RTL for a new function, and set variables used for
3927 emitting RTL.
3928 SUBR is the FUNCTION_DECL node.
3929 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3930 the function's parameters, which must be run at any return statement. */
3931
3932void
b79c5284 3933expand_function_start (tree subr)
6f086dfc 3934{
6f086dfc
RS
3935 /* Make sure volatile mem refs aren't considered
3936 valid operands of arithmetic insns. */
3937 init_recog_no_volatile ();
3938
70f4f91c
WC
3939 current_function_profile
3940 = (profile_flag
3941 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
3942
a157febd
GK
3943 current_function_limit_stack
3944 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
3945
52a11cbf
RH
3946 /* Make the label for return statements to jump to. Do not special
3947 case machines with special return instructions -- they will be
3948 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 3949 return_label = gen_label_rtx ();
6f086dfc
RS
3950
3951 /* Initialize rtx used to return the value. */
3952 /* Do this before assign_parms so that we copy the struct value address
3953 before any library calls that assign parms might generate. */
3954
3955 /* Decide whether to return the value in memory or in a register. */
61f71b34 3956 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
3957 {
3958 /* Returning something that won't go in a register. */
b3694847 3959 rtx value_address = 0;
6f086dfc
RS
3960
3961#ifdef PCC_STATIC_STRUCT_RETURN
3962 if (current_function_returns_pcc_struct)
3963 {
3964 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3965 value_address = assemble_static_space (size);
3966 }
3967 else
3968#endif
3969 {
61f71b34 3970 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
6f086dfc
RS
3971 /* Expect to be passed the address of a place to store the value.
3972 If it is passed as an argument, assign_parms will take care of
3973 it. */
61f71b34 3974 if (sv)
6f086dfc
RS
3975 {
3976 value_address = gen_reg_rtx (Pmode);
61f71b34 3977 emit_move_insn (value_address, sv);
6f086dfc
RS
3978 }
3979 }
3980 if (value_address)
ccdecf58 3981 {
01c98570
JM
3982 rtx x = value_address;
3983 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
3984 {
3985 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
3986 set_mem_attributes (x, DECL_RESULT (subr), 1);
3987 }
abde42f7 3988 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 3989 }
6f086dfc
RS
3990 }
3991 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3992 /* If return mode is void, this decl rtl should not be used. */
19e7881c 3993 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 3994 else
a53e14c0 3995 {
d5bf1143
RH
3996 /* Compute the return values into a pseudo reg, which we will copy
3997 into the true return register after the cleanups are done. */
bef5d8b6
RS
3998 tree return_type = TREE_TYPE (DECL_RESULT (subr));
3999 if (TYPE_MODE (return_type) != BLKmode
4000 && targetm.calls.return_in_msb (return_type))
4001 /* expand_function_end will insert the appropriate padding in
4002 this case. Use the return value's natural (unpadded) mode
4003 within the function proper. */
4004 SET_DECL_RTL (DECL_RESULT (subr),
4005 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 4006 else
0bccc606 4007 {
bef5d8b6
RS
4008 /* In order to figure out what mode to use for the pseudo, we
4009 figure out what the mode of the eventual return register will
4010 actually be, and use that. */
4011 rtx hard_reg = hard_function_value (return_type, subr, 1);
4012
4013 /* Structures that are returned in registers are not
4014 aggregate_value_p, so we may see a PARALLEL or a REG. */
4015 if (REG_P (hard_reg))
4016 SET_DECL_RTL (DECL_RESULT (subr),
4017 gen_reg_rtx (GET_MODE (hard_reg)));
4018 else
4019 {
4020 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4021 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4022 }
0bccc606 4023 }
a53e14c0 4024
084a1106
JDA
4025 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4026 result to the real return register(s). */
4027 DECL_REGISTER (DECL_RESULT (subr)) = 1;
a53e14c0 4028 }
6f086dfc
RS
4029
4030 /* Initialize rtx for parameters and local variables.
4031 In some cases this requires emitting insns. */
0d1416c6 4032 assign_parms (subr);
6f086dfc 4033
6de9cd9a
DN
4034 /* If function gets a static chain arg, store it. */
4035 if (cfun->static_chain_decl)
4036 {
7e140280
RH
4037 tree parm = cfun->static_chain_decl;
4038 rtx local = gen_reg_rtx (Pmode);
4039
4040 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4041 SET_DECL_RTL (parm, local);
7e140280 4042 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 4043
7e140280 4044 emit_move_insn (local, static_chain_incoming_rtx);
6de9cd9a
DN
4045 }
4046
4047 /* If the function receives a non-local goto, then store the
4048 bits we need to restore the frame pointer. */
4049 if (cfun->nonlocal_goto_save_area)
4050 {
4051 tree t_save;
4052 rtx r_save;
4053
4054 /* ??? We need to do this save early.  Unfortunately, this happens
4055 before the frame variable gets declared.  Help out...  */
4056 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4057
3244e67d
RS
4058 t_save = build4 (ARRAY_REF, ptr_type_node,
4059 cfun->nonlocal_goto_save_area,
4060 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 4061 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5e89a381 4062 r_save = convert_memory_address (Pmode, r_save);
f0c51a1e 4063
6de9cd9a
DN
4064 emit_move_insn (r_save, virtual_stack_vars_rtx);
4065 update_nonlocal_goto_save_area ();
4066 }
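  /* Illustrative user-level example (GNU C) of code that creates a
     nonlocal goto save area:

	 int
	 outer (void)
	 {
	   __label__ out;
	   void inner (void) { goto out; }
	   inner ();
	  out:
	   return 0;
	 }

     The nonlocal "goto out" must restore OUTER's frame and stack
     pointers from the area saved above.  */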
f0c51a1e 4067
6f086dfc
RS
4068 /* The following was moved from init_function_start.
4069 The move is supposed to make sdb output more accurate. */
4070 /* Indicate the beginning of the function body,
4071 as opposed to parm setup. */
2e040219 4072 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 4073
4b4bf941 4074 if (!NOTE_P (get_last_insn ()))
2e040219 4075 emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
4076 parm_birth_insn = get_last_insn ();
4077
70f4f91c 4078 if (current_function_profile)
f6f315fe 4079 {
f6f315fe 4080#ifdef PROFILE_HOOK
df696a75 4081 PROFILE_HOOK (current_function_funcdef_no);
411707f4 4082#endif
f6f315fe 4083 }
411707f4 4084
6f086dfc
RS
4085 /* After the display initializations is where the tail-recursion label
4086 should go, if we end up needing one. Ensure we have a NOTE here
4087 since some things (like trampolines) get placed before this. */
2e040219 4088 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
6f086dfc 4089
6f086dfc
RS
4090 /* Make sure there is a line number after the function entry setup code. */
4091 force_next_line_note ();
4092}
4093\f
49ad7cfa
BS
4094/* Undo the effects of init_dummy_function_start. */
4095void
fa8db1f7 4096expand_dummy_function_end (void)
49ad7cfa
BS
4097{
4098 /* End any sequences that failed to be closed due to syntax errors. */
4099 while (in_sequence_p ())
4100 end_sequence ();
4101
4102 /* Outside function body, can't compute type's actual size
4103 until next function's body starts. */
fa51b01b 4104
01d939e8
BS
4105 free_after_parsing (cfun);
4106 free_after_compilation (cfun);
01d939e8 4107 cfun = 0;
49ad7cfa
BS
4108}
4109
c13fde05
RH
4110/* Call DOIT for each hard register used as a return value from
4111 the current function. */
bd695e1e
RH
4112
4113void
fa8db1f7 4114diddle_return_value (void (*doit) (rtx, void *), void *arg)
bd695e1e 4115{
c13fde05
RH
4116 rtx outgoing = current_function_return_rtx;
4117
4118 if (! outgoing)
4119 return;
bd695e1e 4120
f8cfc6aa 4121 if (REG_P (outgoing))
c13fde05
RH
4122 (*doit) (outgoing, arg);
4123 else if (GET_CODE (outgoing) == PARALLEL)
4124 {
4125 int i;
bd695e1e 4126
c13fde05
RH
4127 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4128 {
4129 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4130
f8cfc6aa 4131 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 4132 (*doit) (x, arg);
bd695e1e
RH
4133 }
4134 }
4135}
4136
c13fde05 4137static void
fa8db1f7 4138do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05
RH
4139{
4140 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4141}
4142
4143void
fa8db1f7 4144clobber_return_register (void)
c13fde05
RH
4145{
4146 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
4147
4148 /* In case we do use pseudo to return value, clobber it too. */
4149 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4150 {
4151 tree decl_result = DECL_RESULT (current_function_decl);
4152 rtx decl_rtl = DECL_RTL (decl_result);
4153 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4154 {
4155 do_clobber_return_reg (decl_rtl, NULL);
4156 }
4157 }
c13fde05
RH
4158}
4159
4160static void
fa8db1f7 4161do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05
RH
4162{
4163 emit_insn (gen_rtx_USE (VOIDmode, reg));
4164}
4165
4166void
fa8db1f7 4167use_return_register (void)
c13fde05
RH
4168{
4169 diddle_return_value (do_use_return_reg, NULL);
4170}
4171
902edd36
JH
4172/* Possibly warn about unused parameters. */
4173void
4174do_warn_unused_parameter (tree fn)
4175{
4176 tree decl;
4177
4178 for (decl = DECL_ARGUMENTS (fn);
4179 decl; decl = TREE_CHAIN (decl))
4180 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4181 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
d4ee4d25 4182 warning (0, "%Junused parameter %qD", decl, decl);
902edd36
JH
4183}
4184
e2500fed
GK
4185static GTY(()) rtx initial_trampoline;
4186
71c0e7fc 4187/* Generate RTL for the end of the current function. */
6f086dfc
RS
4188
4189void
fa8db1f7 4190expand_function_end (void)
6f086dfc 4191{
932f0847 4192 rtx clobber_after;
6f086dfc 4193
964be02f
RH
4194 /* If arg_pointer_save_area was referenced only from a nested
4195 function, we will not have initialized it yet. Do that now. */
4196 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4197 get_arg_pointer_save_area (cfun);
4198
11044f66
RK
4199 /* If we are doing stack checking and this function makes calls,
4200 do a stack probe at the start of the function to ensure we have enough
4201 space for another stack frame. */
4202 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4203 {
4204 rtx insn, seq;
4205
4206 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 4207 if (CALL_P (insn))
11044f66
RK
4208 {
4209 start_sequence ();
4210 probe_stack_range (STACK_CHECK_PROTECT,
4211 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4212 seq = get_insns ();
4213 end_sequence ();
2f937369 4214 emit_insn_before (seq, tail_recursion_reentry);
11044f66
RK
4215 break;
4216 }
4217 }
4218
902edd36
JH
4219 /* Possibly warn about unused parameters.
4220 When frontend does unit-at-a-time, the warning is already
4221 issued at finalization time. */
4222 if (warn_unused_parameter
4223 && !lang_hooks.callgraph.expand_function)
4224 do_warn_unused_parameter (current_function_decl);
6f086dfc 4225
6f086dfc
RS
4226 /* End any sequences that failed to be closed due to syntax errors. */
4227 while (in_sequence_p ())
5f4f0e22 4228 end_sequence ();
6f086dfc 4229
6f086dfc
RS
4230 clear_pending_stack_adjust ();
4231 do_pending_stack_adjust ();
4232
ffad84cd
AH
4233 /* @@@ This is a kludge. We want to ensure that instructions that
4234 may trap are not moved into the epilogue by scheduling, because
4235 we don't always emit unwind information for the epilogue.
4236 However, not all machine descriptions define a blockage insn, so
4237 emit an ASM_INPUT to act as one. */
4238 if (flag_non_call_exceptions)
4239 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4240
6f086dfc
RS
4241 /* Mark the end of the function body.
4242 If control reaches this insn, the function can drop through
4243 without returning a value. */
2e040219 4244 emit_note (NOTE_INSN_FUNCTION_END);
6f086dfc 4245
82e415a3
DE
4246 /* Must mark the last line number note in the function, so that the test
4247 coverage code can avoid counting the last line twice. This just tells
4248 the code to ignore the immediately following line note, since there
4249 already exists a copy of this note somewhere above. This line number
4250 note is still needed for debugging though, so we can't delete it. */
4251 if (flag_test_coverage)
2e040219 4252 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
82e415a3 4253
6f086dfc
RS
4254 /* Output a linenumber for the end of the function.
4255 SDB depends on this. */
0cea056b
NS
4256 force_next_line_note ();
4257 emit_line_note (input_location);
6f086dfc 4258
fbffc70a 4259 /* Before the return label (if any), clobber the return
a1f300c0 4260 registers so that they are not propagated live to the rest of
fbffc70a
GK
4261 the function. This can only happen with functions that drop
4262 through; if there had been a return statement, there would
932f0847
JH
4263 have either been a return rtx, or a jump to the return label.
4264
4265 We delay actual code generation after the current_function_value_rtx
4266 is computed. */
4267 clobber_after = get_last_insn ();
fbffc70a 4268
526c334b
KH
4269 /* Output the label for the actual return from the function. */
4270 emit_label (return_label);
6f086dfc 4271
52a11cbf
RH
4272 /* Let except.c know where it should emit the call to unregister
4273 the function context for sjlj exceptions. */
4274 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4275 sjlj_emit_function_exit_after (get_last_insn ());
4276
3e4eac3f
RH
4277 /* If scalar return value was computed in a pseudo-reg, or was a named
4278 return value that got dumped to the stack, copy that to the hard
4279 return register. */
19e7881c 4280 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 4281 {
3e4eac3f
RH
4282 tree decl_result = DECL_RESULT (current_function_decl);
4283 rtx decl_rtl = DECL_RTL (decl_result);
4284
4285 if (REG_P (decl_rtl)
4286 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4287 : DECL_REGISTER (decl_result))
4288 {
ce5e43d0 4289 rtx real_decl_rtl = current_function_return_rtx;
6f086dfc 4290
ce5e43d0 4291 /* This should be set in assign_parms. */
0bccc606 4292 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
4293
4294 /* If this is a BLKmode structure being returned in registers,
4295 then use the mode computed in expand_return. Note that if
797a6ac1 4296 decl_rtl is memory, then its mode may have been changed,
3e4eac3f
RH
4297 but that current_function_return_rtx has not. */
4298 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 4299 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 4300
bef5d8b6
RS
4301 /* If a non-BLKmode return value should be padded at the least
4302 significant end of the register, shift it left by the appropriate
4303 amount. BLKmode results are handled using the group load/store
4304 machinery. */
4305 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4306 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4307 {
4308 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4309 REGNO (real_decl_rtl)),
4310 decl_rtl);
4311 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4312 }
3e4eac3f 4313 /* If a named return value was dumped to memory, then
797a6ac1 4314 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 4315 extension. */
bef5d8b6 4316 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 4317 {
8df83eae 4318 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
3e4eac3f 4319
61f71b34
DD
4320 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4321 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4322 &unsignedp, 1);
3e4eac3f
RH
4323
4324 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4325 }
aa570f54 4326 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
4327 {
4328 /* If expand_function_start has created a PARALLEL for decl_rtl,
4329 move the result to the real return registers. Otherwise, do
4330 a group load from decl_rtl for a named return. */
4331 if (GET_CODE (decl_rtl) == PARALLEL)
4332 emit_group_move (real_decl_rtl, decl_rtl);
4333 else
4334 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 4335 TREE_TYPE (decl_result),
084a1106
JDA
4336 int_size_in_bytes (TREE_TYPE (decl_result)));
4337 }
3e4eac3f
RH
4338 else
4339 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 4340 }
6f086dfc
RS
4341 }
4342
4343 /* If returning a structure, arrange to return the address of the value
4344 in a place where debuggers expect to find it.
4345
4346 If returning a structure PCC style,
4347 the caller also depends on this value.
4348 And current_function_returns_pcc_struct is not necessarily set. */
4349 if (current_function_returns_struct
4350 || current_function_returns_pcc_struct)
4351 {
cc77ae10 4352 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 4353 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
4354 rtx outgoing;
4355
4356 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4357 type = TREE_TYPE (type);
4358 else
4359 value_address = XEXP (value_address, 0);
4360
6f086dfc 4361#ifdef FUNCTION_OUTGOING_VALUE
cc77ae10
JM
4362 outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4363 current_function_decl);
6f086dfc 4364#else
cc77ae10
JM
4365 outgoing = FUNCTION_VALUE (build_pointer_type (type),
4366 current_function_decl);
4367#endif
6f086dfc
RS
4368
4369 /* Mark this as a function return value so integrate will delete the
4370 assignment and USE below when inlining this function. */
4371 REG_FUNCTION_VALUE_P (outgoing) = 1;
4372
d1608933 4373 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
4374 value_address = convert_memory_address (GET_MODE (outgoing),
4375 value_address);
d1608933 4376
6f086dfc 4377 emit_move_insn (outgoing, value_address);
d1608933
RK
4378
4379 /* Show the return register used to hold the result (in this case the
4380 address of the result).  */
4381 current_function_return_rtx = outgoing;
6f086dfc
RS
4382 }
4383
52a11cbf
RH
4384 /* If this is an implementation of throw, do what's necessary to
4385 communicate between __builtin_eh_return and the epilogue. */
4386 expand_eh_return ();
4387
932f0847
JH
4388 /* Emit the actual code to clobber return register. */
4389 {
609c3937 4390 rtx seq;
797a6ac1 4391
932f0847
JH
4392 start_sequence ();
4393 clobber_return_register ();
609c3937 4394 expand_naked_return ();
2f937369 4395 seq = get_insns ();
932f0847
JH
4396 end_sequence ();
4397
609c3937 4398 emit_insn_after (seq, clobber_after);
932f0847
JH
4399 }
4400
609c3937
RH
4401 /* Output the label for the naked return from the function. */
4402 emit_label (naked_return_label);
6e3077c6 4403
40184445
BS
4404 /* If we had calls to alloca, and this machine needs
4405 an accurate stack pointer to exit the function,
4406 insert some code to save and restore the stack pointer. */
4407 if (! EXIT_IGNORE_STACK
4408 && current_function_calls_alloca)
4409 {
4410 rtx tem = 0;
4411
4412 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4413 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4414 }
4415
c13fde05
RH
4416 /* ??? This should no longer be necessary since stupid is no longer with
4417 us, but there are some parts of the compiler (eg reload_combine, and
4418 sh mach_dep_reorg) that still try and compute their own lifetime info
4419 instead of using the general framework. */
4420 use_return_register ();
6f086dfc 4421}
278ed218
RH
4422
4423rtx
fa8db1f7 4424get_arg_pointer_save_area (struct function *f)
278ed218
RH
4425{
4426 rtx ret = f->x_arg_pointer_save_area;
4427
4428 if (! ret)
4429 {
278ed218
RH
4430 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4431 f->x_arg_pointer_save_area = ret;
964be02f
RH
4432 }
4433
4434 if (f == cfun && ! f->arg_pointer_save_area_init)
4435 {
4436 rtx seq;
278ed218 4437
797a6ac1 4438 /* Save the arg pointer at the beginning of the function. The
964be02f 4439 generated stack slot may not be a valid memory address, so we
278ed218
RH
4440 have to check it and fix it if necessary. */
4441 start_sequence ();
4442 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
2f937369 4443 seq = get_insns ();
278ed218
RH
4444 end_sequence ();
4445
964be02f 4446 push_topmost_sequence ();
1cb2fc7b 4447 emit_insn_after (seq, entry_of_function ());
964be02f 4448 pop_topmost_sequence ();
278ed218
RH
4449 }
4450
4451 return ret;
4452}
bdac5f58 4453\f
2f937369
DM
4454/* Extend a vector that records the INSN_UIDs of INSNS
4455 (a list of one or more insns). */
bdac5f58 4456
0a1c58a2 4457static void
f995dcfe 4458record_insns (rtx insns, VEC(int,heap) **vecp)
bdac5f58 4459{
2f937369 4460 rtx tmp;
0a1c58a2 4461
f995dcfe
KH
4462 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4463 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
bdac5f58
TW
4464}
4465
589fe865 4466/* Set the locator of the insn chain starting at INSN to LOC. */
0435312e 4467static void
fa8db1f7 4468set_insn_locators (rtx insn, int loc)
0435312e
JH
4469{
4470 while (insn != NULL_RTX)
4471 {
4472 if (INSN_P (insn))
4473 INSN_LOCATOR (insn) = loc;
4474 insn = NEXT_INSN (insn);
4475 }
4476}
4477
2f937369
DM
4478/* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4479 be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 4480
10914065 4481static int
f995dcfe 4482contains (rtx insn, VEC(int,heap) **vec)
bdac5f58 4483{
b3694847 4484 int i, j;
bdac5f58 4485
4b4bf941 4486 if (NONJUMP_INSN_P (insn)
bdac5f58
TW
4487 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4488 {
10914065 4489 int count = 0;
bdac5f58 4490 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
f995dcfe
KH
4491 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4492 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4493 == VEC_index (int, *vec, j))
10914065
TW
4494 count++;
4495 return count;
bdac5f58
TW
4496 }
4497 else
4498 {
f995dcfe
KH
4499 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4500 if (INSN_UID (insn) == VEC_index (int, *vec, j))
10914065 4501 return 1;
bdac5f58
TW
4502 }
4503 return 0;
4504}
5c7675e9
RH
4505
4506int
fa8db1f7 4507prologue_epilogue_contains (rtx insn)
5c7675e9 4508{
f995dcfe 4509 if (contains (insn, &prologue))
5c7675e9 4510 return 1;
f995dcfe 4511 if (contains (insn, &epilogue))
5c7675e9
RH
4512 return 1;
4513 return 0;
4514}
bdac5f58 4515
0a1c58a2 4516int
fa8db1f7 4517sibcall_epilogue_contains (rtx insn)
0a1c58a2
JL
4518{
4519 if (sibcall_epilogue)
f995dcfe 4520 return contains (insn, &sibcall_epilogue);
0a1c58a2
JL
4521 return 0;
4522}
4523
73ef99fb 4524#ifdef HAVE_return
69732dcb
RH
4525/* Insert gen_return at the end of block BB. This also means updating
4526 block_for_insn appropriately. */
4527
4528static void
fa8db1f7 4529emit_return_into_block (basic_block bb, rtx line_note)
69732dcb 4530{
a813c111 4531 emit_jump_insn_after (gen_return (), BB_END (bb));
86c82654 4532 if (line_note)
a813c111 4533 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
69732dcb 4534}
73ef99fb 4535#endif /* HAVE_return */
69732dcb 4536
3258e996
RK
4537#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4538
535a42b1
NS
4539/* These functions convert the epilogue into a variant that does not
4540 modify the stack pointer. This is used in cases where a function
4541 returns an object whose size is not known until it is computed.
4542 The called function leaves the object on the stack, leaves the
4543 stack depressed, and returns a pointer to the object.
4544
4545 What we need to do is track all modifications and references to the
4546 stack pointer, deleting the modifications and changing the
4547 references to point to the location the stack pointer would have
4548 pointed to had the modifications taken place.
4549
4550 These functions need to be portable so we need to make as few
4551 assumptions about the epilogue as we can. However, the epilogue
4552 basically contains three things: instructions to reset the stack
4553 pointer, instructions to reload registers, possibly including the
4554 frame pointer, and an instruction to return to the caller.
4555
4556 We must be sure of what a relevant epilogue insn is doing. We also
4557 make no attempt to validate the insns we make since if they are
4558 invalid, we probably can't do anything valid. The intent is that
4559 these routines get "smarter" as more and more machines start to use
4560 them and they try operating on different epilogues.
4561
4562 We use the following structure to track what the part of the
4563 epilogue that we've already processed has done. We keep two copies
4564 of the SP equivalence, one for use during the insn we are
4565 processing and one for use in the next insn. The difference is
4566 because one part of a PARALLEL may adjust SP and the other may use
4567 it. */
3258e996
RK
4568
4569struct epi_info
4570{
4571 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4572 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
3ef42a0c 4573 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
3258e996
RK
4574 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4575 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4576 should be set to once we no longer need
4577 its value. */
f285d67b
RK
4578 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4579 for registers. */
3258e996
RK
4580};
4581
fa8db1f7 4582static void handle_epilogue_set (rtx, struct epi_info *);
80fcc7bc 4583static void update_epilogue_consts (rtx, rtx, void *);
fa8db1f7 4584static void emit_equiv_load (struct epi_info *);
7393c642 4585
2f937369
DM
4586/* Modify INSN, a list of one or more insns that is part of the epilogue, so
4587 that it makes no modifications to the stack pointer.  Return the new list of insns. */
7393c642 4588
3258e996 4589static rtx
fa8db1f7 4590keep_stack_depressed (rtx insns)
7393c642 4591{
2f937369 4592 int j;
3258e996 4593 struct epi_info info;
2f937369 4594 rtx insn, next;
7393c642 4595
f285d67b 4596 /* If the epilogue is just a single instruction, it must be OK as is. */
2f937369
DM
4597 if (NEXT_INSN (insns) == NULL_RTX)
4598 return insns;
7393c642 4599
3258e996
RK
4600 /* Otherwise, start a sequence, initialize the information we have, and
4601 process all the insns we were given. */
4602 start_sequence ();
4603
4604 info.sp_equiv_reg = stack_pointer_rtx;
4605 info.sp_offset = 0;
4606 info.equiv_reg_src = 0;
7393c642 4607
f285d67b
RK
4608 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4609 info.const_equiv[j] = 0;
4610
2f937369
DM
4611 insn = insns;
4612 next = NULL_RTX;
4613 while (insn != NULL_RTX)
7393c642 4614 {
2f937369 4615 next = NEXT_INSN (insn);
7393c642 4616
3258e996
RK
4617 if (!INSN_P (insn))
4618 {
4619 add_insn (insn);
2f937369 4620 insn = next;
3258e996
RK
4621 continue;
4622 }
7393c642 4623
3258e996
RK
4624 /* If this insn references the register that SP is equivalent to and
4625 we have a pending load to that register, we must force out the load
4626 first and then indicate we no longer know what SP's equivalent is. */
4627 if (info.equiv_reg_src != 0
4628 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7393c642 4629 {
3258e996
RK
4630 emit_equiv_load (&info);
4631 info.sp_equiv_reg = 0;
4632 }
7393c642 4633
3258e996
RK
4634 info.new_sp_equiv_reg = info.sp_equiv_reg;
4635 info.new_sp_offset = info.sp_offset;
7393c642 4636
3258e996
RK
4637 /* If this is a (RETURN) and the return address is on the stack,
4638 update the address and change to an indirect jump. */
4639 if (GET_CODE (PATTERN (insn)) == RETURN
4640 || (GET_CODE (PATTERN (insn)) == PARALLEL
4641 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4642 {
4643 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4644 rtx base = 0;
4645 HOST_WIDE_INT offset = 0;
4646 rtx jump_insn, jump_set;
4647
4648 /* If the return address is in a register, we can emit the insn
4649 unchanged. Otherwise, it must be a MEM and we see what the
4650 base register and offset are. In any case, we have to emit any
4651 pending load to the equivalent reg of SP, if any. */
f8cfc6aa 4652 if (REG_P (retaddr))
3258e996
RK
4653 {
4654 emit_equiv_load (&info);
4655 add_insn (insn);
2f937369 4656 insn = next;
3258e996
RK
4657 continue;
4658 }
0bccc606 4659 else
3258e996 4660 {
0bccc606
NS
4661 rtx ret_ptr;
4662 gcc_assert (MEM_P (retaddr));
4663
4664 ret_ptr = XEXP (retaddr, 0);
4665
4666 if (REG_P (ret_ptr))
4667 {
4668 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4669 offset = 0;
4670 }
4671 else
4672 {
4673 gcc_assert (GET_CODE (ret_ptr) == PLUS
4674 && REG_P (XEXP (ret_ptr, 0))
4675 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4676 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4677 offset = INTVAL (XEXP (ret_ptr, 1));
4678 }
3258e996 4679 }
3258e996
RK
4680
4681 /* If the base of the location containing the return pointer
4682 is SP, we must update it with the replacement address. Otherwise,
4683 just build the necessary MEM. */
4684 retaddr = plus_constant (base, offset);
4685 if (base == stack_pointer_rtx)
4686 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4687 plus_constant (info.sp_equiv_reg,
4688 info.sp_offset));
4689
4690 retaddr = gen_rtx_MEM (Pmode, retaddr);
4691
4692 /* If there is a pending load to the equivalent register for SP
4693 and we reference that register, we must load our address into
4694 a scratch register and then do that load. */
4695 if (info.equiv_reg_src
4696 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4697 {
4698 unsigned int regno;
4699 rtx reg;
4700
4701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4702 if (HARD_REGNO_MODE_OK (regno, Pmode)
53b6fb26
RK
4703 && !fixed_regs[regno]
4704 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5e2d947c
JH
4705 && !REGNO_REG_SET_P
4706 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
b5ed05aa 4707 && !refers_to_regno_p (regno,
66fd46b6
JH
4708 regno + hard_regno_nregs[regno]
4709 [Pmode],
f285d67b
RK
4710 info.equiv_reg_src, NULL)
4711 && info.const_equiv[regno] == 0)
3258e996
RK
4712 break;
4713
0bccc606 4714 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7393c642 4715
3258e996
RK
4716 reg = gen_rtx_REG (Pmode, regno);
4717 emit_move_insn (reg, retaddr);
4718 retaddr = reg;
4719 }
4720
4721 emit_equiv_load (&info);
4722 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4723
 4724	  /* Show that the SET in the above insn is a RETURN.  */
4725 jump_set = single_set (jump_insn);
0bccc606
NS
4726 gcc_assert (jump_set);
4727 SET_IS_RETURN_P (jump_set) = 1;
7393c642 4728 }
3258e996
RK
4729
4730 /* If SP is not mentioned in the pattern and its equivalent register, if
4731 any, is not modified, just emit it. Otherwise, if neither is set,
4732 replace the reference to SP and emit the insn. If none of those are
4733 true, handle each SET individually. */
4734 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4735 && (info.sp_equiv_reg == stack_pointer_rtx
4736 || !reg_set_p (info.sp_equiv_reg, insn)))
4737 add_insn (insn);
4738 else if (! reg_set_p (stack_pointer_rtx, insn)
4739 && (info.sp_equiv_reg == stack_pointer_rtx
4740 || !reg_set_p (info.sp_equiv_reg, insn)))
7393c642 4741 {
0bccc606
NS
4742 int changed;
4743
4744 changed = validate_replace_rtx (stack_pointer_rtx,
4745 plus_constant (info.sp_equiv_reg,
4746 info.sp_offset),
4747 insn);
4748 gcc_assert (changed);
7393c642 4749
3258e996
RK
4750 add_insn (insn);
4751 }
4752 else if (GET_CODE (PATTERN (insn)) == SET)
4753 handle_epilogue_set (PATTERN (insn), &info);
4754 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4755 {
4756 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4757 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4758 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4759 }
4760 else
4761 add_insn (insn);
4762
4763 info.sp_equiv_reg = info.new_sp_equiv_reg;
4764 info.sp_offset = info.new_sp_offset;
2f937369 4765
f285d67b
RK
4766 /* Now update any constants this insn sets. */
4767 note_stores (PATTERN (insn), update_epilogue_consts, &info);
2f937369 4768 insn = next;
3258e996
RK
4769 }
4770
2f937369 4771 insns = get_insns ();
3258e996 4772 end_sequence ();
2f937369 4773 return insns;
3258e996
RK
4774}
4775
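/* An illustrative, self-contained sketch (not part of function.c) of the
   scratch-register search that keep_stack_depressed above performs when the
   return address lives in memory: pick a hard register that is not fixed, is
   clobbered by calls, and is not live on exit, so the return address can be
   loaded into it before the indirect jump.  The bitmask register sets and
   register numbers below are hypothetical toys, not GCC's HARD_REG_SETs.  */

#include <stdio.h>

#define TOY_NREGS 8

/* Return the first usable scratch register number, or -1 if none exists
   (the real code asserts that this cannot happen).  */
static int
toy_find_scratch (unsigned fixed_set, unsigned call_clobbered_set,
                  unsigned live_at_exit_set)
{
  int regno;

  for (regno = 0; regno < TOY_NREGS; regno++)
    if (!(fixed_set & (1u << regno))
        && (call_clobbered_set & (1u << regno))
        && !(live_at_exit_set & (1u << regno)))
      return regno;
  return -1;
}

int
main (void)
{
  /* r0 and r1 carry return values (live at exit); r7 is fixed (say, sp).  */
  unsigned fixed_set = 1u << 7;
  unsigned call_clobbered_set = 0x0f;
  unsigned live_at_exit_set = 0x03;

  printf ("scratch register: r%d\n",
          toy_find_scratch (fixed_set, call_clobbered_set, live_at_exit_set));
  return 0;
}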
d6a7951f 4776/* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
3258e996 4777 structure that contains information about what we've seen so far. We
797a6ac1 4778 process this SET by either updating that data or by emitting one or
3258e996
RK
4779 more insns. */
4780
4781static void
fa8db1f7 4782handle_epilogue_set (rtx set, struct epi_info *p)
3258e996
RK
4783{
4784 /* First handle the case where we are setting SP. Record what it is being
535a42b1 4785 set from, which we must be able to determine. */
3258e996
RK
4786 if (reg_set_p (stack_pointer_rtx, set))
4787 {
0bccc606 4788 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
3258e996 4789
f285d67b 4790 if (GET_CODE (SET_SRC (set)) == PLUS)
3258e996
RK
4791 {
4792 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
f285d67b
RK
4793 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4794 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
f285d67b 4795 else
0bccc606
NS
4796 {
4797 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4798 && (REGNO (XEXP (SET_SRC (set), 1))
4799 < FIRST_PSEUDO_REGISTER)
4800 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4801 p->new_sp_offset
4802 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4803 }
7393c642 4804 }
3258e996
RK
4805 else
4806 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4807
4808 /* If we are adjusting SP, we adjust from the old data. */
4809 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4810 {
4811 p->new_sp_equiv_reg = p->sp_equiv_reg;
4812 p->new_sp_offset += p->sp_offset;
4813 }
4814
0bccc606 4815 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
3258e996
RK
4816
4817 return;
4818 }
4819
535a42b1
NS
4820 /* Next handle the case where we are setting SP's equivalent
4821 register. We must not already have a value to set it to. We
4822 could update, but there seems little point in handling that case.
4823 Note that we have to allow for the case where we are setting the
4824 register set in the previous part of a PARALLEL inside a single
4825 insn. But use the old offset for any updates within this insn.
4826 We must allow for the case where the register is being set in a
 4827 different (usually wider) mode than Pmode. */
f189c7ca 4828 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
3258e996 4829 {
0bccc606
NS
4830 gcc_assert (!p->equiv_reg_src
4831 && REG_P (p->new_sp_equiv_reg)
4832 && REG_P (SET_DEST (set))
4833 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4834 <= BITS_PER_WORD)
4835 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4836 p->equiv_reg_src
4837 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4838 plus_constant (p->sp_equiv_reg,
4839 p->sp_offset));
3258e996
RK
4840 }
4841
4842 /* Otherwise, replace any references to SP in the insn to its new value
4843 and emit the insn. */
4844 else
4845 {
4846 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4847 plus_constant (p->sp_equiv_reg,
4848 p->sp_offset));
4849 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4850 plus_constant (p->sp_equiv_reg,
4851 p->sp_offset));
4852 emit_insn (set);
7393c642
RK
4853 }
4854}
3258e996 4855
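/* An illustrative, self-contained sketch (not part of function.c) of the
   stack-pointer bookkeeping that keep_stack_depressed/handle_epilogue_set
   above perform on real RTL: keep a (register, constant offset) pair
   describing what SP currently equals, fold "sp = sp + C" into the offset,
   rebase on "sp = reg", and rewrite later uses of SP as base + offset.
   The toy instruction encoding and register numbers are hypothetical.  */

#include <stdio.h>

enum toy_op { SP_ADD_CONST, SP_SET_FROM_REG, USE_SP_ADDRESS };

struct toy_insn { enum toy_op op; long operand; };

struct toy_epi_info { int sp_equiv_reg; long sp_offset; };

/* Process one toy "SET", updating the tracking data; a use of SP just
   prints the base + offset it would be rewritten to.  */
static void
toy_handle_set (struct toy_insn insn, struct toy_epi_info *p)
{
  switch (insn.op)
    {
    case SP_ADD_CONST:          /* sp = sp + C: fold C into the running offset.  */
      p->sp_offset += insn.operand;
      break;
    case SP_SET_FROM_REG:       /* sp = rN: new base register, offset starts at 0.  */
      p->sp_equiv_reg = (int) insn.operand;
      p->sp_offset = 0;
      break;
    case USE_SP_ADDRESS:        /* a reference to sp becomes (base + offset).  */
      printf ("use of sp -> r%d + %ld\n", p->sp_equiv_reg, p->sp_offset);
      break;
    }
}

int
main (void)
{
  struct toy_epi_info info;
  struct toy_insn epilogue[3];
  int i;

  info.sp_equiv_reg = 7;        /* assume r7 is the stack pointer itself */
  info.sp_offset = 0;

  epilogue[0].op = SP_SET_FROM_REG; epilogue[0].operand = 6;  /* sp = r6 (frame pointer) */
  epilogue[1].op = SP_ADD_CONST;    epilogue[1].operand = 16; /* sp = sp + 16            */
  epilogue[2].op = USE_SP_ADDRESS;  epilogue[2].operand = 0;  /* prints "r6 + 16"        */

  for (i = 0; i < 3; i++)
    toy_handle_set (epilogue[i], &info);
  return 0;
}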
f285d67b
RK
4856/* Update the tracking information for registers set to constants. */
4857
4858static void
4859update_epilogue_consts (rtx dest, rtx x, void *data)
4860{
4861 struct epi_info *p = (struct epi_info *) data;
8fbc67c0 4862 rtx new;
f285d67b 4863
f8cfc6aa 4864 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
f285d67b 4865 return;
8fbc67c0
RK
4866
4867 /* If we are either clobbering a register or doing a partial set,
4868 show we don't know the value. */
4869 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
f285d67b 4870 p->const_equiv[REGNO (dest)] = 0;
8fbc67c0
RK
4871
4872 /* If we are setting it to a constant, record that constant. */
4873 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
f285d67b 4874 p->const_equiv[REGNO (dest)] = SET_SRC (x);
8fbc67c0
RK
4875
4876 /* If this is a binary operation between a register we have been tracking
4877 and a constant, see if we can compute a new constant value. */
ec8e098d 4878 else if (ARITHMETIC_P (SET_SRC (x))
f8cfc6aa 4879 && REG_P (XEXP (SET_SRC (x), 0))
8fbc67c0
RK
4880 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
4881 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
4882 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4883 && 0 != (new = simplify_binary_operation
4884 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
4885 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
4886 XEXP (SET_SRC (x), 1)))
4887 && GET_CODE (new) == CONST_INT)
4888 p->const_equiv[REGNO (dest)] = new;
4889
4890 /* Otherwise, we can't do anything with this value. */
4891 else
4892 p->const_equiv[REGNO (dest)] = 0;
f285d67b
RK
4893}
4894
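/* An illustrative, self-contained sketch (not part of function.c) of the
   constant tracking done by update_epilogue_consts above, restated over a
   toy register file: a plain "rd = C" records a constant, "rd = rs + C"
   folds only when rs is already tracked, and any other store (or a clobber)
   forgets what was known about rd.  Register numbers and values are
   hypothetical.  */

#include <stdio.h>

#define TOY_NREGS 8

static int known[TOY_NREGS];    /* nonzero if a constant is known for the reg */
static long value[TOY_NREGS];   /* the constant, when known                    */

/* rd = C: record the constant.  */
static void
toy_set_const (int rd, long c)
{
  known[rd] = 1;
  value[rd] = c;
}

/* rd = rs + C: fold if rs is tracked, otherwise forget rd.  */
static void
toy_set_plus (int rd, int rs, long c)
{
  if (known[rs])
    toy_set_const (rd, value[rs] + c);
  else
    known[rd] = 0;
}

/* A clobber or any other kind of store invalidates rd.  */
static void
toy_set_unknown (int rd)
{
  known[rd] = 0;
}

int
main (void)
{
  toy_set_const (3, 40);        /* r3 = 40             */
  toy_set_plus (4, 3, 8);       /* r4 = r3 + 8  -> 48  */
  toy_set_unknown (3);          /* r3 clobbered        */
  toy_set_plus (5, 3, 8);       /* r5 becomes unknown  */
  printf ("r4: known=%d value=%ld; r5: known=%d\n",
          known[4], value[4], known[5]);
  return 0;
}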
3258e996
RK
4895/* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
4896
4897static void
fa8db1f7 4898emit_equiv_load (struct epi_info *p)
3258e996
RK
4899{
4900 if (p->equiv_reg_src != 0)
f285d67b
RK
4901 {
4902 rtx dest = p->sp_equiv_reg;
4903
4904 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
4905 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
4906 REGNO (p->sp_equiv_reg));
3258e996 4907
f285d67b
RK
4908 emit_move_insn (dest, p->equiv_reg_src);
4909 p->equiv_reg_src = 0;
4910 }
3258e996 4911}
7393c642
RK
4912#endif
4913
9faa82d8 4914/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58
TW
4915 this into place with notes indicating where the prologue ends and where
4916 the epilogue begins. Update the basic block information when possible. */
4917
4918void
fa8db1f7 4919thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
bdac5f58 4920{
ca1117cc 4921 int inserted = 0;
19d3c25c 4922 edge e;
91ea4f8d 4923#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
19d3c25c 4924 rtx seq;
91ea4f8d 4925#endif
ca1117cc
RH
4926#ifdef HAVE_prologue
4927 rtx prologue_end = NULL_RTX;
4928#endif
86c82654
RH
4929#if defined (HAVE_epilogue) || defined(HAVE_return)
4930 rtx epilogue_end = NULL_RTX;
4931#endif
628f6a4e 4932 edge_iterator ei;
e881bb1b 4933
bdac5f58
TW
4934#ifdef HAVE_prologue
4935 if (HAVE_prologue)
4936 {
e881bb1b 4937 start_sequence ();
718fe406 4938 seq = gen_prologue ();
e881bb1b 4939 emit_insn (seq);
bdac5f58
TW
4940
4941 /* Retain a map of the prologue insns. */
0a1c58a2 4942 record_insns (seq, &prologue);
2e040219 4943 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
9185a8d5 4944
2f937369 4945 seq = get_insns ();
e881bb1b 4946 end_sequence ();
0435312e 4947 set_insn_locators (seq, prologue_locator);
e881bb1b 4948
d6a7951f 4949 /* Can't deal with multiple successors of the entry block
75540af0
JH
4950 at the moment. Function should always have at least one
4951 entry point. */
c5cbcccf 4952 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
e881bb1b 4953
c5cbcccf 4954 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
75540af0 4955 inserted = 1;
bdac5f58 4956 }
bdac5f58 4957#endif
bdac5f58 4958
19d3c25c
RH
4959 /* If the exit block has no non-fake predecessors, we don't need
4960 an epilogue. */
628f6a4e 4961 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
19d3c25c
RH
4962 if ((e->flags & EDGE_FAKE) == 0)
4963 break;
4964 if (e == NULL)
4965 goto epilogue_done;
4966
69732dcb
RH
4967#ifdef HAVE_return
4968 if (optimize && HAVE_return)
4969 {
4970 /* If we're allowed to generate a simple return instruction,
4971 then by definition we don't need a full epilogue. Examine
718fe406
KH
4972 the block that falls through to EXIT. If it does not
4973 contain any code, examine its predecessors and try to
69732dcb
RH
4974 emit (conditional) return instructions. */
4975
4976 basic_block last;
69732dcb
RH
4977 rtx label;
4978
628f6a4e 4979 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
69732dcb
RH
4980 if (e->flags & EDGE_FALLTHRU)
4981 break;
4982 if (e == NULL)
4983 goto epilogue_done;
4984 last = e->src;
4985
4986 /* Verify that there are no active instructions in the last block. */
a813c111 4987 label = BB_END (last);
4b4bf941 4988 while (label && !LABEL_P (label))
69732dcb
RH
4989 {
4990 if (active_insn_p (label))
4991 break;
4992 label = PREV_INSN (label);
4993 }
4994
4b4bf941 4995 if (BB_HEAD (last) == label && LABEL_P (label))
69732dcb 4996 {
628f6a4e 4997 edge_iterator ei2;
718fe406 4998 rtx epilogue_line_note = NULL_RTX;
86c82654
RH
4999
5000 /* Locate the line number associated with the closing brace,
5001 if we can find one. */
5002 for (seq = get_last_insn ();
5003 seq && ! active_insn_p (seq);
5004 seq = PREV_INSN (seq))
4b4bf941 5005 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
86c82654
RH
5006 {
5007 epilogue_line_note = seq;
5008 break;
5009 }
5010
628f6a4e 5011 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
69732dcb
RH
5012 {
5013 basic_block bb = e->src;
5014 rtx jump;
5015
69732dcb 5016 if (bb == ENTRY_BLOCK_PTR)
628f6a4e
BE
5017 {
5018 ei_next (&ei2);
5019 continue;
5020 }
69732dcb 5021
a813c111 5022 jump = BB_END (bb);
4b4bf941 5023 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
628f6a4e
BE
5024 {
5025 ei_next (&ei2);
5026 continue;
5027 }
69732dcb
RH
5028
5029 /* If we have an unconditional jump, we can replace that
5030 with a simple return instruction. */
5031 if (simplejump_p (jump))
5032 {
86c82654 5033 emit_return_into_block (bb, epilogue_line_note);
53c17031 5034 delete_insn (jump);
69732dcb
RH
5035 }
5036
5037 /* If we have a conditional jump, we can try to replace
5038 that with a conditional return instruction. */
5039 else if (condjump_p (jump))
5040 {
47009d11 5041 if (! redirect_jump (jump, 0, 0))
628f6a4e
BE
5042 {
5043 ei_next (&ei2);
5044 continue;
5045 }
718fe406 5046
3a75e42e
CP
5047 /* If this block has only one successor, it both jumps
5048 and falls through to the fallthru block, so we can't
5049 delete the edge. */
c5cbcccf 5050 if (single_succ_p (bb))
628f6a4e
BE
5051 {
5052 ei_next (&ei2);
5053 continue;
5054 }
69732dcb
RH
5055 }
5056 else
628f6a4e
BE
5057 {
5058 ei_next (&ei2);
5059 continue;
5060 }
69732dcb
RH
5061
5062 /* Fix up the CFG for the successful change we just made. */
86c82654 5063 redirect_edge_succ (e, EXIT_BLOCK_PTR);
69732dcb 5064 }
69732dcb 5065
2dd8bc01
GK
5066 /* Emit a return insn for the exit fallthru block. Whether
5067 this is still reachable will be determined later. */
69732dcb 5068
a813c111 5069 emit_barrier_after (BB_END (last));
86c82654 5070 emit_return_into_block (last, epilogue_line_note);
a813c111 5071 epilogue_end = BB_END (last);
c5cbcccf 5072 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
718fe406 5073 goto epilogue_done;
2dd8bc01 5074 }
69732dcb
RH
5075 }
5076#endif
623a66fa
R
5077 /* Find the edge that falls through to EXIT. Other edges may exist
5078 due to RETURN instructions, but those don't need epilogues.
5079 There really shouldn't be a mixture -- either all should have
5080 been converted or none, however... */
5081
628f6a4e 5082 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
623a66fa
R
5083 if (e->flags & EDGE_FALLTHRU)
5084 break;
5085 if (e == NULL)
5086 goto epilogue_done;
5087
bdac5f58
TW
5088#ifdef HAVE_epilogue
5089 if (HAVE_epilogue)
5090 {
19d3c25c 5091 start_sequence ();
2e040219 5092 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
a78bdb38 5093
19d3c25c 5094 seq = gen_epilogue ();
7393c642 5095
3258e996
RK
5096#ifdef INCOMING_RETURN_ADDR_RTX
5097 /* If this function returns with the stack depressed and we can support
5098 it, massage the epilogue to actually do that. */
43db0363
RK
5099 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5100 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
3258e996
RK
5101 seq = keep_stack_depressed (seq);
5102#endif
7393c642 5103
19d3c25c 5104 emit_jump_insn (seq);
bdac5f58 5105
19d3c25c 5106 /* Retain a map of the epilogue insns. */
0a1c58a2 5107 record_insns (seq, &epilogue);
0435312e 5108 set_insn_locators (seq, epilogue_locator);
bdac5f58 5109
2f937369 5110 seq = get_insns ();
718fe406 5111 end_sequence ();
e881bb1b 5112
19d3c25c 5113 insert_insn_on_edge (seq, e);
ca1117cc 5114 inserted = 1;
bdac5f58 5115 }
623a66fa 5116 else
bdac5f58 5117#endif
623a66fa
R
5118 {
5119 basic_block cur_bb;
5120
5121 if (! next_active_insn (BB_END (e->src)))
5122 goto epilogue_done;
5123 /* We have a fall-through edge to the exit block, the source is not
5124 at the end of the function, and there will be an assembler epilogue
5125 at the end of the function.
5126 We can't use force_nonfallthru here, because that would try to
5127 use return. Inserting a jump 'by hand' is extremely messy, so
5128 we take advantage of cfg_layout_finalize using
5129 fixup_fallthru_exit_predecessor. */
35b6b437 5130 cfg_layout_initialize (0);
623a66fa
R
5131 FOR_EACH_BB (cur_bb)
5132 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5133 cur_bb->rbi->next = cur_bb->next_bb;
5134 cfg_layout_finalize ();
5135 }
19d3c25c 5136epilogue_done:
e881bb1b 5137
ca1117cc 5138 if (inserted)
e881bb1b 5139 commit_edge_insertions ();
0a1c58a2
JL
5140
5141#ifdef HAVE_sibcall_epilogue
5142 /* Emit sibling epilogues before any sibling call sites. */
628f6a4e 5143 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
0a1c58a2
JL
5144 {
5145 basic_block bb = e->src;
a813c111 5146 rtx insn = BB_END (bb);
0a1c58a2 5147
4b4bf941 5148 if (!CALL_P (insn)
0a1c58a2 5149 || ! SIBLING_CALL_P (insn))
628f6a4e
BE
5150 {
5151 ei_next (&ei);
5152 continue;
5153 }
0a1c58a2
JL
5154
5155 start_sequence ();
0af5c896
RE
5156 emit_insn (gen_sibcall_epilogue ());
5157 seq = get_insns ();
0a1c58a2
JL
5158 end_sequence ();
5159
2f937369
DM
5160 /* Retain a map of the epilogue insns. Used in life analysis to
5161 avoid getting rid of sibcall epilogue insns. Do this before we
5162 actually emit the sequence. */
5163 record_insns (seq, &sibcall_epilogue);
0435312e 5164 set_insn_locators (seq, epilogue_locator);
2f937369 5165
5e35992a 5166 emit_insn_before (seq, insn);
628f6a4e 5167 ei_next (&ei);
0a1c58a2
JL
5168 }
5169#endif
ca1117cc
RH
5170
5171#ifdef HAVE_prologue
589fe865 5172 /* This is probably all useless now that we use locators. */
ca1117cc
RH
5173 if (prologue_end)
5174 {
5175 rtx insn, prev;
5176
5177 /* GDB handles `break f' by setting a breakpoint on the first
30196c1f 5178 line note after the prologue. Which means (1) that if
ca1117cc 5179 there are line number notes before where we inserted the
30196c1f
RH
5180 prologue we should move them, and (2) we should generate a
5181 note before the end of the first basic block, if there isn't
016030fe
JH
5182 one already there.
5183
8d9afc4e 5184 ??? This behavior is completely broken when dealing with
016030fe
JH
 5185 multiple entry functions. We simply always place the note
 5186 into the first basic block and let alternate entry points
 5187 be missed.
5188 */
ca1117cc 5189
718fe406 5190 for (insn = prologue_end; insn; insn = prev)
ca1117cc
RH
5191 {
5192 prev = PREV_INSN (insn);
4b4bf941 5193 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
ca1117cc
RH
5194 {
5195 /* Note that we cannot reorder the first insn in the
5196 chain, since rest_of_compilation relies on that
30196c1f 5197 remaining constant. */
ca1117cc 5198 if (prev == NULL)
30196c1f
RH
5199 break;
5200 reorder_insns (insn, insn, prologue_end);
ca1117cc
RH
5201 }
5202 }
5203
30196c1f 5204 /* Find the last line number note in the first block. */
a813c111 5205 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
016030fe 5206 insn != prologue_end && insn;
30196c1f 5207 insn = PREV_INSN (insn))
4b4bf941 5208 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
30196c1f
RH
5209 break;
5210
5211 /* If we didn't find one, make a copy of the first line number
5212 we run across. */
5213 if (! insn)
ca1117cc 5214 {
30196c1f
RH
5215 for (insn = next_active_insn (prologue_end);
5216 insn;
5217 insn = PREV_INSN (insn))
4b4bf941 5218 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
30196c1f 5219 {
5f2fc772 5220 emit_note_copy_after (insn, prologue_end);
30196c1f
RH
5221 break;
5222 }
ca1117cc
RH
5223 }
5224 }
5225#endif
86c82654
RH
5226#ifdef HAVE_epilogue
5227 if (epilogue_end)
5228 {
5229 rtx insn, next;
5230
5231 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 5232 There is no need, however, to be quite so anal about the existence
84c1fa24
UW
5233 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5234 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5235 info generation. */
718fe406 5236 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
5237 {
5238 next = NEXT_INSN (insn);
4b4bf941 5239 if (NOTE_P (insn)
84c1fa24
UW
5240 && (NOTE_LINE_NUMBER (insn) > 0
5241 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5242 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
86c82654
RH
5243 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5244 }
5245 }
5246#endif
bdac5f58
TW
5247}
5248
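/* An illustrative, self-contained sketch (not part of function.c) of the
   edge scan in thread_prologue_and_epilogue_insns above: among the
   predecessor edges of the exit block, only the fall-through one needs an
   epilogue sequence inserted on it; edges created by RETURN insns do not.
   The edge structure and flag below are toys, not GCC's CFG types.  */

#include <stdio.h>

#define TOY_EDGE_FALLTHRU 1

struct toy_edge { int src_block; int flags; };

/* Return the index of the fall-through predecessor edge of EXIT, or -1
   if there is none, in which case no epilogue needs inserting.  */
static int
toy_find_epilogue_edge (const struct toy_edge *preds, int n)
{
  int i;

  for (i = 0; i < n; i++)
    if (preds[i].flags & TOY_EDGE_FALLTHRU)
      return i;
  return -1;
}

int
main (void)
{
  struct toy_edge exit_preds[2];
  int e;

  exit_preds[0].src_block = 2; exit_preds[0].flags = 0;                 /* ends in a return */
  exit_preds[1].src_block = 5; exit_preds[1].flags = TOY_EDGE_FALLTHRU; /* falls through    */

  e = toy_find_epilogue_edge (exit_preds, 2);
  if (e >= 0)
    printf ("insert epilogue on the edge out of block %d\n", exit_preds[e].src_block);
  else
    printf ("no fall-through edge: no epilogue insertion needed\n");
  return 0;
}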
5249/* Reposition the prologue-end and epilogue-begin notes after instruction
5250 scheduling and delayed branch scheduling. */
5251
5252void
fa8db1f7 5253reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
bdac5f58
TW
5254{
5255#if defined (HAVE_prologue) || defined (HAVE_epilogue)
9f53e965 5256 rtx insn, last, note;
0a1c58a2
JL
5257 int len;
5258
f995dcfe 5259 if ((len = VEC_length (int, prologue)) > 0)
bdac5f58 5260 {
9f53e965 5261 last = 0, note = 0;
bdac5f58 5262
0a1c58a2
JL
5263 /* Scan from the beginning until we reach the last prologue insn.
5264 We apparently can't depend on basic_block_{head,end} after
5265 reorg has run. */
9f53e965 5266 for (insn = f; insn; insn = NEXT_INSN (insn))
bdac5f58 5267 {
4b4bf941 5268 if (NOTE_P (insn))
9392c110 5269 {
0a1c58a2
JL
5270 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5271 note = insn;
5272 }
f995dcfe 5273 else if (contains (insn, &prologue))
0a1c58a2 5274 {
9f53e965
RH
5275 last = insn;
5276 if (--len == 0)
5277 break;
5278 }
5279 }
797a6ac1 5280
9f53e965
RH
5281 if (last)
5282 {
9f53e965
RH
5283 /* Find the prologue-end note if we haven't already, and
5284 move it to just after the last prologue insn. */
5285 if (note == 0)
5286 {
5287 for (note = last; (note = NEXT_INSN (note));)
4b4bf941 5288 if (NOTE_P (note)
9f53e965
RH
5289 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5290 break;
5291 }
c93b03c2 5292
9f53e965 5293 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 5294 if (LABEL_P (last))
9f53e965
RH
5295 last = NEXT_INSN (last);
5296 reorder_insns (note, note, last);
bdac5f58 5297 }
0a1c58a2
JL
5298 }
5299
f995dcfe 5300 if ((len = VEC_length (int, epilogue)) > 0)
0a1c58a2 5301 {
9f53e965 5302 last = 0, note = 0;
bdac5f58 5303
0a1c58a2
JL
5304 /* Scan from the end until we reach the first epilogue insn.
5305 We apparently can't depend on basic_block_{head,end} after
5306 reorg has run. */
9f53e965 5307 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
bdac5f58 5308 {
4b4bf941 5309 if (NOTE_P (insn))
9392c110 5310 {
0a1c58a2
JL
5311 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5312 note = insn;
5313 }
f995dcfe 5314 else if (contains (insn, &epilogue))
0a1c58a2 5315 {
9f53e965
RH
5316 last = insn;
5317 if (--len == 0)
5318 break;
5319 }
5320 }
c93b03c2 5321
9f53e965
RH
5322 if (last)
5323 {
5324 /* Find the epilogue-begin note if we haven't already, and
5325 move it to just before the first epilogue insn. */
5326 if (note == 0)
5327 {
5328 for (note = insn; (note = PREV_INSN (note));)
4b4bf941 5329 if (NOTE_P (note)
9f53e965
RH
5330 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5331 break;
9392c110 5332 }
9f53e965
RH
5333
5334 if (PREV_INSN (last) != note)
5335 reorder_insns (note, note, PREV_INSN (last));
bdac5f58
TW
5336 }
5337 }
5338#endif /* HAVE_prologue or HAVE_epilogue */
5339}
87ff9c8e 5340
6de9cd9a
DN
 5341/* Resets the ib_boundaries_block array. */
5342
5343void
5344reset_block_changes (void)
5345{
5346 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5347 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5348}
5349
5350/* Record the boundary for BLOCK. */
5351void
5352record_block_change (tree block)
5353{
5354 int i, n;
5355 tree last_block;
5356
5357 if (!block)
5358 return;
5359
5360 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5361 VARRAY_POP (cfun->ib_boundaries_block);
5362 n = get_max_uid ();
5363 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5364 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5365
5366 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5367}
5368
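/* An illustrative, self-contained sketch (not part of function.c) of the
   lazy uid -> block recording done by record_block_change above: when the
   current block changes, every insn uid handed out since the previous
   change is back-filled with the block that was active.  The fixed-size
   integer arrays stand in for the cfun->ib_boundaries_block varray; the
   uids and block identifiers are hypothetical.  */

#include <stdio.h>

#define TOY_MAX_UIDS 32

static int boundary_block[TOY_MAX_UIDS]; /* block recorded for each uid */
static int n_recorded;                   /* uids stamped so far         */
static int top_block;                    /* block currently active      */
static int max_uid;                      /* uids handed out so far      */

static void
toy_record_block_change (int new_block)
{
  int last = top_block;

  /* Back-fill every uid seen since the last change with the block that
     was active while those insns were emitted.  */
  while (n_recorded < max_uid && n_recorded < TOY_MAX_UIDS)
    boundary_block[n_recorded++] = last;
  top_block = new_block;
}

int
main (void)
{
  int uid;

  top_block = 1;                /* start emitting insns in block 1 */
  max_uid = 3;                  /* uids 0..2 created in block 1    */
  toy_record_block_change (2);  /* switch to block 2               */
  max_uid = 5;                  /* uids 3..4 created in block 2    */
  toy_record_block_change (3);

  for (uid = 0; uid < 5; uid++)
    printf ("uid %d -> block %d\n", uid, boundary_block[uid]);
  return 0;
}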
5369/* Finishes record of boundaries. */
5370void finalize_block_changes (void)
5371{
5372 record_block_change (DECL_INITIAL (current_function_decl));
5373}
5374
 5375/* For INSN, store in *BLOCK the BLOCK it belongs to. */
5376void
5377check_block_change (rtx insn, tree *block)
5378{
5379 unsigned uid = INSN_UID (insn);
5380
5381 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5382 return;
5383
5384 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5385}
5386
5387/* Releases the ib_boundaries_block records. */
5388void
5389free_block_changes (void)
5390{
5391 cfun->ib_boundaries_block = NULL;
5392}
5393
faed5cc3
SB
5394/* Returns the name of the current function. */
5395const char *
5396current_function_name (void)
5397{
ae2bcd98 5398 return lang_hooks.decl_printable_name (cfun->decl, 2);
faed5cc3
SB
5399}
5400
e2500fed 5401#include "gt-function.h"