[gcc.git] / gcc / function.c
5e6908ea 1/* Expands front end tree to back end RTL for GCC.
23a5b65a 2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
6f086dfc 3
1322177d 4This file is part of GCC.
6f086dfc 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
6f086dfc 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
6f086dfc
RS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
6f086dfc 19
6f086dfc
RS
20/* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
8fff4fc1 32 not get a hard register. */
6f086dfc
RS
33
34#include "config.h"
670ee920 35#include "system.h"
4977bab6
ZW
36#include "coretypes.h"
37#include "tm.h"
0cbd9993 38#include "rtl-error.h"
6f086dfc 39#include "tree.h"
d8a2d370
DN
40#include "stor-layout.h"
41#include "varasm.h"
42#include "stringpool.h"
6f086dfc 43#include "flags.h"
1ef08c63 44#include "except.h"
6f086dfc 45#include "function.h"
6f086dfc 46#include "expr.h"
c6b97fac 47#include "optabs.h"
e78d8e51 48#include "libfuncs.h"
6f086dfc
RS
49#include "regs.h"
50#include "hard-reg-set.h"
51#include "insn-config.h"
52#include "recog.h"
53#include "output.h"
e2500fed 54#include "hashtab.h"
b1474bb7 55#include "tm_p.h"
7afff7cf 56#include "langhooks.h"
61f71b34 57#include "target.h"
677f3fa8 58#include "common/common-target.h"
2fb9a547 59#include "gimple-expr.h"
45b0be94 60#include "gimplify.h"
ef330312 61#include "tree-pass.h"
7d69de61 62#include "predict.h"
6fb5fa3c 63#include "df.h"
ffe14686
AM
64#include "params.h"
65#include "bb-reorder.h"
f30e25a3 66#include "shrink-wrap.h"
b9b5f433 67#include "toplev.h"
b8704801 68#include "rtl-iter.h"
7d69de61 69
5576d6f2
TT
70/* So we can assign to cfun in this file. */
71#undef cfun
72
95f3f59e
JDA
73#ifndef STACK_ALIGNMENT_NEEDED
74#define STACK_ALIGNMENT_NEEDED 1
75#endif
76
975f3818
RS
77#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78
6f086dfc
RS
79/* Round a value down to the largest multiple of the required alignment
80 that does not exceed it. Avoid using division in case the value is
81 negative. Assume the alignment is a power of two. */
82#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
83
84/* Similar, but round to the next highest integer that meets the
85 alignment. */
86#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
87
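/* Illustrative worked example (not part of the original source): because
   ALIGN is a power of two, both macros reduce to bit masking and behave
   correctly for negative values, e.g.

     FLOOR_ROUND (-13, 8) == -16    (-13 & ~7)
     CEIL_ROUND  ( 13, 8) ==  16    ((13 + 7) & ~7)

   whereas dividing a negative value would not reliably round downward.  */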
6f086dfc 88/* Nonzero once virtual register instantiation has been done.
c39ada04
DD
89 assign_stack_local uses frame_pointer_rtx when this is nonzero.
90 calls.c:emit_library_call_value_1 uses it to set up
91 post-instantiation libcalls. */
92int virtuals_instantiated;
6f086dfc 93
df696a75 94/* Assign unique numbers to labels generated for profiling, debugging, etc. */
17211ab5 95static GTY(()) int funcdef_no;
f6f315fe 96
414c4dc4
NC
97/* These variables hold pointers to functions to create and destroy
98 target specific, per-function data structures. */
fa8db1f7 99struct machine_function * (*init_machine_status) (void);
46766466 100
b384405b 101/* The currently compiled function. */
01d939e8 102struct function *cfun = 0;
b384405b 103
cd9c1ca8
RH
104/* These hashes record the prologue and epilogue insns. */
105static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
106 htab_t prologue_insn_hash;
107static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
108 htab_t epilogue_insn_hash;
6f086dfc 109\f
b646ba3f
DS
110
111htab_t types_used_by_vars_hash = NULL;
9771b263 112vec<tree, va_gc> *types_used_by_cur_var_decl;
b646ba3f 113
e15679f8
RK
114/* Forward declarations. */
115
fa8db1f7 116static struct temp_slot *find_temp_slot_from_address (rtx);
fa8db1f7
AJ
117static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
118static void pad_below (struct args_size *, enum machine_mode, tree);
691fe203 119static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
fa8db1f7
AJ
120static int all_blocks (tree, tree *);
121static tree *get_block_vector (tree, int *);
122extern tree debug_find_var_in_block_tree (tree, tree);
1f52178b 123/* We always define `record_insns' even if it's not used so that we
ec97b83a 124 can always export `prologue_epilogue_contains'. */
dc01c3d1 125static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
cd9c1ca8 126static bool contains (const_rtx, htab_t);
db2960f4 127static void prepare_function_start (void);
fa8db1f7
AJ
128static void do_clobber_return_reg (rtx, void *);
129static void do_use_return_reg (rtx, void *);
c20bf1f3 130\f
936fc9ba
JH
131/* Stack of nested functions. */
132/* Keep track of the cfun stack. */
e5e809f4 133
936fc9ba 134typedef struct function *function_p;
e5e809f4 135
9771b263 136static vec<function_p> function_context_stack;
6f086dfc
RS
137
138/* Save the current context for compilation of a nested function.
d2784db4 139 This is called from language-specific code. */
6f086dfc
RS
140
141void
d2784db4 142push_function_context (void)
6f086dfc 143{
01d939e8 144 if (cfun == 0)
182e0d71 145 allocate_struct_function (NULL, false);
b384405b 146
9771b263 147 function_context_stack.safe_push (cfun);
db2960f4 148 set_cfun (NULL);
6f086dfc
RS
149}
150
151/* Restore the last saved context, at the end of a nested function.
152 This function is called from language-specific code. */
153
154void
d2784db4 155pop_function_context (void)
6f086dfc 156{
9771b263 157 struct function *p = function_context_stack.pop ();
db2960f4 158 set_cfun (p);
6f086dfc 159 current_function_decl = p->decl;
6f086dfc 160
6f086dfc 161 /* Reset variables that have known state during rtx generation. */
6f086dfc 162 virtuals_instantiated = 0;
1b3d8f8a 163 generating_concat_p = 1;
6f086dfc 164}
e4a4639e 165
fa51b01b
RH
166/* Clear out all parts of the state in F that can safely be discarded
167 after the function has been parsed, but not compiled, to let
168 garbage collection reclaim the memory. */
169
170void
fa8db1f7 171free_after_parsing (struct function *f)
fa51b01b 172{
e8924938 173 f->language = 0;
fa51b01b
RH
174}
175
e2ecd91c
BS
176/* Clear out all parts of the state in F that can safely be discarded
177 after the function has been compiled, to let garbage collection
0a8a198c 178 reclaim the memory. */
21cd906e 179
e2ecd91c 180void
fa8db1f7 181free_after_compilation (struct function *f)
e2ecd91c 182{
cd9c1ca8
RH
183 prologue_insn_hash = NULL;
184 epilogue_insn_hash = NULL;
185
04695783 186 free (crtl->emit.regno_pointer_align);
f995dcfe 187
3e029763 188 memset (crtl, 0, sizeof (struct rtl_data));
e2500fed 189 f->eh = NULL;
e2500fed 190 f->machine = NULL;
997de8ed 191 f->cfg = NULL;
fa51b01b 192
57b9e367 193 regno_reg_rtx = NULL;
e2ecd91c 194}
6f086dfc 195\f
49ad7cfa
BS
196/* Return size needed for stack frame based on slots so far allocated.
197 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
198 the caller may have to do that. */
9fb798d7 199
49ad7cfa 200HOST_WIDE_INT
fa8db1f7 201get_frame_size (void)
49ad7cfa 202{
bd60bab2
JH
203 if (FRAME_GROWS_DOWNWARD)
204 return -frame_offset;
205 else
206 return frame_offset;
49ad7cfa
BS
207}
208
9fb798d7
EB
209/* Issue an error message and return TRUE if frame OFFSET overflows in
210 the signed target pointer arithmetic for function FUNC. Otherwise
211 return FALSE. */
212
213bool
214frame_offset_overflow (HOST_WIDE_INT offset, tree func)
b8698a0f 215{
9fb798d7
EB
216 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
217
218 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
219 /* Leave room for the fixed part of the frame. */
220 - 64 * UNITS_PER_WORD)
221 {
c5d75364
MLI
222 error_at (DECL_SOURCE_LOCATION (func),
223 "total size of local objects too large");
9fb798d7
EB
224 return TRUE;
225 }
226
227 return FALSE;
228}
229
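/* Illustrative worked example (not part of the original source): on a
   target whose Pmode is 32 bits wide and whose UNITS_PER_WORD is 4, the
   check above permits at most (1 << 31) - 64 * 4 = 2147483392 bytes of
   local objects; a larger frame offset triggers the error.  */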
76fe54f0
L
230/* Return stack slot alignment in bits for TYPE and MODE. */
231
232static unsigned int
233get_stack_local_alignment (tree type, enum machine_mode mode)
234{
235 unsigned int alignment;
236
237 if (mode == BLKmode)
238 alignment = BIGGEST_ALIGNMENT;
239 else
240 alignment = GET_MODE_ALIGNMENT (mode);
241
242 /* Allow the front-end to (possibly) increase the alignment of this
243 stack slot. */
244 if (! type)
245 type = lang_hooks.types.type_for_mode (mode, 0);
246
247 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
248}
249
56731d64
BS
250/* Determine whether it is possible to fit a stack slot of size SIZE and
251 alignment ALIGNMENT into an area in the stack frame that starts at
252 frame offset START and has a length of LENGTH. If so, store the frame
253 offset to be used for the stack slot in *POFFSET and return true;
254 return false otherwise. This function will extend the frame size when
255 given a start/length pair that lies at the end of the frame. */
256
257static bool
258try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
259 HOST_WIDE_INT size, unsigned int alignment,
260 HOST_WIDE_INT *poffset)
261{
262 HOST_WIDE_INT this_frame_offset;
263 int frame_off, frame_alignment, frame_phase;
264
265 /* Calculate how many bytes the start of local variables is off from
266 stack alignment. */
267 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
268 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
269 frame_phase = frame_off ? frame_alignment - frame_off : 0;
270
271 /* Round the frame offset to the specified alignment. */
272
273 /* We must be careful here, since FRAME_OFFSET might be negative and
274 division with a negative dividend isn't as well defined as we might
275 like. So we instead assume that ALIGNMENT is a power of two and
276 use logical operations which are unambiguous. */
277 if (FRAME_GROWS_DOWNWARD)
278 this_frame_offset
279 = (FLOOR_ROUND (start + length - size - frame_phase,
280 (unsigned HOST_WIDE_INT) alignment)
281 + frame_phase);
282 else
283 this_frame_offset
284 = (CEIL_ROUND (start - frame_phase,
285 (unsigned HOST_WIDE_INT) alignment)
286 + frame_phase);
287
288 /* See if it fits. If this space is at the edge of the frame,
289 consider extending the frame to make it fit. Our caller relies on
290 this when allocating a new slot. */
291 if (frame_offset == start && this_frame_offset < frame_offset)
292 frame_offset = this_frame_offset;
293 else if (this_frame_offset < start)
294 return false;
295 else if (start + length == frame_offset
296 && this_frame_offset + size > start + length)
297 frame_offset = this_frame_offset + size;
298 else if (this_frame_offset + size > start + length)
299 return false;
300
301 *poffset = this_frame_offset;
302 return true;
303}
304
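/* Illustrative worked example (not part of the original source), assuming
   FRAME_GROWS_DOWNWARD and frame_phase == 0: for a recorded free area with
   START == -32 and LENGTH == 16, a request with SIZE == 8 and
   ALIGNMENT == 8 computes

     this_frame_offset = FLOOR_ROUND (-32 + 16 - 8, 8) = -24,

   which lies inside [-32, -16), so *POFFSET becomes -24 and the function
   returns true.  */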
305/* Create a new frame_space structure describing free space in the stack
306 frame beginning at START and ending at END, and chain it into the
307 function's frame_space_list. */
308
309static void
310add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
311{
766090c2 312 struct frame_space *space = ggc_alloc<frame_space> ();
56731d64
BS
313 space->next = crtl->frame_space_list;
314 crtl->frame_space_list = space;
315 space->start = start;
316 space->length = end - start;
317}
318
6f086dfc
RS
319/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
320 with machine mode MODE.
718fe406 321
6f086dfc
RS
322 ALIGN controls the amount of alignment for the address of the slot:
323 0 means according to MODE,
324 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
cfa29a4c 325 -2 means use BITS_PER_UNIT,
6f086dfc
RS
326 positive specifies alignment boundary in bits.
327
80a832cd
JJ
328 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
329 alignment and ASLK_RECORD_PAD bit set if we should remember
330 extra space we allocated for alignment purposes. When we are
331 called from assign_stack_temp_for_type, it is not set so we don't
332 track the same stack slot in two independent lists.
2e3f842f 333
bd60bab2 334 We do not round to stack_boundary here. */
6f086dfc 335
bd60bab2 336rtx
2e3f842f 337assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
80a832cd 338 int align, int kind)
6f086dfc 339{
b3694847 340 rtx x, addr;
6f086dfc 341 int bigend_correction = 0;
427188d5 342 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
76fe54f0 343 unsigned int alignment, alignment_in_bits;
6f086dfc
RS
344
345 if (align == 0)
346 {
76fe54f0 347 alignment = get_stack_local_alignment (NULL, mode);
d16790f2 348 alignment /= BITS_PER_UNIT;
6f086dfc
RS
349 }
350 else if (align == -1)
351 {
352 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
353 size = CEIL_ROUND (size, alignment);
354 }
cfa29a4c
EB
355 else if (align == -2)
356 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
6f086dfc
RS
357 else
358 alignment = align / BITS_PER_UNIT;
359
2e3f842f
L
360 alignment_in_bits = alignment * BITS_PER_UNIT;
361
2e3f842f
L
362 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
363 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
364 {
365 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
366 alignment = alignment_in_bits / BITS_PER_UNIT;
367 }
a0871656 368
2e3f842f
L
369 if (SUPPORTS_STACK_ALIGNMENT)
370 {
371 if (crtl->stack_alignment_estimated < alignment_in_bits)
372 {
373 if (!crtl->stack_realign_processed)
374 crtl->stack_alignment_estimated = alignment_in_bits;
375 else
376 {
377 /* If stack is realigned and stack alignment value
378 hasn't been finalized, it is OK not to increase
379 stack_alignment_estimated. The bigger alignment
380 requirement is recorded in stack_alignment_needed
381 below. */
382 gcc_assert (!crtl->stack_realign_finalized);
383 if (!crtl->stack_realign_needed)
384 {
385 /* It is OK to reduce the alignment as long as the
386 requested size is 0 or the estimated stack
387 alignment >= mode alignment. */
80a832cd 388 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
2e3f842f
L
389 || size == 0
390 || (crtl->stack_alignment_estimated
391 >= GET_MODE_ALIGNMENT (mode)));
392 alignment_in_bits = crtl->stack_alignment_estimated;
393 alignment = alignment_in_bits / BITS_PER_UNIT;
394 }
395 }
396 }
397 }
76fe54f0
L
398
399 if (crtl->stack_alignment_needed < alignment_in_bits)
400 crtl->stack_alignment_needed = alignment_in_bits;
f85882d8
JY
401 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
402 crtl->max_used_stack_slot_alignment = alignment_in_bits;
a0871656 403
56731d64
BS
404 if (mode != BLKmode || size != 0)
405 {
80a832cd 406 if (kind & ASLK_RECORD_PAD)
56731d64 407 {
80a832cd
JJ
408 struct frame_space **psp;
409
410 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
411 {
412 struct frame_space *space = *psp;
413 if (!try_fit_stack_local (space->start, space->length, size,
414 alignment, &slot_offset))
415 continue;
416 *psp = space->next;
417 if (slot_offset > space->start)
418 add_frame_space (space->start, slot_offset);
419 if (slot_offset + size < space->start + space->length)
420 add_frame_space (slot_offset + size,
421 space->start + space->length);
422 goto found_space;
423 }
56731d64
BS
424 }
425 }
426 else if (!STACK_ALIGNMENT_NEEDED)
427 {
428 slot_offset = frame_offset;
429 goto found_space;
430 }
431
432 old_frame_offset = frame_offset;
433
434 if (FRAME_GROWS_DOWNWARD)
435 {
436 frame_offset -= size;
437 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
58dbcf05 438
80a832cd
JJ
439 if (kind & ASLK_RECORD_PAD)
440 {
441 if (slot_offset > frame_offset)
442 add_frame_space (frame_offset, slot_offset);
443 if (slot_offset + size < old_frame_offset)
444 add_frame_space (slot_offset + size, old_frame_offset);
445 }
56731d64
BS
446 }
447 else
95f3f59e 448 {
56731d64
BS
449 frame_offset += size;
450 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
451
80a832cd
JJ
452 if (kind & ASLK_RECORD_PAD)
453 {
454 if (slot_offset > old_frame_offset)
455 add_frame_space (old_frame_offset, slot_offset);
456 if (slot_offset + size < frame_offset)
457 add_frame_space (slot_offset + size, frame_offset);
458 }
95f3f59e 459 }
6f086dfc 460
56731d64 461 found_space:
6f086dfc
RS
462 /* On a big-endian machine, if we are allocating more space than we will use,
463 use the least significant bytes of those that are allocated. */
d70eadf7 464 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
6f086dfc 465 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc 466
6f086dfc
RS
467 /* If we have already instantiated virtual registers, return the actual
468 address relative to the frame pointer. */
bd60bab2 469 if (virtuals_instantiated)
0a81f074 470 addr = plus_constant (Pmode, frame_pointer_rtx,
c41536f5 471 trunc_int_for_mode
56731d64 472 (slot_offset + bigend_correction
c41536f5 473 + STARTING_FRAME_OFFSET, Pmode));
6f086dfc 474 else
0a81f074 475 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
c41536f5 476 trunc_int_for_mode
56731d64 477 (slot_offset + bigend_correction,
c41536f5 478 Pmode));
6f086dfc 479
38a448ca 480 x = gen_rtx_MEM (mode, addr);
76fe54f0 481 set_mem_align (x, alignment_in_bits);
be0c514c 482 MEM_NOTRAP_P (x) = 1;
6f086dfc 483
bd60bab2
JH
484 stack_slot_list
485 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
e2ecd91c 486
bd60bab2
JH
487 if (frame_offset_overflow (frame_offset, current_function_decl))
488 frame_offset = 0;
9070115b 489
6f086dfc
RS
490 return x;
491}
2e3f842f
L
492
493/* Wrap up assign_stack_local_1 with the last parameter as ASLK_RECORD_PAD. */
494
495rtx
496assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
497{
80a832cd 498 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
2e3f842f 499}
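/* A minimal usage sketch (illustrative only, not part of the original
   source): allocate a naturally aligned slot for REG's mode and spill REG
   into it.  The helper name example_spill_to_stack_slot is hypothetical.  */

static rtx
example_spill_to_stack_slot (rtx reg)
{
  /* align == 0 requests the alignment implied by the mode.  */
  rtx slot = assign_stack_local (GET_MODE (reg),
				 GET_MODE_SIZE (GET_MODE (reg)), 0);
  emit_move_insn (slot, reg);
  return slot;
}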
0aea6467 500\f
fb0703f7
SB
501/* In order to evaluate some expressions, such as function calls returning
502 structures in memory, we need to temporarily allocate stack locations.
503 We record each allocated temporary in the following structure.
504
505 Associated with each temporary slot is a nesting level. When we pop up
506 one level, all temporaries associated with the previous level are freed.
507 Normally, all temporaries are freed after the execution of the statement
508 in which they were created. However, if we are inside a ({...}) grouping,
509 the result may be in a temporary and hence must be preserved. If the
510 result could be in a temporary, we preserve it if we can determine which
511 one it is in. If we cannot determine which temporary may contain the
512 result, all temporaries are preserved. A temporary is preserved by
9474e8ab 513 pretending it was allocated at the previous nesting level. */
fb0703f7 514
d1b38208 515struct GTY(()) temp_slot {
fb0703f7
SB
516 /* Points to next temporary slot. */
517 struct temp_slot *next;
518 /* Points to previous temporary slot. */
519 struct temp_slot *prev;
520 /* The rtx used to reference the slot. */
521 rtx slot;
fb0703f7
SB
522 /* The size, in units, of the slot. */
523 HOST_WIDE_INT size;
524 /* The type of the object in the slot, or zero if it doesn't correspond
525 to a type. We use this to determine whether a slot can be reused.
526 It can be reused if objects of the type of the new slot will always
527 conflict with objects of the type of the old slot. */
528 tree type;
8f5929e1
JJ
529 /* The alignment (in bits) of the slot. */
530 unsigned int align;
fb0703f7
SB
531 /* Nonzero if this temporary is currently in use. */
532 char in_use;
fb0703f7
SB
533 /* Nesting level at which this slot is being used. */
534 int level;
fb0703f7
SB
535 /* The offset of the slot from the frame_pointer, including extra space
536 for alignment. This info is for combine_temp_slots. */
537 HOST_WIDE_INT base_offset;
538 /* The size of the slot, including extra space for alignment. This
539 info is for combine_temp_slots. */
540 HOST_WIDE_INT full_size;
541};
542
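/* Illustrative sketch of the intended protocol (not part of the original
   source): expansion code brackets a statement with push_temp_slots /
   pop_temp_slots, and calls preserve_temp_slots when a ({...}) result held
   in a temporary must survive into the enclosing level.  The wrapper
   function below is hypothetical.  */

static void
example_expand_one_stmt (void)
{
  push_temp_slots ();
  rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode));
  /* ... expand the statement, leaving its value in TMP ... */
  preserve_temp_slots (tmp);	/* only if the value must outlive the stmt */
  pop_temp_slots ();
}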
543/* A table of addresses that represent a stack slot. The table is a mapping
544 from address RTXen to a temp slot. */
545static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
f8395d62 546static size_t n_temp_slots_in_use;
fb0703f7
SB
547
548/* Entry for the above hash table. */
d1b38208 549struct GTY(()) temp_slot_address_entry {
fb0703f7
SB
550 hashval_t hash;
551 rtx address;
552 struct temp_slot *temp_slot;
553};
554
0aea6467
ZD
555/* Removes temporary slot TEMP from LIST. */
556
557static void
558cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
559{
560 if (temp->next)
561 temp->next->prev = temp->prev;
562 if (temp->prev)
563 temp->prev->next = temp->next;
564 else
565 *list = temp->next;
566
567 temp->prev = temp->next = NULL;
568}
569
570/* Inserts temporary slot TEMP to LIST. */
571
572static void
573insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
574{
575 temp->next = *list;
576 if (*list)
577 (*list)->prev = temp;
578 temp->prev = NULL;
579 *list = temp;
580}
581
582/* Returns the list of used temp slots at LEVEL. */
583
584static struct temp_slot **
585temp_slots_at_level (int level)
586{
9771b263
DN
587 if (level >= (int) vec_safe_length (used_temp_slots))
588 vec_safe_grow_cleared (used_temp_slots, level + 1);
0aea6467 589
9771b263 590 return &(*used_temp_slots)[level];
0aea6467
ZD
591}
592
593/* Returns the maximal temporary slot level. */
594
595static int
596max_slot_level (void)
597{
598 if (!used_temp_slots)
599 return -1;
600
9771b263 601 return used_temp_slots->length () - 1;
0aea6467
ZD
602}
603
604/* Moves temporary slot TEMP to LEVEL. */
605
606static void
607move_slot_to_level (struct temp_slot *temp, int level)
608{
609 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
610 insert_slot_to_list (temp, temp_slots_at_level (level));
611 temp->level = level;
612}
613
614/* Make temporary slot TEMP available. */
615
616static void
617make_slot_available (struct temp_slot *temp)
618{
619 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
620 insert_slot_to_list (temp, &avail_temp_slots);
621 temp->in_use = 0;
622 temp->level = -1;
f8395d62 623 n_temp_slots_in_use--;
0aea6467 624}
fb0703f7
SB
625
626/* Compute the hash value for an address -> temp slot mapping.
627 The value is cached on the mapping entry. */
628static hashval_t
629temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
630{
631 int do_not_record = 0;
632 return hash_rtx (t->address, GET_MODE (t->address),
633 &do_not_record, NULL, false);
634}
635
636/* Return the hash value for an address -> temp slot mapping. */
637static hashval_t
638temp_slot_address_hash (const void *p)
639{
640 const struct temp_slot_address_entry *t;
641 t = (const struct temp_slot_address_entry *) p;
642 return t->hash;
643}
644
645/* Compare two address -> temp slot mapping entries. */
646static int
647temp_slot_address_eq (const void *p1, const void *p2)
648{
649 const struct temp_slot_address_entry *t1, *t2;
650 t1 = (const struct temp_slot_address_entry *) p1;
651 t2 = (const struct temp_slot_address_entry *) p2;
652 return exp_equiv_p (t1->address, t2->address, 0, true);
653}
654
655/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
656static void
657insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
658{
659 void **slot;
766090c2 660 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
fb0703f7
SB
661 t->address = address;
662 t->temp_slot = temp_slot;
663 t->hash = temp_slot_address_compute_hash (t);
664 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
665 *slot = t;
666}
667
668/* Remove an address -> temp slot mapping entry if the temp slot is
669 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
670static int
671remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
672{
673 const struct temp_slot_address_entry *t;
674 t = (const struct temp_slot_address_entry *) *slot;
675 if (! t->temp_slot->in_use)
f8395d62 676 htab_clear_slot (temp_slot_address_table, slot);
fb0703f7
SB
677 return 1;
678}
679
680/* Remove all mappings of addresses to unused temp slots. */
681static void
682remove_unused_temp_slot_addresses (void)
683{
f8395d62
MM
684 /* Use quicker clearing if there aren't any active temp slots. */
685 if (n_temp_slots_in_use)
686 htab_traverse (temp_slot_address_table,
687 remove_unused_temp_slot_addresses_1,
688 NULL);
689 else
690 htab_empty (temp_slot_address_table);
fb0703f7
SB
691}
692
693/* Find the temp slot corresponding to the object at address X. */
694
695static struct temp_slot *
696find_temp_slot_from_address (rtx x)
697{
698 struct temp_slot *p;
699 struct temp_slot_address_entry tmp, *t;
700
701 /* First try the easy way:
702 See if X exists in the address -> temp slot mapping. */
703 tmp.address = x;
704 tmp.temp_slot = NULL;
705 tmp.hash = temp_slot_address_compute_hash (&tmp);
706 t = (struct temp_slot_address_entry *)
707 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
708 if (t)
709 return t->temp_slot;
710
711 /* If we have a sum involving a register, see if it points to a temp
712 slot. */
713 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
714 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
715 return p;
716 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
717 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
718 return p;
719
720 /* Last resort: Address is a virtual stack var address. */
721 if (GET_CODE (x) == PLUS
722 && XEXP (x, 0) == virtual_stack_vars_rtx
481683e1 723 && CONST_INT_P (XEXP (x, 1)))
fb0703f7
SB
724 {
725 int i;
726 for (i = max_slot_level (); i >= 0; i--)
727 for (p = *temp_slots_at_level (i); p; p = p->next)
728 {
729 if (INTVAL (XEXP (x, 1)) >= p->base_offset
730 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
731 return p;
732 }
733 }
734
735 return NULL;
736}
6f086dfc
RS
737\f
738/* Allocate a temporary stack slot and record it for possible later
739 reuse.
740
741 MODE is the machine mode to be given to the returned rtx.
742
743 SIZE is the size in units of the space required. We do no rounding here
744 since assign_stack_local will do any required rounding.
745
a4c6502a 746 TYPE is the type that will be used for the stack slot. */
6f086dfc 747
a06ef755 748rtx
535a42b1 749assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
9474e8ab 750 tree type)
6f086dfc 751{
74e2819c 752 unsigned int align;
0aea6467 753 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
faa964e5 754 rtx slot;
6f086dfc 755
303ec2aa
RK
756 /* If SIZE is -1 it means that somebody tried to allocate a temporary
757 of a variable size. */
0bccc606 758 gcc_assert (size != -1);
303ec2aa 759
76fe54f0 760 align = get_stack_local_alignment (type, mode);
d16790f2
JW
761
762 /* Try to find an available, already-allocated temporary of the proper
763 mode which meets the size and alignment requirements. Choose the
3e8b0446 764 smallest one with the closest alignment.
b8698a0f 765
3e8b0446
ZD
766 If assign_stack_temp is called outside of the tree->rtl expansion,
767 we cannot reuse the stack slots (that may still refer to
768 VIRTUAL_STACK_VARS_REGNUM). */
769 if (!virtuals_instantiated)
0aea6467 770 {
3e8b0446 771 for (p = avail_temp_slots; p; p = p->next)
0aea6467 772 {
3e8b0446
ZD
773 if (p->align >= align && p->size >= size
774 && GET_MODE (p->slot) == mode
775 && objects_must_conflict_p (p->type, type)
776 && (best_p == 0 || best_p->size > p->size
777 || (best_p->size == p->size && best_p->align > p->align)))
0aea6467 778 {
3e8b0446
ZD
779 if (p->align == align && p->size == size)
780 {
781 selected = p;
782 cut_slot_from_list (selected, &avail_temp_slots);
783 best_p = 0;
784 break;
785 }
786 best_p = p;
0aea6467 787 }
0aea6467
ZD
788 }
789 }
6f086dfc
RS
790
791 /* Make our best, if any, the one to use. */
792 if (best_p)
a45035b6 793 {
0aea6467
ZD
794 selected = best_p;
795 cut_slot_from_list (selected, &avail_temp_slots);
796
a45035b6
JW
797 /* If there are enough aligned bytes left over, make them into a new
798 temp_slot so that the extra bytes don't get wasted. Do this only
799 for BLKmode slots, so that we can be sure of the alignment. */
3bdf5ad1 800 if (GET_MODE (best_p->slot) == BLKmode)
a45035b6 801 {
d16790f2 802 int alignment = best_p->align / BITS_PER_UNIT;
e5e809f4 803 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
804
805 if (best_p->size - rounded_size >= alignment)
806 {
766090c2 807 p = ggc_alloc<temp_slot> ();
9474e8ab 808 p->in_use = 0;
a45035b6 809 p->size = best_p->size - rounded_size;
307d8cd6
RK
810 p->base_offset = best_p->base_offset + rounded_size;
811 p->full_size = best_p->full_size - rounded_size;
be0c514c 812 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
d16790f2 813 p->align = best_p->align;
1da68f56 814 p->type = best_p->type;
0aea6467 815 insert_slot_to_list (p, &avail_temp_slots);
a45035b6 816
38a448ca
RH
817 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
818 stack_slot_list);
a45035b6
JW
819
820 best_p->size = rounded_size;
291dde90 821 best_p->full_size = rounded_size;
a45035b6
JW
822 }
823 }
a45035b6 824 }
718fe406 825
6f086dfc 826 /* If we still didn't find one, make a new temporary. */
0aea6467 827 if (selected == 0)
6f086dfc 828 {
e5e809f4
JL
829 HOST_WIDE_INT frame_offset_old = frame_offset;
830
766090c2 831 p = ggc_alloc<temp_slot> ();
e5e809f4 832
c87a0a39
JL
833 /* We are passing an explicit alignment request to assign_stack_local.
834 One side effect of that is assign_stack_local will not round SIZE
835 to ensure the frame offset remains suitably aligned.
836
837 So for requests which depended on the rounding of SIZE, we go ahead
838 and round it now. We also make sure ALIGNMENT is at least
839 BIGGEST_ALIGNMENT. */
0bccc606 840 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
80a832cd
JJ
841 p->slot = assign_stack_local_1 (mode,
842 (mode == BLKmode
843 ? CEIL_ROUND (size,
844 (int) align
845 / BITS_PER_UNIT)
846 : size),
847 align, 0);
d16790f2
JW
848
849 p->align = align;
e5e809f4 850
b2a80c0d
DE
851 /* The following slot size computation is necessary because we don't
852 know the actual size of the temporary slot until assign_stack_local
853 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
854 requested temporary. Note that extra space added for alignment
855 can be either above or below this stack slot depending on which
856 way the frame grows. We include the extra space if and only if it
857 is above this slot. */
f62c8a5c
JJ
858 if (FRAME_GROWS_DOWNWARD)
859 p->size = frame_offset_old - frame_offset;
860 else
861 p->size = size;
e5e809f4 862
fc91b0d0 863 /* Now define the fields used by combine_temp_slots. */
f62c8a5c
JJ
864 if (FRAME_GROWS_DOWNWARD)
865 {
866 p->base_offset = frame_offset;
867 p->full_size = frame_offset_old - frame_offset;
868 }
869 else
870 {
871 p->base_offset = frame_offset_old;
872 p->full_size = frame_offset - frame_offset_old;
873 }
0aea6467
ZD
874
875 selected = p;
6f086dfc
RS
876 }
877
0aea6467 878 p = selected;
6f086dfc 879 p->in_use = 1;
1da68f56 880 p->type = type;
7efcb746 881 p->level = temp_slot_level;
f8395d62 882 n_temp_slots_in_use++;
1995f267 883
0aea6467
ZD
884 pp = temp_slots_at_level (p->level);
885 insert_slot_to_list (p, pp);
fb0703f7 886 insert_temp_slot_address (XEXP (p->slot, 0), p);
faa964e5
UW
887
888 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
889 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
890 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
3bdf5ad1 891
1da68f56
RK
892 /* If we know the alias set for the memory that will be used, use
893 it. If there's no TYPE, then we don't know anything about the
894 alias set for the memory. */
faa964e5
UW
895 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
896 set_mem_align (slot, align);
1da68f56 897
30f7a378 898 /* If a type is specified, set the relevant flags. */
3bdf5ad1 899 if (type != 0)
55356334 900 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
be0c514c 901 MEM_NOTRAP_P (slot) = 1;
3bdf5ad1 902
faa964e5 903 return slot;
6f086dfc 904}
d16790f2
JW
905
906/* Allocate a temporary stack slot and record it for possible later
9474e8ab 907 reuse. First two arguments are same as in preceding function. */
d16790f2
JW
908
909rtx
9474e8ab 910assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
d16790f2 911{
9474e8ab 912 return assign_stack_temp_for_type (mode, size, NULL_TREE);
d16790f2 913}
638141a6 914\f
9432c136
EB
915/* Assign a temporary.
916 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
917 and so that should be used in error messages. In either case, we
918 allocate space of the given type.
230f21b4 919 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
b55d9ff8
RK
920 it is 0 if a register is OK.
921 DONT_PROMOTE is 1 if we should not promote values in register
922 to wider modes. */
230f21b4
PB
923
924rtx
9474e8ab 925assign_temp (tree type_or_decl, int memory_required,
fa8db1f7 926 int dont_promote ATTRIBUTE_UNUSED)
230f21b4 927{
9432c136
EB
928 tree type, decl;
929 enum machine_mode mode;
9e1622ed 930#ifdef PROMOTE_MODE
9432c136
EB
931 int unsignedp;
932#endif
933
934 if (DECL_P (type_or_decl))
935 decl = type_or_decl, type = TREE_TYPE (decl);
936 else
937 decl = NULL, type = type_or_decl;
938
939 mode = TYPE_MODE (type);
9e1622ed 940#ifdef PROMOTE_MODE
8df83eae 941 unsignedp = TYPE_UNSIGNED (type);
0ce8a59c 942#endif
638141a6 943
230f21b4
PB
944 if (mode == BLKmode || memory_required)
945 {
e5e809f4 946 HOST_WIDE_INT size = int_size_in_bytes (type);
230f21b4
PB
947 rtx tmp;
948
44affdae
JH
949 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
950 problems with allocating the stack space. */
951 if (size == 0)
952 size = 1;
953
230f21b4 954 /* Unfortunately, we don't yet know how to allocate variable-sized
a441447f
OH
955 temporaries. However, sometimes we can find a fixed upper limit on
956 the size, so try that instead. */
957 else if (size == -1)
958 size = max_int_size_in_bytes (type);
e30bb772 959
9432c136
EB
960 /* The size of the temporary may be too large to fit into an integer. */
961 /* ??? Not sure this should happen except for user silliness, so limit
797a6ac1 962 this to things that aren't compiler-generated temporaries. The
535a42b1 963 rest of the time we'll die in assign_stack_temp_for_type. */
9432c136
EB
964 if (decl && size == -1
965 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
966 {
dee15844 967 error ("size of variable %q+D is too large", decl);
9432c136
EB
968 size = 1;
969 }
970
9474e8ab 971 tmp = assign_stack_temp_for_type (mode, size, type);
230f21b4
PB
972 return tmp;
973 }
638141a6 974
9e1622ed 975#ifdef PROMOTE_MODE
b55d9ff8 976 if (! dont_promote)
cde0f3fd 977 mode = promote_mode (type, mode, &unsignedp);
230f21b4 978#endif
638141a6 979
230f21b4
PB
980 return gen_reg_rtx (mode);
981}
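/* Illustrative usage sketch (not part of the original source): an expander
   needing addressable scratch storage for a value of tree type TYPE would
   typically write

     rtx scratch = assign_temp (type, /*memory_required=*/1, 0);

   whereas passing 0 for MEMORY_REQUIRED may instead yield a (possibly
   promoted) pseudo register.  */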
638141a6 982\f
a45035b6
JW
983/* Combine temporary stack slots which are adjacent on the stack.
984
985 This allows for better use of already allocated stack space. This is only
986 done for BLKmode slots because we can be sure that we won't have alignment
987 problems in this case. */
988
6fe79279 989static void
fa8db1f7 990combine_temp_slots (void)
a45035b6 991{
0aea6467 992 struct temp_slot *p, *q, *next, *next_q;
e5e809f4
JL
993 int num_slots;
994
a4c6502a
MM
995 /* We can't combine slots, because the information about which slot
996 is in which alias set will be lost. */
997 if (flag_strict_aliasing)
998 return;
999
718fe406 1000 /* If there are a lot of temp slots, don't do anything unless
d6a7951f 1001 we are at high optimization levels. */
e5e809f4 1002 if (! flag_expensive_optimizations)
0aea6467 1003 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
e5e809f4
JL
1004 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1005 return;
a45035b6 1006
0aea6467 1007 for (p = avail_temp_slots; p; p = next)
e9b7093a
RS
1008 {
1009 int delete_p = 0;
e5e809f4 1010
0aea6467
ZD
1011 next = p->next;
1012
1013 if (GET_MODE (p->slot) != BLKmode)
1014 continue;
1015
1016 for (q = p->next; q; q = next_q)
e9b7093a 1017 {
0aea6467
ZD
1018 int delete_q = 0;
1019
1020 next_q = q->next;
1021
1022 if (GET_MODE (q->slot) != BLKmode)
1023 continue;
1024
1025 if (p->base_offset + p->full_size == q->base_offset)
1026 {
1027 /* Q comes after P; combine Q into P. */
1028 p->size += q->size;
1029 p->full_size += q->full_size;
1030 delete_q = 1;
1031 }
1032 else if (q->base_offset + q->full_size == p->base_offset)
1033 {
1034 /* P comes after Q; combine P into Q. */
1035 q->size += p->size;
1036 q->full_size += p->full_size;
1037 delete_p = 1;
1038 break;
1039 }
1040 if (delete_q)
1041 cut_slot_from_list (q, &avail_temp_slots);
e9b7093a 1042 }
0aea6467
ZD
1043
1044 /* Either delete P or advance past it. */
1045 if (delete_p)
1046 cut_slot_from_list (p, &avail_temp_slots);
e9b7093a 1047 }
a45035b6 1048}
6f086dfc 1049\f
82d6e6fc
KG
1050/* Indicate that NEW_RTX is an alternate way of referring to the temp
1051 slot that previously was known by OLD_RTX. */
e5e76139
RK
1052
1053void
82d6e6fc 1054update_temp_slot_address (rtx old_rtx, rtx new_rtx)
e5e76139 1055{
14a774a9 1056 struct temp_slot *p;
e5e76139 1057
82d6e6fc 1058 if (rtx_equal_p (old_rtx, new_rtx))
e5e76139 1059 return;
14a774a9 1060
82d6e6fc 1061 p = find_temp_slot_from_address (old_rtx);
14a774a9 1062
82d6e6fc
KG
1063 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1064 NEW_RTX is a register, see if one operand of the PLUS is a
1065 temporary location. If so, NEW_RTX points into it. Otherwise,
1066 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1067 in common between them. If so, try a recursive call on those
1068 values. */
14a774a9
RK
1069 if (p == 0)
1070 {
82d6e6fc 1071 if (GET_CODE (old_rtx) != PLUS)
700f19f0
RK
1072 return;
1073
82d6e6fc 1074 if (REG_P (new_rtx))
700f19f0 1075 {
82d6e6fc
KG
1076 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1077 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
700f19f0
RK
1078 return;
1079 }
82d6e6fc 1080 else if (GET_CODE (new_rtx) != PLUS)
14a774a9
RK
1081 return;
1082
82d6e6fc
KG
1083 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1084 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1085 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1086 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1087 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1088 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1089 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1090 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
14a774a9
RK
1091
1092 return;
1093 }
1094
718fe406 1095 /* Otherwise add an alias for the temp's address. */
fb0703f7 1096 insert_temp_slot_address (new_rtx, p);
e5e76139
RK
1097}
1098
9cca6a99
MS
1099/* If X could be a reference to a temporary slot, mark that slot as
1100 belonging to one level higher than the current level. If X
1101 matched one of our slots, just mark that one. Otherwise, we can't
9474e8ab 1102 easily predict which it is, so upgrade all of them.
6f086dfc
RS
1103
1104 This is called when an ({...}) construct occurs and a statement
1105 returns a value in memory. */
1106
1107void
fa8db1f7 1108preserve_temp_slots (rtx x)
6f086dfc 1109{
0aea6467 1110 struct temp_slot *p = 0, *next;
6f086dfc 1111
e3a77161 1112 if (x == 0)
9474e8ab 1113 return;
f7b6d104 1114
8fff4fc1 1115 /* If X is a register that is being used as a pointer, see if we have
9474e8ab 1116 a temporary slot we know it points to. */
8fff4fc1
RH
1117 if (REG_P (x) && REG_POINTER (x))
1118 p = find_temp_slot_from_address (x);
f7b6d104 1119
8fff4fc1 1120 /* If X is not in memory or is at a constant address, it cannot be in
9474e8ab 1121 a temporary slot. */
8fff4fc1 1122 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
9474e8ab 1123 return;
8fff4fc1
RH
1124
1125 /* First see if we can find a match. */
1126 if (p == 0)
1127 p = find_temp_slot_from_address (XEXP (x, 0));
1128
1129 if (p != 0)
1130 {
8fff4fc1 1131 if (p->level == temp_slot_level)
9474e8ab 1132 move_slot_to_level (p, temp_slot_level - 1);
8fff4fc1 1133 return;
f7b6d104 1134 }
e9a25f70 1135
8fff4fc1
RH
1136 /* Otherwise, preserve all non-kept slots at this level. */
1137 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
e9a25f70 1138 {
8fff4fc1 1139 next = p->next;
9474e8ab 1140 move_slot_to_level (p, temp_slot_level - 1);
8fff4fc1 1141 }
fe9b4957
MM
1142}
1143
8fff4fc1
RH
1144/* Free all temporaries used so far. This is normally called at the
1145 end of generating code for a statement. */
fe9b4957 1146
8fff4fc1
RH
1147void
1148free_temp_slots (void)
fe9b4957 1149{
8fff4fc1 1150 struct temp_slot *p, *next;
5d7cefe5 1151 bool some_available = false;
fe9b4957 1152
8fff4fc1
RH
1153 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1154 {
1155 next = p->next;
9474e8ab
MM
1156 make_slot_available (p);
1157 some_available = true;
8fff4fc1 1158 }
fe9b4957 1159
5d7cefe5
MM
1160 if (some_available)
1161 {
1162 remove_unused_temp_slot_addresses ();
1163 combine_temp_slots ();
1164 }
8fff4fc1 1165}
fe9b4957 1166
8fff4fc1 1167/* Push deeper into the nesting level for stack temporaries. */
fe9b4957 1168
8fff4fc1
RH
1169void
1170push_temp_slots (void)
fe9b4957 1171{
8fff4fc1 1172 temp_slot_level++;
fe9b4957
MM
1173}
1174
8fff4fc1
RH
1175/* Pop a temporary nesting level. All slots in use in the current level
1176 are freed. */
fe9b4957 1177
8fff4fc1
RH
1178void
1179pop_temp_slots (void)
fe9b4957 1180{
9474e8ab 1181 free_temp_slots ();
8fff4fc1 1182 temp_slot_level--;
8c36698e
NC
1183}
1184
8fff4fc1 1185/* Initialize temporary slots. */
e9a25f70
JL
1186
1187void
8fff4fc1 1188init_temp_slots (void)
e9a25f70 1189{
8fff4fc1
RH
1190 /* We have not allocated any temporaries yet. */
1191 avail_temp_slots = 0;
9771b263 1192 vec_alloc (used_temp_slots, 0);
8fff4fc1 1193 temp_slot_level = 0;
f8395d62 1194 n_temp_slots_in_use = 0;
fb0703f7
SB
1195
1196 /* Set up the table to map addresses to temp slots. */
1197 if (! temp_slot_address_table)
1198 temp_slot_address_table = htab_create_ggc (32,
1199 temp_slot_address_hash,
1200 temp_slot_address_eq,
1201 NULL);
1202 else
1203 htab_empty (temp_slot_address_table);
8fff4fc1
RH
1204}
1205\f
6399c0ab
SB
1206/* Functions and data structures to keep track of the values hard regs
1207 had at the start of the function. */
1208
1209/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1210 and has_hard_reg_initial_val. */
1211typedef struct GTY(()) initial_value_pair {
1212 rtx hard_reg;
1213 rtx pseudo;
1214} initial_value_pair;
1215/* ??? This could be a VEC but there is currently no way to define an
1216 opaque VEC type. This could be worked around by defining struct
1217 initial_value_pair in function.h. */
1218typedef struct GTY(()) initial_value_struct {
1219 int num_entries;
1220 int max_entries;
1221 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1222} initial_value_struct;
1223
1224/* If a pseudo represents an initial hard reg (or expression), return
1225 it, else return NULL_RTX. */
1226
1227rtx
1228get_hard_reg_initial_reg (rtx reg)
1229{
1230 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1231 int i;
1232
1233 if (ivs == 0)
1234 return NULL_RTX;
1235
1236 for (i = 0; i < ivs->num_entries; i++)
1237 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1238 return ivs->entries[i].hard_reg;
1239
1240 return NULL_RTX;
1241}
1242
1243/* Make sure that there's a pseudo register of mode MODE that stores the
1244 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1245
1246rtx
1247get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
1248{
1249 struct initial_value_struct *ivs;
1250 rtx rv;
1251
1252 rv = has_hard_reg_initial_val (mode, regno);
1253 if (rv)
1254 return rv;
1255
1256 ivs = crtl->hard_reg_initial_vals;
1257 if (ivs == 0)
1258 {
766090c2 1259 ivs = ggc_alloc<initial_value_struct> ();
6399c0ab
SB
1260 ivs->num_entries = 0;
1261 ivs->max_entries = 5;
766090c2 1262 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
6399c0ab
SB
1263 crtl->hard_reg_initial_vals = ivs;
1264 }
1265
1266 if (ivs->num_entries >= ivs->max_entries)
1267 {
1268 ivs->max_entries += 5;
1269 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1270 ivs->max_entries);
1271 }
1272
1273 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1274 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1275
1276 return ivs->entries[ivs->num_entries++].pseudo;
1277}
1278
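/* Illustrative usage sketch (not part of the original source): a back end
   that needs the value some hard register had on entry to the function,
   e.g. an incoming return-address register, asks for a pseudo holding it;
   the actual copy is emitted later by emit_initial_value_sets.
   EXAMPLE_REGNO is an assumed placeholder register number.

     rtx entry_val = get_hard_reg_initial_val (Pmode, EXAMPLE_REGNO);  */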
1279/* See if get_hard_reg_initial_val has been used to create a pseudo
1280 for the initial value of hard register REGNO in mode MODE. Return
1281 the associated pseudo if so, otherwise return NULL. */
1282
1283rtx
1284has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
1285{
1286 struct initial_value_struct *ivs;
1287 int i;
1288
1289 ivs = crtl->hard_reg_initial_vals;
1290 if (ivs != 0)
1291 for (i = 0; i < ivs->num_entries; i++)
1292 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1293 && REGNO (ivs->entries[i].hard_reg) == regno)
1294 return ivs->entries[i].pseudo;
1295
1296 return NULL_RTX;
1297}
1298
1299unsigned int
1300emit_initial_value_sets (void)
1301{
1302 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1303 int i;
691fe203 1304 rtx_insn *seq;
6399c0ab
SB
1305
1306 if (ivs == 0)
1307 return 0;
1308
1309 start_sequence ();
1310 for (i = 0; i < ivs->num_entries; i++)
1311 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1312 seq = get_insns ();
1313 end_sequence ();
1314
1315 emit_insn_at_entry (seq);
1316 return 0;
1317}
1318
1319/* Return the hardreg-pseudoreg initial values pair entry I and
1320 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1321bool
1322initial_value_entry (int i, rtx *hreg, rtx *preg)
1323{
1324 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1325 if (!ivs || i >= ivs->num_entries)
1326 return false;
1327
1328 *hreg = ivs->entries[i].hard_reg;
1329 *preg = ivs->entries[i].pseudo;
1330 return true;
1331}
1332\f
8fff4fc1
RH
1333/* These routines are responsible for converting virtual register references
1334 to the actual hard register references once RTL generation is complete.
718fe406 1335
8fff4fc1
RH
1336 The following five variables are used for communication between the
1337 routines. They contain the offsets of the virtual registers from their
1338 respective hard registers. */
fe9b4957 1339
8fff4fc1
RH
1340static int in_arg_offset;
1341static int var_offset;
1342static int dynamic_offset;
1343static int out_arg_offset;
1344static int cfa_offset;
8a5275eb 1345
8fff4fc1
RH
1346/* In most machines, the stack pointer register is equivalent to the bottom
1347 of the stack. */
718fe406 1348
8fff4fc1
RH
1349#ifndef STACK_POINTER_OFFSET
1350#define STACK_POINTER_OFFSET 0
1351#endif
8c36698e 1352
ddbb449f
AM
1353#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1354#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1355#endif
1356
8fff4fc1
RH
1357/* If not defined, pick an appropriate default for the offset of dynamically
1358 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
ddbb449f 1359 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
fe9b4957 1360
8fff4fc1 1361#ifndef STACK_DYNAMIC_OFFSET
8a5275eb 1362
8fff4fc1
RH
1363/* The bottom of the stack points to the actual arguments. If
1364 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1365 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1366 stack space for register parameters is not pushed by the caller, but
1367 rather part of the fixed stack areas and hence not included in
38173d38 1368 `crtl->outgoing_args_size'. Nevertheless, we must allow
8fff4fc1 1369 for it when allocating stack dynamic objects. */
8a5275eb 1370
ddbb449f 1371#ifdef INCOMING_REG_PARM_STACK_SPACE
8fff4fc1
RH
1372#define STACK_DYNAMIC_OFFSET(FNDECL) \
1373((ACCUMULATE_OUTGOING_ARGS \
38173d38 1374 ? (crtl->outgoing_args_size \
81464b2c 1375 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
ddbb449f 1376 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
ac294f0b 1377 : 0) + (STACK_POINTER_OFFSET))
8fff4fc1
RH
1378#else
1379#define STACK_DYNAMIC_OFFSET(FNDECL) \
38173d38 1380((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
8fff4fc1
RH
1381 + (STACK_POINTER_OFFSET))
1382#endif
1383#endif
4fa48eae 1384
659e47fb 1385\f
bbf9b913
RH
1386/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1387 is a virtual register, return the equivalent hard register and set the
1388 offset indirectly through the pointer. Otherwise, return 0. */
6f086dfc 1389
bbf9b913
RH
1390static rtx
1391instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
6f086dfc 1392{
82d6e6fc 1393 rtx new_rtx;
bbf9b913 1394 HOST_WIDE_INT offset;
6f086dfc 1395
bbf9b913 1396 if (x == virtual_incoming_args_rtx)
2e3f842f 1397 {
d015f7cc 1398 if (stack_realign_drap)
2e3f842f 1399 {
d015f7cc
L
1400 /* Replace virtual_incoming_args_rtx with internal arg
1401 pointer if DRAP is used to realign stack. */
82d6e6fc 1402 new_rtx = crtl->args.internal_arg_pointer;
2e3f842f
L
1403 offset = 0;
1404 }
1405 else
82d6e6fc 1406 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
2e3f842f 1407 }
bbf9b913 1408 else if (x == virtual_stack_vars_rtx)
82d6e6fc 1409 new_rtx = frame_pointer_rtx, offset = var_offset;
bbf9b913 1410 else if (x == virtual_stack_dynamic_rtx)
82d6e6fc 1411 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
bbf9b913 1412 else if (x == virtual_outgoing_args_rtx)
82d6e6fc 1413 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
bbf9b913 1414 else if (x == virtual_cfa_rtx)
f6672e8e
RH
1415 {
1416#ifdef FRAME_POINTER_CFA_OFFSET
82d6e6fc 1417 new_rtx = frame_pointer_rtx;
f6672e8e 1418#else
82d6e6fc 1419 new_rtx = arg_pointer_rtx;
f6672e8e
RH
1420#endif
1421 offset = cfa_offset;
1422 }
32990d5b
JJ
1423 else if (x == virtual_preferred_stack_boundary_rtx)
1424 {
1425 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1426 offset = 0;
1427 }
bbf9b913
RH
1428 else
1429 return NULL_RTX;
6f086dfc 1430
bbf9b913 1431 *poffset = offset;
82d6e6fc 1432 return new_rtx;
6f086dfc
RS
1433}
1434
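/* Illustrative worked example (not part of the original source): once the
   offsets above have been computed, a reference such as

     (mem:SI (plus (reg virtual-stack-vars) (const_int 12)))

   is rewritten by the routines below into

     (mem:SI (plus (reg frame-pointer) (const_int var_offset + 12)))

   where var_offset is the constant recorded for the current function.  */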
b8704801
RS
1435/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1436 registers present inside of *LOC. The expression is simplified,
1437 as much as possible, but is not to be considered "valid" in any sense
1438 implied by the target. Return true if any change is made. */
6f086dfc 1439
b8704801
RS
1440static bool
1441instantiate_virtual_regs_in_rtx (rtx *loc)
6f086dfc 1442{
b8704801
RS
1443 if (!*loc)
1444 return false;
1445 bool changed = false;
1446 subrtx_ptr_iterator::array_type array;
1447 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
6f086dfc 1448 {
b8704801
RS
1449 rtx *loc = *iter;
1450 if (rtx x = *loc)
bbf9b913 1451 {
b8704801
RS
1452 rtx new_rtx;
1453 HOST_WIDE_INT offset;
1454 switch (GET_CODE (x))
1455 {
1456 case REG:
1457 new_rtx = instantiate_new_reg (x, &offset);
1458 if (new_rtx)
1459 {
1460 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1461 changed = true;
1462 }
1463 iter.skip_subrtxes ();
1464 break;
bbf9b913 1465
b8704801
RS
1466 case PLUS:
1467 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1468 if (new_rtx)
1469 {
1470 XEXP (x, 0) = new_rtx;
1471 *loc = plus_constant (GET_MODE (x), x, offset, true);
1472 changed = true;
1473 iter.skip_subrtxes ();
1474 break;
1475 }
e5e809f4 1476
b8704801
RS
1477 /* FIXME -- from old code */
1478 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1479 we can commute the PLUS and SUBREG because pointers into the
1480 frame are well-behaved. */
1481 break;
ce717ce4 1482
b8704801
RS
1483 default:
1484 break;
1485 }
1486 }
6f086dfc 1487 }
b8704801 1488 return changed;
6f086dfc
RS
1489}
1490
bbf9b913
RH
1491/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1492 matches the predicate for insn CODE operand OPERAND. */
6f086dfc 1493
bbf9b913
RH
1494static int
1495safe_insn_predicate (int code, int operand, rtx x)
6f086dfc 1496{
2ef6ce06 1497 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
bbf9b913 1498}
5a73491b 1499
bbf9b913
RH
1500/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1501 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1502
1503static void
691fe203 1504instantiate_virtual_regs_in_insn (rtx_insn *insn)
5a73491b 1505{
bbf9b913
RH
1506 HOST_WIDE_INT offset;
1507 int insn_code, i;
9325973e 1508 bool any_change = false;
691fe203
DM
1509 rtx set, new_rtx, x;
1510 rtx_insn *seq;
32e66afd 1511
bbf9b913
RH
1512 /* There are some special cases to be handled first. */
1513 set = single_set (insn);
1514 if (set)
32e66afd 1515 {
bbf9b913
RH
1516 /* We're allowed to assign to a virtual register. This is interpreted
1517 to mean that the underlying register gets assigned the inverse
1518 transformation. This is used, for example, in the handling of
1519 non-local gotos. */
82d6e6fc
KG
1520 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1521 if (new_rtx)
bbf9b913
RH
1522 {
1523 start_sequence ();
32e66afd 1524
b8704801 1525 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
82d6e6fc 1526 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
69a59f0f 1527 gen_int_mode (-offset, GET_MODE (new_rtx)));
82d6e6fc
KG
1528 x = force_operand (x, new_rtx);
1529 if (x != new_rtx)
1530 emit_move_insn (new_rtx, x);
5a73491b 1531
bbf9b913
RH
1532 seq = get_insns ();
1533 end_sequence ();
5a73491b 1534
bbf9b913
RH
1535 emit_insn_before (seq, insn);
1536 delete_insn (insn);
1537 return;
1538 }
5a73491b 1539
bbf9b913
RH
1540 /* Handle a straight copy from a virtual register by generating a
1541 new add insn. The difference between this and falling through
1542 to the generic case is avoiding a new pseudo and eliminating a
1543 move insn in the initial rtl stream. */
82d6e6fc
KG
1544 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1545 if (new_rtx && offset != 0
bbf9b913
RH
1546 && REG_P (SET_DEST (set))
1547 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1548 {
1549 start_sequence ();
5a73491b 1550
2f1cd2eb
RS
1551 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1552 gen_int_mode (offset,
1553 GET_MODE (SET_DEST (set))),
1554 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1555 if (x != SET_DEST (set))
1556 emit_move_insn (SET_DEST (set), x);
770ae6cc 1557
bbf9b913
RH
1558 seq = get_insns ();
1559 end_sequence ();
87ce34d6 1560
bbf9b913
RH
1561 emit_insn_before (seq, insn);
1562 delete_insn (insn);
87ce34d6 1563 return;
bbf9b913 1564 }
5a73491b 1565
bbf9b913 1566 extract_insn (insn);
9325973e 1567 insn_code = INSN_CODE (insn);
5a73491b 1568
bbf9b913
RH
1569 /* Handle a plus involving a virtual register by determining if the
1570 operands remain valid if they're modified in place. */
1571 if (GET_CODE (SET_SRC (set)) == PLUS
1572 && recog_data.n_operands >= 3
1573 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1574 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1575 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1576 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1577 {
1578 offset += INTVAL (recog_data.operand[2]);
5a73491b 1579
bbf9b913 1580 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1581 if (offset == 0
1582 && REG_P (SET_DEST (set))
1583 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1584 {
1585 start_sequence ();
82d6e6fc 1586 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1587 seq = get_insns ();
1588 end_sequence ();
d1405722 1589
bbf9b913
RH
1590 emit_insn_before (seq, insn);
1591 delete_insn (insn);
1592 return;
1593 }
d1405722 1594
bbf9b913 1595 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1596
1597 /* Using validate_change and apply_change_group here leaves
1598 recog_data in an invalid state. Since we know exactly what
1599 we want to check, do those two by hand. */
82d6e6fc 1600 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1601 && safe_insn_predicate (insn_code, 2, x))
1602 {
82d6e6fc 1603 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1604 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1605 any_change = true;
9325973e
RH
1606
1607 /* Fall through into the regular operand fixup loop in
1608 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1609 }
1610 }
1611 }
d1405722 1612 else
9325973e
RH
1613 {
1614 extract_insn (insn);
1615 insn_code = INSN_CODE (insn);
1616 }
5dc96d60 1617
bbf9b913
RH
1618 /* In the general case, we expect virtual registers to appear only in
1619 operands, and then only as either bare registers or inside memories. */
1620 for (i = 0; i < recog_data.n_operands; ++i)
1621 {
1622 x = recog_data.operand[i];
1623 switch (GET_CODE (x))
1624 {
1625 case MEM:
1626 {
1627 rtx addr = XEXP (x, 0);
bbf9b913 1628
b8704801 1629 if (!instantiate_virtual_regs_in_rtx (&addr))
bbf9b913
RH
1630 continue;
1631
1632 start_sequence ();
23b33725 1633 x = replace_equiv_address (x, addr, true);
a5bfb13a
MM
1634	 /* It may happen that the address with the virtual reg
1635	 was valid (e.g. based on the virtual stack reg, which might
1636	 be acceptable to the predicates with all offsets), whereas
1637	 the rewritten address no longer is; for instance the address
1638	 may still carry an offset while the base reg is no longer the
1639	 virtual stack reg. Below we would do a force_reg on the whole
1640	 operand, but this insn might actually only accept memory.
1641	 Hence, before resorting to that, try to reload the address
1642	 into a register, so this operand stays a MEM. */
1643 if (!safe_insn_predicate (insn_code, i, x))
1644 {
1645 addr = force_reg (GET_MODE (addr), addr);
23b33725 1646 x = replace_equiv_address (x, addr, true);
a5bfb13a 1647 }
bbf9b913
RH
1648 seq = get_insns ();
1649 end_sequence ();
1650 if (seq)
1651 emit_insn_before (seq, insn);
1652 }
1653 break;
1654
1655 case REG:
82d6e6fc
KG
1656 new_rtx = instantiate_new_reg (x, &offset);
1657 if (new_rtx == NULL)
bbf9b913
RH
1658 continue;
1659 if (offset == 0)
82d6e6fc 1660 x = new_rtx;
bbf9b913
RH
1661 else
1662 {
1663 start_sequence ();
6f086dfc 1664
bbf9b913
RH
1665 /* Careful, special mode predicates may have stuff in
1666 insn_data[insn_code].operand[i].mode that isn't useful
1667 to us for computing a new value. */
1668 /* ??? Recognize address_operand and/or "p" constraints
1669	 to see if (plus new offset) is valid before we put
1670 this through expand_simple_binop. */
82d6e6fc 1671 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
2f1cd2eb
RS
1672 gen_int_mode (offset, GET_MODE (x)),
1673 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1674 seq = get_insns ();
1675 end_sequence ();
1676 emit_insn_before (seq, insn);
1677 }
1678 break;
6f086dfc 1679
bbf9b913 1680 case SUBREG:
82d6e6fc
KG
1681 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1682 if (new_rtx == NULL)
bbf9b913
RH
1683 continue;
1684 if (offset != 0)
1685 {
1686 start_sequence ();
2f1cd2eb
RS
1687 new_rtx = expand_simple_binop
1688 (GET_MODE (new_rtx), PLUS, new_rtx,
1689 gen_int_mode (offset, GET_MODE (new_rtx)),
1690 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1691 seq = get_insns ();
1692 end_sequence ();
1693 emit_insn_before (seq, insn);
1694 }
82d6e6fc
KG
1695 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1696 GET_MODE (new_rtx), SUBREG_BYTE (x));
7314c7dd 1697 gcc_assert (x);
bbf9b913 1698 break;
6f086dfc 1699
bbf9b913
RH
1700 default:
1701 continue;
1702 }
6f086dfc 1703
bbf9b913
RH
1704 /* At this point, X contains the new value for the operand.
1705 Validate the new value vs the insn predicate. Note that
1706 asm insns will have insn_code -1 here. */
1707 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1708 {
1709 start_sequence ();
f7ce0951
SE
1710 if (REG_P (x))
1711 {
1712 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1713 x = copy_to_reg (x);
1714 }
1715 else
1716 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6ba1bd36
JM
1717 seq = get_insns ();
1718 end_sequence ();
1719 if (seq)
1720 emit_insn_before (seq, insn);
1721 }
6f086dfc 1722
bbf9b913
RH
1723 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1724 any_change = true;
1725 }
6f086dfc 1726
bbf9b913
RH
1727 if (any_change)
1728 {
1729 /* Propagate operand changes into the duplicates. */
1730 for (i = 0; i < recog_data.n_dups; ++i)
1731 *recog_data.dup_loc[i]
3e916873 1732 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1733
bbf9b913
RH
1734 /* Force re-recognition of the instruction for validation. */
1735 INSN_CODE (insn) = -1;
1736 }
6f086dfc 1737
bbf9b913 1738 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1739 {
bbf9b913 1740 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1741 {
bbf9b913 1742 error_for_asm (insn, "impossible constraint in %<asm%>");
5a860835
JJ
1743 /* For asm goto, instead of fixing up all the edges
1744 just clear the template and clear input operands
1745 (asm goto doesn't have any output operands). */
1746 if (JUMP_P (insn))
1747 {
1748 rtx asm_op = extract_asm_operands (PATTERN (insn));
1749 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1750 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1751 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1752 }
1753 else
1754 delete_insn (insn);
bbf9b913
RH
1755 }
1756 }
1757 else
1758 {
1759 if (recog_memoized (insn) < 0)
1760 fatal_insn_not_found (insn);
1761 }
1762}
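
/* Illustrative sketch, not part of the original sources; the register
   numbers and offsets are made up for the example.  With
   var_offset == -16, an insn such as

     (set (reg:SI 90) (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten by the code above into

     (set (reg:SI 90) (plus:SI (reg:SI frame-pointer) (const_int -8)))

   i.e. the virtual register is replaced by its hard base register, the
   constant offsets are folded together, and the resulting operands are
   re-validated against the insn's predicates (or reloaded through a
   fresh pseudo when they no longer match).  */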
14a774a9 1763
bbf9b913
RH
1764/* Subroutine of instantiate_decls. Given RTL representing a decl,
1765 do any instantiation required. */
14a774a9 1766
e41b2a33
PB
1767void
1768instantiate_decl_rtl (rtx x)
bbf9b913
RH
1769{
1770 rtx addr;
6f086dfc 1771
bbf9b913
RH
1772 if (x == 0)
1773 return;
6f086dfc 1774
bbf9b913
RH
1775 /* If this is a CONCAT, recurse for the pieces. */
1776 if (GET_CODE (x) == CONCAT)
1777 {
e41b2a33
PB
1778 instantiate_decl_rtl (XEXP (x, 0));
1779 instantiate_decl_rtl (XEXP (x, 1));
bbf9b913
RH
1780 return;
1781 }
6f086dfc 1782
bbf9b913
RH
1783 /* If this is not a MEM, no need to do anything. Similarly if the
1784 address is a constant or a register that is not a virtual register. */
1785 if (!MEM_P (x))
1786 return;
6f086dfc 1787
bbf9b913
RH
1788 addr = XEXP (x, 0);
1789 if (CONSTANT_P (addr)
1790 || (REG_P (addr)
1791 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1792 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1793 return;
6f086dfc 1794
b8704801 1795 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
bbf9b913 1796}
6f086dfc 1797
434eba35
JJ
1798/* Helper for instantiate_decls called via walk_tree: Process all decls
1799 in the given DECL_VALUE_EXPR. */
1800
1801static tree
1802instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1803{
1804 tree t = *tp;
726a989a 1805 if (! EXPR_P (t))
434eba35
JJ
1806 {
1807 *walk_subtrees = 0;
37d6a488
AO
1808 if (DECL_P (t))
1809 {
1810 if (DECL_RTL_SET_P (t))
1811 instantiate_decl_rtl (DECL_RTL (t));
1812 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1813 && DECL_INCOMING_RTL (t))
1814 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1815 if ((TREE_CODE (t) == VAR_DECL
1816 || TREE_CODE (t) == RESULT_DECL)
1817 && DECL_HAS_VALUE_EXPR_P (t))
1818 {
1819 tree v = DECL_VALUE_EXPR (t);
1820 walk_tree (&v, instantiate_expr, NULL, NULL);
1821 }
1822 }
434eba35
JJ
1823 }
1824 return NULL;
1825}
1826
bbf9b913
RH
1827/* Subroutine of instantiate_decls: Process all decls in the given
1828 BLOCK node and all its subblocks. */
6f086dfc 1829
bbf9b913
RH
1830static void
1831instantiate_decls_1 (tree let)
1832{
1833 tree t;
6f086dfc 1834
910ad8de 1835 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
434eba35
JJ
1836 {
1837 if (DECL_RTL_SET_P (t))
e41b2a33 1838 instantiate_decl_rtl (DECL_RTL (t));
434eba35
JJ
1839 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1840 {
1841 tree v = DECL_VALUE_EXPR (t);
1842 walk_tree (&v, instantiate_expr, NULL, NULL);
1843 }
1844 }
6f086dfc 1845
bbf9b913 1846 /* Process all subblocks. */
87caf699 1847 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
bbf9b913
RH
1848 instantiate_decls_1 (t);
1849}
6f086dfc 1850
bbf9b913
RH
1851/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1852 all virtual registers in their DECL_RTL's. */
6f086dfc 1853
bbf9b913
RH
1854static void
1855instantiate_decls (tree fndecl)
1856{
c021f10b
NF
1857 tree decl;
1858 unsigned ix;
6f086dfc 1859
bbf9b913 1860 /* Process all parameters of the function. */
910ad8de 1861 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
bbf9b913 1862 {
e41b2a33
PB
1863 instantiate_decl_rtl (DECL_RTL (decl));
1864 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1865 if (DECL_HAS_VALUE_EXPR_P (decl))
1866 {
1867 tree v = DECL_VALUE_EXPR (decl);
1868 walk_tree (&v, instantiate_expr, NULL, NULL);
1869 }
bbf9b913 1870 }
4fd796bb 1871
37d6a488
AO
1872 if ((decl = DECL_RESULT (fndecl))
1873 && TREE_CODE (decl) == RESULT_DECL)
1874 {
1875 if (DECL_RTL_SET_P (decl))
1876 instantiate_decl_rtl (DECL_RTL (decl));
1877 if (DECL_HAS_VALUE_EXPR_P (decl))
1878 {
1879 tree v = DECL_VALUE_EXPR (decl);
1880 walk_tree (&v, instantiate_expr, NULL, NULL);
1881 }
1882 }
1883
3fd48b12
EB
1884 /* Process the saved static chain if it exists. */
1885 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1886 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1887 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1888
bbf9b913
RH
1889 /* Now process all variables defined in the function or its subblocks. */
1890 instantiate_decls_1 (DECL_INITIAL (fndecl));
802e9f8e 1891
c021f10b
NF
1892 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1893 if (DECL_RTL_SET_P (decl))
1894 instantiate_decl_rtl (DECL_RTL (decl));
9771b263 1895 vec_free (cfun->local_decls);
bbf9b913 1896}
6f086dfc 1897
bbf9b913
RH
1898/* Pass through the INSNS of function FNDECL and convert virtual register
1899 references to hard register references. */
6f086dfc 1900
c2924966 1901static unsigned int
bbf9b913
RH
1902instantiate_virtual_regs (void)
1903{
691fe203 1904 rtx_insn *insn;
6f086dfc 1905
bbf9b913
RH
1906 /* Compute the offsets to use for this function. */
1907 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1908 var_offset = STARTING_FRAME_OFFSET;
1909 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1910 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1911#ifdef FRAME_POINTER_CFA_OFFSET
1912 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1913#else
bbf9b913 1914 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1915#endif
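
/* Rough recap, assuming the mapping implemented by instantiate_new_reg
   earlier in this file (added for illustration): each virtual register
   is replaced by a hard base register plus one of the offsets computed
   above -- virtual-incoming-args by the arg pointer + in_arg_offset,
   virtual-stack-vars by the frame pointer + var_offset,
   virtual-stack-dynamic by the stack pointer + dynamic_offset,
   virtual-outgoing-args by the stack pointer + out_arg_offset, and
   virtual-cfa by the CFA base register + cfa_offset.  */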
e9a25f70 1916
bbf9b913
RH
1917 /* Initialize recognition, indicating that volatile is OK. */
1918 init_recog ();
6f086dfc 1919
bbf9b913
RH
1920 /* Scan through all the insns, instantiating every virtual register still
1921 present. */
45dbce1b
NF
1922 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1923 if (INSN_P (insn))
1924 {
1925 /* These patterns in the instruction stream can never be recognized.
1926 Fortunately, they shouldn't contain virtual registers either. */
39718607 1927 if (GET_CODE (PATTERN (insn)) == USE
45dbce1b 1928 || GET_CODE (PATTERN (insn)) == CLOBBER
45dbce1b
NF
1929 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1930 continue;
1931 else if (DEBUG_INSN_P (insn))
b8704801 1932 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
45dbce1b
NF
1933 else
1934 instantiate_virtual_regs_in_insn (insn);
ba4807a0 1935
4654c0cf 1936 if (insn->deleted ())
45dbce1b 1937 continue;
7114321e 1938
b8704801 1939 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
ba4807a0 1940
45dbce1b
NF
1941 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1942 if (CALL_P (insn))
b8704801 1943 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
45dbce1b 1944 }
6f086dfc 1945
bbf9b913
RH
1946 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1947 instantiate_decls (current_function_decl);
1948
e41b2a33
PB
1949 targetm.instantiate_decls ();
1950
bbf9b913
RH
1951 /* Indicate that, from now on, assign_stack_local should use
1952 frame_pointer_rtx. */
1953 virtuals_instantiated = 1;
d3c12306 1954
c2924966 1955 return 0;
6f086dfc 1956}
ef330312 1957
27a4cd48
DM
1958namespace {
1959
1960const pass_data pass_data_instantiate_virtual_regs =
1961{
1962 RTL_PASS, /* type */
1963 "vregs", /* name */
1964 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
1965 TV_NONE, /* tv_id */
1966 0, /* properties_required */
1967 0, /* properties_provided */
1968 0, /* properties_destroyed */
1969 0, /* todo_flags_start */
1970 0, /* todo_flags_finish */
ef330312
PB
1971};
1972
27a4cd48
DM
1973class pass_instantiate_virtual_regs : public rtl_opt_pass
1974{
1975public:
c3284718
RS
1976 pass_instantiate_virtual_regs (gcc::context *ctxt)
1977 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
27a4cd48
DM
1978 {}
1979
1980 /* opt_pass methods: */
be55bfe6
TS
1981 virtual unsigned int execute (function *)
1982 {
1983 return instantiate_virtual_regs ();
1984 }
27a4cd48
DM
1985
1986}; // class pass_instantiate_virtual_regs
1987
1988} // anon namespace
1989
1990rtl_opt_pass *
1991make_pass_instantiate_virtual_regs (gcc::context *ctxt)
1992{
1993 return new pass_instantiate_virtual_regs (ctxt);
1994}
1995
6f086dfc 1996\f
d181c154
RS
1997/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1998 This means a type for which function calls must pass an address to the
1999 function or get an address back from the function.
2000 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
2001
2002int
586de218 2003aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 2004{
d47d0a8d 2005 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
9d790a4f
RS
2006 int i, regno, nregs;
2007 rtx reg;
2f939d94 2008
61f71b34
DD
2009 if (fntype)
2010 switch (TREE_CODE (fntype))
2011 {
2012 case CALL_EXPR:
d47d0a8d
EB
2013 {
2014 tree fndecl = get_callee_fndecl (fntype);
2015 fntype = (fndecl
2016 ? TREE_TYPE (fndecl)
2017 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2018 }
61f71b34
DD
2019 break;
2020 case FUNCTION_DECL:
d47d0a8d 2021 fntype = TREE_TYPE (fntype);
61f71b34
DD
2022 break;
2023 case FUNCTION_TYPE:
2024 case METHOD_TYPE:
2025 break;
2026 case IDENTIFIER_NODE:
d47d0a8d 2027 fntype = NULL_TREE;
61f71b34
DD
2028 break;
2029 default:
d47d0a8d 2030 /* We don't expect other tree types here. */
0bccc606 2031 gcc_unreachable ();
61f71b34
DD
2032 }
2033
d47d0a8d 2034 if (VOID_TYPE_P (type))
d7bf8ada 2035 return 0;
500c353d 2036
ebf0bf7f
JJ
2037 /* If a record should be passed the same as its first (and only) member
2038 don't pass it as an aggregate. */
2039 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2040 return aggregate_value_p (first_field (type), fntype);
2041
cc77ae10
JM
2042 /* If the front end has decided that this needs to be passed by
2043 reference, do so. */
2044 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2045 && DECL_BY_REFERENCE (exp))
2046 return 1;
500c353d 2047
d47d0a8d
EB
2048 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2049 if (fntype && TREE_ADDRESSABLE (fntype))
500c353d 2050 return 1;
b8698a0f 2051
956d6950 2052 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
2053 and thus can't be returned in registers. */
2054 if (TREE_ADDRESSABLE (type))
2055 return 1;
d47d0a8d 2056
05e3bdb9 2057 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 2058 return 1;
d47d0a8d
EB
2059
2060 if (targetm.calls.return_in_memory (type, fntype))
2061 return 1;
2062
9d790a4f
RS
2063 /* Make sure we have suitable call-clobbered regs to return
2064 the value in; if not, we must return it in memory. */
1d636cc6 2065 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
2066
2067 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2068 it is OK. */
f8cfc6aa 2069 if (!REG_P (reg))
e71f7aa5
JW
2070 return 0;
2071
9d790a4f 2072 regno = REGNO (reg);
66fd46b6 2073 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
2074 for (i = 0; i < nregs; i++)
2075 if (! call_used_regs[regno + i])
2076 return 1;
d47d0a8d 2077
6f086dfc
RS
2078 return 0;
2079}
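
/* Illustrative example, not from the original sources: for a
   declaration like "struct big { int a[8]; } f (void);" most ABIs
   return the value in memory, so aggregate_value_p returns 1 and the
   caller passes a hidden address for the result; a C++ class with a
   non-trivial destructor is TREE_ADDRESSABLE and is likewise forced
   into memory.  A plain "int f (void)" whose value fits in a
   call-clobbered return register yields 0.  */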
2080\f
8fff4fc1
RH
2081/* Return true if we should assign DECL a pseudo register; false if it
2082 should live on the local stack. */
2083
2084bool
fa233e34 2085use_register_for_decl (const_tree decl)
8fff4fc1 2086{
c3284718 2087 if (!targetm.calls.allocate_stack_slots_for_args ())
007e61c2 2088 return true;
b8698a0f 2089
8fff4fc1
RH
2090 /* Honor volatile. */
2091 if (TREE_SIDE_EFFECTS (decl))
2092 return false;
2093
2094 /* Honor addressability. */
2095 if (TREE_ADDRESSABLE (decl))
2096 return false;
2097
2098 /* Only register-like things go in registers. */
2099 if (DECL_MODE (decl) == BLKmode)
2100 return false;
2101
2102 /* If -ffloat-store specified, don't put explicit float variables
2103 into registers. */
2104 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2105 propagates values across these stores, and it probably shouldn't. */
2106 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2107 return false;
2108
78e0d62b
RH
2109 /* If we're not interested in tracking debugging information for
2110 this decl, then we can certainly put it in a register. */
2111 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
2112 return true;
2113
d130d647
JJ
2114 if (optimize)
2115 return true;
2116
2117 if (!DECL_REGISTER (decl))
2118 return false;
2119
2120 switch (TREE_CODE (TREE_TYPE (decl)))
2121 {
2122 case RECORD_TYPE:
2123 case UNION_TYPE:
2124 case QUAL_UNION_TYPE:
2125 /* When not optimizing, disregard register keyword for variables with
2126 types containing methods, otherwise the methods won't be callable
2127 from the debugger. */
2128 if (TYPE_METHODS (TREE_TYPE (decl)))
2129 return false;
2130 break;
2131 default:
2132 break;
2133 }
2134
2135 return true;
8fff4fc1
RH
2136}
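
/* Illustrative example, not from the original sources: at -O0 a local
   declared "register int i;" normally ends up in a pseudo because
   DECL_REGISTER is honored above, whereas "volatile int v;" or a
   variable whose address is taken (TREE_ADDRESSABLE) gets a stack
   slot.  With optimization enabled, any scalar that passes the checks
   above is given a pseudo regardless of the register keyword.  */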
2137
0976078c
RH
2138/* Return true if TYPE should be passed by invisible reference. */
2139
2140bool
8cd5a4e0
RH
2141pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2142 tree type, bool named_arg)
0976078c
RH
2143{
2144 if (type)
2145 {
2146 /* If this type contains non-trivial constructors, then it is
2147 forbidden for the middle-end to create any new copies. */
2148 if (TREE_ADDRESSABLE (type))
2149 return true;
2150
d58247a3
RH
2151 /* GCC post 3.4 passes *all* variable sized types by reference. */
2152 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c 2153 return true;
ebf0bf7f
JJ
2154
2155 /* If a record type should be passed the same as its first (and only)
2156 member, use the type and mode of that member. */
2157 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2158 {
2159 type = TREE_TYPE (first_field (type));
2160 mode = TYPE_MODE (type);
2161 }
0976078c
RH
2162 }
2163
d5cc9181
JR
2164 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2165 type, named_arg);
0976078c
RH
2166}
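
/* Illustrative example, not from the original sources: a parameter
   whose type has variable size (TYPE_SIZE not an INTEGER_CST) is
   always passed by invisible reference, as is a C++ class type made
   TREE_ADDRESSABLE by a non-trivial copy constructor or destructor; a
   small POD struct is passed by value unless the target's
   pass_by_reference hook says otherwise for its ABI.  */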
2167
6cdd5672
RH
2168/* Return true if TYPE, which is passed by reference, should be callee
2169 copied instead of caller copied. */
2170
2171bool
2172reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2173 tree type, bool named_arg)
2174{
2175 if (type && TREE_ADDRESSABLE (type))
2176 return false;
d5cc9181
JR
2177 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2178 named_arg);
6cdd5672
RH
2179}
2180
6071dc7f
RH
2181/* Structures to communicate between the subroutines of assign_parms.
2182 The first holds data persistent across all parameters, the second
2183 is cleared out for each parameter. */
6f086dfc 2184
6071dc7f 2185struct assign_parm_data_all
6f086dfc 2186{
d5cc9181
JR
2187 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2188 should become a job of the target or otherwise encapsulated. */
2189 CUMULATIVE_ARGS args_so_far_v;
2190 cumulative_args_t args_so_far;
6f086dfc 2191 struct args_size stack_args_size;
6071dc7f
RH
2192 tree function_result_decl;
2193 tree orig_fnargs;
7a688d52
DM
2194 rtx_insn *first_conversion_insn;
2195 rtx_insn *last_conversion_insn;
6071dc7f
RH
2196 HOST_WIDE_INT pretend_args_size;
2197 HOST_WIDE_INT extra_pretend_bytes;
2198 int reg_parm_stack_space;
2199};
6f086dfc 2200
6071dc7f
RH
2201struct assign_parm_data_one
2202{
2203 tree nominal_type;
2204 tree passed_type;
2205 rtx entry_parm;
2206 rtx stack_parm;
2207 enum machine_mode nominal_mode;
2208 enum machine_mode passed_mode;
2209 enum machine_mode promoted_mode;
2210 struct locate_and_pad_arg_data locate;
2211 int partial;
2212 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
2213 BOOL_BITFIELD passed_pointer : 1;
2214 BOOL_BITFIELD on_stack : 1;
2215 BOOL_BITFIELD loaded_in_reg : 1;
2216};
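
/* Rough recap of the flow, inferred from the subroutines below and
   added for illustration: assign_parms fills one assign_parm_data_all
   for the whole function, then for each parameter fills an
   assign_parm_data_one via assign_parm_find_data_types and
   assign_parm_find_entry_rtl, and finally materializes the value with
   assign_parm_setup_block, assign_parm_setup_reg or
   assign_parm_setup_stack depending on where the parameter lives.  */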
ebb904cb 2217
6071dc7f 2218/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2219
6071dc7f
RH
2220static void
2221assign_parms_initialize_all (struct assign_parm_data_all *all)
2222{
fc2f1f53 2223 tree fntype ATTRIBUTE_UNUSED;
6f086dfc 2224
6071dc7f
RH
2225 memset (all, 0, sizeof (*all));
2226
2227 fntype = TREE_TYPE (current_function_decl);
2228
2229#ifdef INIT_CUMULATIVE_INCOMING_ARGS
d5cc9181 2230 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
6071dc7f 2231#else
d5cc9181 2232 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
6071dc7f
RH
2233 current_function_decl, -1);
2234#endif
d5cc9181 2235 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
6071dc7f 2236
ddbb449f
AM
2237#ifdef INCOMING_REG_PARM_STACK_SPACE
2238 all->reg_parm_stack_space
2239 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
6071dc7f
RH
2240#endif
2241}
6f086dfc 2242
6071dc7f
RH
2243/* If ARGS contains entries with complex types, split each such entry
2244	 into two entries of the component type, rewriting the contents of
2245	 *ARGS in place. */
2246
3b3f318a 2247static void
9771b263 2248split_complex_args (vec<tree> *args)
6071dc7f 2249{
3b3f318a 2250 unsigned i;
6071dc7f
RH
2251 tree p;
2252
9771b263 2253 FOR_EACH_VEC_ELT (*args, i, p)
6071dc7f
RH
2254 {
2255 tree type = TREE_TYPE (p);
2256 if (TREE_CODE (type) == COMPLEX_TYPE
2257 && targetm.calls.split_complex_arg (type))
2258 {
2259 tree decl;
2260 tree subtype = TREE_TYPE (type);
6ccd356e 2261 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
2262
2263 /* Rewrite the PARM_DECL's type with its component. */
3b3f318a 2264 p = copy_node (p);
6071dc7f
RH
2265 TREE_TYPE (p) = subtype;
2266 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2267 DECL_MODE (p) = VOIDmode;
2268 DECL_SIZE (p) = NULL;
2269 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
2270 /* If this arg must go in memory, put it in a pseudo here.
2271 We can't allow it to go in memory as per normal parms,
2272 because the usual place might not have the imag part
2273 adjacent to the real part. */
2274 DECL_ARTIFICIAL (p) = addressable;
2275 DECL_IGNORED_P (p) = addressable;
2276 TREE_ADDRESSABLE (p) = 0;
6071dc7f 2277 layout_decl (p, 0);
9771b263 2278 (*args)[i] = p;
6071dc7f
RH
2279
2280 /* Build a second synthetic decl. */
c2255bc4
AH
2281 decl = build_decl (EXPR_LOCATION (p),
2282 PARM_DECL, NULL_TREE, subtype);
6071dc7f 2283 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
2284 DECL_ARTIFICIAL (decl) = addressable;
2285 DECL_IGNORED_P (decl) = addressable;
6071dc7f 2286 layout_decl (decl, 0);
9771b263 2287 args->safe_insert (++i, decl);
6071dc7f
RH
2288 }
2289 }
6071dc7f
RH
2290}
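
/* Illustrative example, not from the original sources: for
   "void f (_Complex double z)" on a target whose split_complex_arg
   hook returns true, the single PARM_DECL for z is rewritten in place
   to a double PARM_DECL holding the real part, and a second synthetic
   double PARM_DECL for the imaginary part is inserted right after it
   in the argument vector.  */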
2291
2292/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2293 the hidden struct return argument, and (abi willing) complex args.
2294 Return the new parameter list. */
2295
9771b263 2296static vec<tree>
6071dc7f
RH
2297assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2298{
2299 tree fndecl = current_function_decl;
2300 tree fntype = TREE_TYPE (fndecl);
6e1aa848 2301 vec<tree> fnargs = vNULL;
3b3f318a
RG
2302 tree arg;
2303
910ad8de 2304 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
9771b263 2305 fnargs.safe_push (arg);
3b3f318a
RG
2306
2307 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2308
2309 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2310 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
e3b5732b 2311 && ! cfun->returns_pcc_struct
61f71b34 2312 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2313 {
f9f29478 2314 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2315 tree decl;
6f086dfc 2316
c2255bc4 2317 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
8dcfef8f 2318 PARM_DECL, get_identifier (".result_ptr"), type);
6071dc7f
RH
2319 DECL_ARG_TYPE (decl) = type;
2320 DECL_ARTIFICIAL (decl) = 1;
8dcfef8f
AO
2321 DECL_NAMELESS (decl) = 1;
2322 TREE_CONSTANT (decl) = 1;
6f086dfc 2323
910ad8de 2324 DECL_CHAIN (decl) = all->orig_fnargs;
3b3f318a 2325 all->orig_fnargs = decl;
9771b263 2326 fnargs.safe_insert (0, decl);
3b3f318a 2327
6071dc7f 2328 all->function_result_decl = decl;
6f086dfc 2329 }
718fe406 2330
42ba5130
RH
2331 /* If the target wants to split complex arguments into scalars, do so. */
2332 if (targetm.calls.split_complex_arg)
3b3f318a 2333 split_complex_args (&fnargs);
ded9bf77 2334
6071dc7f
RH
2335 return fnargs;
2336}
e7949876 2337
6071dc7f
RH
2338/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2339 data for the parameter. Incorporate ABI specifics such as pass-by-
2340 reference and type promotion. */
6f086dfc 2341
6071dc7f
RH
2342static void
2343assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2344 struct assign_parm_data_one *data)
2345{
2346 tree nominal_type, passed_type;
2347 enum machine_mode nominal_mode, passed_mode, promoted_mode;
cde0f3fd 2348 int unsignedp;
6f086dfc 2349
6071dc7f
RH
2350 memset (data, 0, sizeof (*data));
2351
fa10beec 2352 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
e3b5732b 2353 if (!cfun->stdarg)
fa10beec 2354 data->named_arg = 1; /* No variadic parms. */
910ad8de 2355 else if (DECL_CHAIN (parm))
fa10beec 2356 data->named_arg = 1; /* Not the last non-variadic parm. */
d5cc9181 2357 else if (targetm.calls.strict_argument_naming (all->args_so_far))
fa10beec 2358 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2359 else
fa10beec 2360 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2361
2362 nominal_type = TREE_TYPE (parm);
2363 passed_type = DECL_ARG_TYPE (parm);
2364
2365 /* Look out for errors propagating this far. Also, if the parameter's
2366 type is void then its value doesn't matter. */
2367 if (TREE_TYPE (parm) == error_mark_node
2368 /* This can happen after weird syntax errors
2369 or if an enum type is defined among the parms. */
2370 || TREE_CODE (parm) != PARM_DECL
2371 || passed_type == NULL
2372 || VOID_TYPE_P (nominal_type))
2373 {
2374 nominal_type = passed_type = void_type_node;
2375 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2376 goto egress;
2377 }
108b7d3d 2378
6071dc7f
RH
2379 /* Find mode of arg as it is passed, and mode of arg as it should be
2380 during execution of this function. */
2381 passed_mode = TYPE_MODE (passed_type);
2382 nominal_mode = TYPE_MODE (nominal_type);
2383
ebf0bf7f
JJ
2384 /* If the parm is to be passed as a transparent union or record, use the
2385 type of the first field for the tests below. We have already verified
2386 that the modes are the same. */
2387 if ((TREE_CODE (passed_type) == UNION_TYPE
2388 || TREE_CODE (passed_type) == RECORD_TYPE)
2389 && TYPE_TRANSPARENT_AGGR (passed_type))
2390 passed_type = TREE_TYPE (first_field (passed_type));
6071dc7f 2391
0976078c 2392 /* See if this arg was passed by invisible reference. */
d5cc9181 2393 if (pass_by_reference (&all->args_so_far_v, passed_mode,
0976078c 2394 passed_type, data->named_arg))
6071dc7f
RH
2395 {
2396 passed_type = nominal_type = build_pointer_type (passed_type);
2397 data->passed_pointer = true;
fd91cfe3 2398 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
6071dc7f 2399 }
6f086dfc 2400
6071dc7f 2401 /* Find mode as it is passed by the ABI. */
cde0f3fd
PB
2402 unsignedp = TYPE_UNSIGNED (passed_type);
2403 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2404 TREE_TYPE (current_function_decl), 0);
6f086dfc 2405
6071dc7f
RH
2406 egress:
2407 data->nominal_type = nominal_type;
2408 data->passed_type = passed_type;
2409 data->nominal_mode = nominal_mode;
2410 data->passed_mode = passed_mode;
2411 data->promoted_mode = promoted_mode;
2412}
16bae307 2413
6071dc7f 2414/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2415
6071dc7f
RH
2416static void
2417assign_parms_setup_varargs (struct assign_parm_data_all *all,
2418 struct assign_parm_data_one *data, bool no_rtl)
2419{
2420 int varargs_pretend_bytes = 0;
2421
d5cc9181 2422 targetm.calls.setup_incoming_varargs (all->args_so_far,
6071dc7f
RH
2423 data->promoted_mode,
2424 data->passed_type,
2425 &varargs_pretend_bytes, no_rtl);
2426
2427 /* If the back-end has requested extra stack space, record how much is
2428 needed. Do not change pretend_args_size otherwise since it may be
2429 nonzero from an earlier partial argument. */
2430 if (varargs_pretend_bytes > 0)
2431 all->pretend_args_size = varargs_pretend_bytes;
2432}
a53e14c0 2433
6071dc7f
RH
2434/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2435 the incoming location of the current parameter. */
2436
2437static void
2438assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2439 struct assign_parm_data_one *data)
2440{
2441 HOST_WIDE_INT pretend_bytes = 0;
2442 rtx entry_parm;
2443 bool in_regs;
2444
2445 if (data->promoted_mode == VOIDmode)
2446 {
2447 data->entry_parm = data->stack_parm = const0_rtx;
2448 return;
2449 }
a53e14c0 2450
d5cc9181 2451 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2452 data->promoted_mode,
2453 data->passed_type,
2454 data->named_arg);
6f086dfc 2455
6071dc7f
RH
2456 if (entry_parm == 0)
2457 data->promoted_mode = data->passed_mode;
6f086dfc 2458
6071dc7f
RH
2459 /* Determine parm's home in the stack, in case it arrives in the stack
2460 or we should pretend it did. Compute the stack position and rtx where
2461 the argument arrives and its size.
6f086dfc 2462
6071dc7f
RH
2463 There is one complexity here: If this was a parameter that would
2464 have been passed in registers, but wasn't only because it is
2465 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2466 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2467 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2468 as it was the previous time. */
2469 in_regs = entry_parm != 0;
6f086dfc 2470#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2471 in_regs = true;
e7949876 2472#endif
6071dc7f
RH
2473 if (!in_regs && !data->named_arg)
2474 {
d5cc9181 2475 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
e7949876 2476 {
6071dc7f 2477 rtx tem;
d5cc9181 2478 tem = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2479 data->promoted_mode,
2480 data->passed_type, true);
6071dc7f 2481 in_regs = tem != NULL;
e7949876 2482 }
6071dc7f 2483 }
e7949876 2484
6071dc7f
RH
2485 /* If this parameter was passed both in registers and in the stack, use
2486 the copy on the stack. */
fe984136
RH
2487 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2488 data->passed_type))
6071dc7f 2489 entry_parm = 0;
e7949876 2490
6071dc7f
RH
2491 if (entry_parm)
2492 {
2493 int partial;
2494
d5cc9181 2495 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
78a52f11
RH
2496 data->promoted_mode,
2497 data->passed_type,
2498 data->named_arg);
6071dc7f
RH
2499 data->partial = partial;
2500
2501 /* The caller might already have allocated stack space for the
2502 register parameters. */
2503 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2504 {
6071dc7f
RH
2505 /* Part of this argument is passed in registers and part
2506 is passed on the stack. Ask the prologue code to extend
2507 the stack part so that we can recreate the full value.
2508
2509 PRETEND_BYTES is the size of the registers we need to store.
2510 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2511 stack space that the prologue should allocate.
2512
2513 Internally, gcc assumes that the argument pointer is aligned
2514 to STACK_BOUNDARY bits. This is used both for alignment
2515 optimizations (see init_emit) and to locate arguments that are
2516 aligned to more than PARM_BOUNDARY bits. We must preserve this
2517 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2518 a stack boundary. */
2519
2520 /* We assume at most one partial arg, and it must be the first
2521 argument on the stack. */
0bccc606 2522 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2523
78a52f11 2524 pretend_bytes = partial;
6071dc7f
RH
2525 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2526
2527 /* We want to align relative to the actual stack pointer, so
2528 don't include this in the stack size until later. */
2529 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2530 }
6071dc7f 2531 }
e7949876 2532
6071dc7f 2533 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e4ceca5 2534 all->reg_parm_stack_space,
6071dc7f
RH
2535 entry_parm ? data->partial : 0, current_function_decl,
2536 &all->stack_args_size, &data->locate);
6f086dfc 2537
e94a448f
L
2538 /* Update parm_stack_boundary if this parameter is passed in the
2539 stack. */
2540 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2541 crtl->parm_stack_boundary = data->locate.boundary;
2542
6071dc7f
RH
2543 /* Adjust offsets to include the pretend args. */
2544 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2545 data->locate.slot_offset.constant += pretend_bytes;
2546 data->locate.offset.constant += pretend_bytes;
ebca59c3 2547
6071dc7f
RH
2548 data->entry_parm = entry_parm;
2549}
6f086dfc 2550
6071dc7f
RH
2551/* A subroutine of assign_parms. If there is actually space on the stack
2552 for this parm, count it in stack_args_size and return true. */
6f086dfc 2553
6071dc7f
RH
2554static bool
2555assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2556 struct assign_parm_data_one *data)
2557{
2e6ae27f 2558 /* Trivially true if we've no incoming register. */
6071dc7f
RH
2559 if (data->entry_parm == NULL)
2560 ;
2561 /* Also true if we're partially in registers and partially not,
2562 since we've arranged to drop the entire argument on the stack. */
2563 else if (data->partial != 0)
2564 ;
2565 /* Also true if the target says that it's passed in both registers
2566 and on the stack. */
2567 else if (GET_CODE (data->entry_parm) == PARALLEL
2568 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2569 ;
2570 /* Also true if the target says that there's stack allocated for
2571 all register parameters. */
2572 else if (all->reg_parm_stack_space > 0)
2573 ;
2574 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2575 else
2576 return false;
6f086dfc 2577
6071dc7f
RH
2578 all->stack_args_size.constant += data->locate.size.constant;
2579 if (data->locate.size.var)
2580 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2581
6071dc7f
RH
2582 return true;
2583}
0d1416c6 2584
6071dc7f
RH
2585/* A subroutine of assign_parms. Given that this parameter is allocated
2586 stack space by the ABI, find it. */
6f086dfc 2587
6071dc7f
RH
2588static void
2589assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2590{
2591 rtx offset_rtx, stack_parm;
2592 unsigned int align, boundary;
6f086dfc 2593
6071dc7f
RH
2594 /* If we're passing this arg using a reg, make its stack home the
2595 aligned stack slot. */
2596 if (data->entry_parm)
2597 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2598 else
2599 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2600
38173d38 2601 stack_parm = crtl->args.internal_arg_pointer;
6071dc7f
RH
2602 if (offset_rtx != const0_rtx)
2603 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2604 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2605
08ab0acf 2606 if (!data->passed_pointer)
997f78fb 2607 {
08ab0acf
JJ
2608 set_mem_attributes (stack_parm, parm, 1);
2609 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2610 while promoted mode's size is needed. */
2611 if (data->promoted_mode != BLKmode
2612 && data->promoted_mode != DECL_MODE (parm))
997f78fb 2613 {
f5541398 2614 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
527210c4 2615 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
08ab0acf
JJ
2616 {
2617 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2618 data->promoted_mode);
2619 if (offset)
527210c4 2620 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
08ab0acf 2621 }
997f78fb
JJ
2622 }
2623 }
6071dc7f 2624
bfc45551
AM
2625 boundary = data->locate.boundary;
2626 align = BITS_PER_UNIT;
6071dc7f
RH
2627
2628 /* If we're padding upward, we know that the alignment of the slot
c2ed6cf8 2629 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
6071dc7f
RH
2630 intentionally forcing upward padding. Otherwise we have to come
2631 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2632 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f 2633 align = boundary;
481683e1 2634 else if (CONST_INT_P (offset_rtx))
6071dc7f
RH
2635 {
2636 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2637 align = align & -align;
2638 }
bfc45551 2639 set_mem_align (stack_parm, align);
6071dc7f
RH
2640
2641 if (data->entry_parm)
2642 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2643
2644 data->stack_parm = stack_parm;
2645}
2646
2647/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2648 always valid and contiguous. */
2649
2650static void
2651assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2652{
2653 rtx entry_parm = data->entry_parm;
2654 rtx stack_parm = data->stack_parm;
2655
2656 /* If this parm was passed part in regs and part in memory, pretend it
2657 arrived entirely in memory by pushing the register-part onto the stack.
2658 In the special case of a DImode or DFmode that is split, we could put
2659 it together in a pseudoreg directly, but for now that's not worth
2660 bothering with. */
2661 if (data->partial != 0)
2662 {
2663 /* Handle calls that pass values in multiple non-contiguous
2664 locations. The Irix 6 ABI has examples of this. */
2665 if (GET_CODE (entry_parm) == PARALLEL)
1a8cb155 2666 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
b8698a0f 2667 data->passed_type,
6071dc7f 2668 int_size_in_bytes (data->passed_type));
6f086dfc 2669 else
78a52f11
RH
2670 {
2671 gcc_assert (data->partial % UNITS_PER_WORD == 0);
1a8cb155
RS
2672 move_block_from_reg (REGNO (entry_parm),
2673 validize_mem (copy_rtx (stack_parm)),
78a52f11
RH
2674 data->partial / UNITS_PER_WORD);
2675 }
6f086dfc 2676
6071dc7f
RH
2677 entry_parm = stack_parm;
2678 }
6f086dfc 2679
6071dc7f
RH
2680 /* If we didn't decide this parm came in a register, by default it came
2681 on the stack. */
2682 else if (entry_parm == NULL)
2683 entry_parm = stack_parm;
2684
2685 /* When an argument is passed in multiple locations, we can't make use
2686 of this information, but we can save some copying if the whole argument
2687 is passed in a single register. */
2688 else if (GET_CODE (entry_parm) == PARALLEL
2689 && data->nominal_mode != BLKmode
2690 && data->passed_mode != BLKmode)
2691 {
2692 size_t i, len = XVECLEN (entry_parm, 0);
2693
2694 for (i = 0; i < len; i++)
2695 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2696 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2697 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2698 == data->passed_mode)
2699 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2700 {
2701 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2702 break;
2703 }
2704 }
e68a6ce1 2705
6071dc7f
RH
2706 data->entry_parm = entry_parm;
2707}
6f086dfc 2708
4d2a9850
DJ
2709/* A subroutine of assign_parms. Reconstitute any values which were
2710 passed in multiple registers and would fit in a single register. */
2711
2712static void
2713assign_parm_remove_parallels (struct assign_parm_data_one *data)
2714{
2715 rtx entry_parm = data->entry_parm;
2716
2717 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2718 This can be done with register operations rather than on the
2719 stack, even if we will store the reconstituted parameter on the
2720 stack later. */
85776d60 2721 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
4d2a9850
DJ
2722 {
2723 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
bbd46fd5 2724 emit_group_store (parmreg, entry_parm, data->passed_type,
4d2a9850
DJ
2725 GET_MODE_SIZE (GET_MODE (entry_parm)));
2726 entry_parm = parmreg;
2727 }
2728
2729 data->entry_parm = entry_parm;
2730}
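
/* Illustrative example, not from the original sources (the register
   numbers are invented): a small two-field struct passed in two 32-bit
   registers may arrive as
   (parallel:DI [(expr_list (reg:SI 4) (const_int 0))
                 (expr_list (reg:SI 5) (const_int 4))]);
   the emit_group_store above reassembles the pieces into one DImode
   pseudo so later code can treat the parameter as a single register
   value, even if it is eventually written back to the stack.  */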
2731
6071dc7f
RH
2732/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2733 always valid and properly aligned. */
6f086dfc 2734
6071dc7f
RH
2735static void
2736assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2737{
2738 rtx stack_parm = data->stack_parm;
2739
2740 /* If we can't trust the parm stack slot to be aligned enough for its
2741 ultimate type, don't use that slot after entry. We'll make another
2742 stack slot, if we need one. */
bfc45551
AM
2743 if (stack_parm
2744 && ((STRICT_ALIGNMENT
2745 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2746 || (data->nominal_type
2747 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2748 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2749 stack_parm = NULL;
2750
2751 /* If parm was passed in memory, and we need to convert it on entry,
2752 don't store it back in that same slot. */
2753 else if (data->entry_parm == stack_parm
2754 && data->nominal_mode != BLKmode
2755 && data->nominal_mode != data->passed_mode)
2756 stack_parm = NULL;
2757
7d69de61
RH
2758 /* If stack protection is in effect for this function, don't leave any
2759 pointers in their passed stack slots. */
cb91fab0 2760 else if (crtl->stack_protect_guard
7d69de61
RH
2761 && (flag_stack_protect == 2
2762 || data->passed_pointer
2763 || POINTER_TYPE_P (data->nominal_type)))
2764 stack_parm = NULL;
2765
6071dc7f
RH
2766 data->stack_parm = stack_parm;
2767}
a0506b54 2768
6071dc7f
RH
2769/* A subroutine of assign_parms. Return true if the current parameter
2770 should be stored as a BLKmode in the current frame. */
2771
2772static bool
2773assign_parm_setup_block_p (struct assign_parm_data_one *data)
2774{
2775 if (data->nominal_mode == BLKmode)
2776 return true;
85776d60
DJ
2777 if (GET_MODE (data->entry_parm) == BLKmode)
2778 return true;
531547e9 2779
6e985040 2780#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2781 /* Only assign_parm_setup_block knows how to deal with register arguments
2782 that are padded at the least significant end. */
2783 if (REG_P (data->entry_parm)
2784 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2785 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2786 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2787 return true;
6e985040 2788#endif
6071dc7f
RH
2789
2790 return false;
2791}
2792
b8698a0f 2793/* A subroutine of assign_parms. Arrange for the parameter to be
6071dc7f
RH
2794 present and valid in DATA->STACK_RTL. */
2795
2796static void
27e29549
RH
2797assign_parm_setup_block (struct assign_parm_data_all *all,
2798 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2799{
2800 rtx entry_parm = data->entry_parm;
2801 rtx stack_parm = data->stack_parm;
bfc45551
AM
2802 HOST_WIDE_INT size;
2803 HOST_WIDE_INT size_stored;
6071dc7f 2804
27e29549
RH
2805 if (GET_CODE (entry_parm) == PARALLEL)
2806 entry_parm = emit_group_move_into_temps (entry_parm);
2807
bfc45551
AM
2808 size = int_size_in_bytes (data->passed_type);
2809 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2810 if (stack_parm == 0)
2811 {
a561d88b 2812 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2813 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2814 DECL_ALIGN (parm));
bfc45551
AM
2815 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2816 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2817 set_mem_attributes (stack_parm, parm, 1);
2818 }
2819
6071dc7f
RH
2820 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2821 calls that pass values in multiple non-contiguous locations. */
2822 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2823 {
6071dc7f
RH
2824 rtx mem;
2825
2826 /* Note that we will be storing an integral number of words.
2827 So we have to be careful to ensure that we allocate an
bfc45551 2828 integral number of words. We do this above when we call
6071dc7f
RH
2829 assign_stack_local if space was not allocated in the argument
2830 list. If it was, this will not work if PARM_BOUNDARY is not
2831 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2832	 if it becomes a problem. The exception is when BLKmode arrives
2833	 with arguments not conforming to word_mode. */
2834
bfc45551
AM
2835 if (data->stack_parm == 0)
2836 ;
6071dc7f
RH
2837 else if (GET_CODE (entry_parm) == PARALLEL)
2838 ;
0bccc606
NS
2839 else
2840 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2841
1a8cb155 2842 mem = validize_mem (copy_rtx (stack_parm));
c6b97fac 2843
6071dc7f
RH
2844 /* Handle values in multiple non-contiguous locations. */
2845 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2846 {
bb27eeda
SE
2847 push_to_sequence2 (all->first_conversion_insn,
2848 all->last_conversion_insn);
27e29549 2849 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2850 all->first_conversion_insn = get_insns ();
2851 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2852 end_sequence ();
2853 }
c6b97fac 2854
6071dc7f
RH
2855 else if (size == 0)
2856 ;
5c07bd7a 2857
6071dc7f
RH
2858 /* If SIZE is that of a mode no bigger than a word, just use
2859 that mode's store operation. */
2860 else if (size <= UNITS_PER_WORD)
2861 {
2862 enum machine_mode mode
2863 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2864
6071dc7f 2865 if (mode != BLKmode
6e985040 2866#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2867 && (size == UNITS_PER_WORD
2868 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2869 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2870#endif
6071dc7f
RH
2871 )
2872 {
208996c7
RS
2873 rtx reg;
2874
2875 /* We are really truncating a word_mode value containing
2876 SIZE bytes into a value of mode MODE. If such an
2877 operation requires no actual instructions, we can refer
2878 to the value directly in mode MODE, otherwise we must
2879 start with the register in word_mode and explicitly
2880 convert it. */
2881 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2882 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2883 else
2884 {
2885 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2886 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2887 }
6071dc7f
RH
2888 emit_move_insn (change_address (mem, mode, 0), reg);
2889 }
c6b97fac 2890
6071dc7f
RH
2891 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2892 machine must be aligned to the left before storing
2893 to memory. Note that the previous test doesn't
2894 handle all cases (e.g. SIZE == 3). */
2895 else if (size != UNITS_PER_WORD
6e985040 2896#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2897 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2898 == downward)
6e985040 2899#else
6071dc7f 2900 && BYTES_BIG_ENDIAN
6e985040 2901#endif
6071dc7f
RH
2902 )
2903 {
2904 rtx tem, x;
2905 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2906 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2907
eb6c3df1 2908 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
6071dc7f
RH
2909 tem = change_address (mem, word_mode, 0);
2910 emit_move_insn (tem, x);
6f086dfc 2911 }
6071dc7f 2912 else
27e29549 2913 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2914 size_stored / UNITS_PER_WORD);
6f086dfc 2915 }
6071dc7f 2916 else
27e29549 2917 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2918 size_stored / UNITS_PER_WORD);
2919 }
bfc45551
AM
2920 else if (data->stack_parm == 0)
2921 {
bb27eeda 2922 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2923 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2924 BLOCK_OP_NORMAL);
bb27eeda
SE
2925 all->first_conversion_insn = get_insns ();
2926 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
2927 end_sequence ();
2928 }
6071dc7f 2929
bfc45551 2930 data->stack_parm = stack_parm;
6071dc7f
RH
2931 SET_DECL_RTL (parm, stack_parm);
2932}
2933
2934/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2935 parameter. Get it there. Perform all ABI specified conversions. */
2936
2937static void
2938assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2939 struct assign_parm_data_one *data)
2940{
71008de4
BS
2941 rtx parmreg, validated_mem;
2942 rtx equiv_stack_parm;
6071dc7f
RH
2943 enum machine_mode promoted_nominal_mode;
2944 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2945 bool did_conversion = false;
71008de4 2946 bool need_conversion, moved;
6071dc7f
RH
2947
2948 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
2949 need to do it in a wider mode. Using 2 here makes the result
2950 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 2951 promoted_nominal_mode
cde0f3fd 2952 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 2953 TREE_TYPE (current_function_decl), 2);
6071dc7f
RH
2954
2955 parmreg = gen_reg_rtx (promoted_nominal_mode);
2956
2957 if (!DECL_ARTIFICIAL (parm))
2958 mark_user_reg (parmreg);
2959
2960 /* If this was an item that we received a pointer to,
2961 set DECL_RTL appropriately. */
2962 if (data->passed_pointer)
2963 {
2964 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2965 set_mem_attributes (x, parm, 1);
2966 SET_DECL_RTL (parm, x);
2967 }
2968 else
389fdba0 2969 SET_DECL_RTL (parm, parmreg);
6071dc7f 2970
4d2a9850
DJ
2971 assign_parm_remove_parallels (data);
2972
666e3ceb
PB
2973 /* Copy the value into the register, thus bridging between
2974 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2975
71008de4 2976 equiv_stack_parm = data->stack_parm;
1a8cb155 2977 validated_mem = validize_mem (copy_rtx (data->entry_parm));
71008de4
BS
2978
2979 need_conversion = (data->nominal_mode != data->passed_mode
2980 || promoted_nominal_mode != data->promoted_mode);
2981 moved = false;
2982
dbb94435
BS
2983 if (need_conversion
2984 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2985 && data->nominal_mode == data->passed_mode
2986 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 2987 {
6071dc7f
RH
2988 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2989 mode, by the caller. We now have to convert it to
2990 NOMINAL_MODE, if different. However, PARMREG may be in
2991 a different mode than NOMINAL_MODE if it is being stored
2992 promoted.
2993
2994 If ENTRY_PARM is a hard register, it might be in a register
2995 not valid for operating in its mode (e.g., an odd-numbered
2996 register for a DFmode). In that case, moves are the only
2997 thing valid, so we can't do a convert from there. This
2998	 occurs when the calling sequence allows such misaligned
2999 usages.
3000
3001 In addition, the conversion may involve a call, which could
3002 clobber parameters which haven't been copied to pseudo
71008de4
BS
3003 registers yet.
3004
3005 First, we try to emit an insn which performs the necessary
3006 conversion. We verify that this insn does not clobber any
3007 hard registers. */
3008
3009 enum insn_code icode;
3010 rtx op0, op1;
3011
3012 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3013 unsignedp);
3014
3015 op0 = parmreg;
3016 op1 = validated_mem;
3017 if (icode != CODE_FOR_nothing
2ef6ce06
RS
3018 && insn_operand_matches (icode, 0, op0)
3019 && insn_operand_matches (icode, 1, op1))
71008de4
BS
3020 {
3021 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
b32d5189
DM
3022 rtx_insn *insn, *insns;
3023 rtx t = op1;
71008de4
BS
3024 HARD_REG_SET hardregs;
3025
3026 start_sequence ();
f9fef349
JJ
3027 /* If op1 is a hard register that is likely spilled, first
3028 force it into a pseudo, otherwise combiner might extend
3029 its lifetime too much. */
3030 if (GET_CODE (t) == SUBREG)
3031 t = SUBREG_REG (t);
3032 if (REG_P (t)
3033 && HARD_REGISTER_P (t)
3034 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3035 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3036 {
3037 t = gen_reg_rtx (GET_MODE (op1));
3038 emit_move_insn (t, op1);
3039 }
3040 else
3041 t = op1;
a11899b2
DM
3042 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3043 data->passed_mode, unsignedp);
3044 emit_insn (pat);
71008de4
BS
3045 insns = get_insns ();
3046
3047 moved = true;
3048 CLEAR_HARD_REG_SET (hardregs);
3049 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3050 {
3051 if (INSN_P (insn))
3052 note_stores (PATTERN (insn), record_hard_reg_sets,
3053 &hardregs);
3054 if (!hard_reg_set_empty_p (hardregs))
3055 moved = false;
3056 }
3057
3058 end_sequence ();
3059
3060 if (moved)
3061 {
3062 emit_insn (insns);
dbb94435
BS
3063 if (equiv_stack_parm != NULL_RTX)
3064 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3065 equiv_stack_parm);
71008de4
BS
3066 }
3067 }
3068 }
3069
3070 if (moved)
3071 /* Nothing to do. */
3072 ;
3073 else if (need_conversion)
3074 {
3075 /* We did not have an insn to convert directly, or the sequence
3076 generated appeared unsafe. We must first copy the parm to a
3077 pseudo reg, and save the conversion until after all
6071dc7f
RH
3078 parameters have been moved. */
3079
71008de4 3080 int save_tree_used;
6071dc7f
RH
3081 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3082
71008de4 3083 emit_move_insn (tempreg, validated_mem);
6071dc7f 3084
bb27eeda 3085 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3086 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3087
3088 if (GET_CODE (tempreg) == SUBREG
3089 && GET_MODE (tempreg) == data->nominal_mode
3090 && REG_P (SUBREG_REG (tempreg))
3091 && data->nominal_mode == data->passed_mode
3092 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3093 && GET_MODE_SIZE (GET_MODE (tempreg))
3094 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3095 {
6071dc7f
RH
3096 /* The argument is already sign/zero extended, so note it
3097 into the subreg. */
3098 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
362d42dc 3099 SUBREG_PROMOTED_SET (tempreg, unsignedp);
6071dc7f 3100 }
00d8a4c1 3101
6071dc7f
RH
3102 /* TREE_USED gets set erroneously during expand_assignment. */
3103 save_tree_used = TREE_USED (parm);
79f5e442 3104 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3105 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3106 all->first_conversion_insn = get_insns ();
3107 all->last_conversion_insn = get_last_insn ();
6071dc7f 3108 end_sequence ();
00d8a4c1 3109
6071dc7f
RH
3110 did_conversion = true;
3111 }
3112 else
71008de4 3113 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3114
3115 /* If we were passed a pointer but the actual value can safely live
f7e088e7
EB
3116 in a register, retrieve it and use it directly. */
3117 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
6071dc7f
RH
3118 {
3119 /* We can't use nominal_mode, because it will have been set to
3120 Pmode above. We must use the actual mode of the parm. */
f7e088e7
EB
3121 if (use_register_for_decl (parm))
3122 {
3123 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3124 mark_user_reg (parmreg);
3125 }
3126 else
3127 {
3128 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3129 TYPE_MODE (TREE_TYPE (parm)),
3130 TYPE_ALIGN (TREE_TYPE (parm)));
3131 parmreg
3132 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3133 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3134 align);
3135 set_mem_attributes (parmreg, parm, 1);
3136 }
cd5b3469 3137
6071dc7f
RH
3138 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3139 {
3140 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3141 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3142
bb27eeda
SE
3143 push_to_sequence2 (all->first_conversion_insn,
3144 all->last_conversion_insn);
6071dc7f
RH
3145 emit_move_insn (tempreg, DECL_RTL (parm));
3146 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3147 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3148 all->first_conversion_insn = get_insns ();
3149 all->last_conversion_insn = get_last_insn ();
6071dc7f 3150 end_sequence ();
6f086dfc 3151
6071dc7f
RH
3152 did_conversion = true;
3153 }
3154 else
3155 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3156
6071dc7f 3157 SET_DECL_RTL (parm, parmreg);
797a6ac1 3158
6071dc7f
RH
3159 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3160 now the parm. */
3161 data->stack_parm = NULL;
3162 }
ddef6bc7 3163
6071dc7f
RH
3164 /* Mark the register as eliminable if we did no conversion and it was
3165 copied from memory at a fixed offset, and the arg pointer was not
3166 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3167 offset formed an invalid address, such memory-equivalences as we
3168 make here would screw up life analysis for it. */
3169 if (data->nominal_mode == data->passed_mode
3170 && !did_conversion
3171 && data->stack_parm != 0
3172 && MEM_P (data->stack_parm)
3173 && data->locate.offset.var == 0
3174 && reg_mentioned_p (virtual_incoming_args_rtx,
3175 XEXP (data->stack_parm, 0)))
3176 {
691fe203
DM
3177 rtx_insn *linsn = get_last_insn ();
3178 rtx_insn *sinsn;
3179 rtx set;
a03caf76 3180
6071dc7f
RH
3181 /* Mark complex types separately. */
3182 if (GET_CODE (parmreg) == CONCAT)
3183 {
3184 enum machine_mode submode
3185 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3186 int regnor = REGNO (XEXP (parmreg, 0));
3187 int regnoi = REGNO (XEXP (parmreg, 1));
3188 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3189 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3190 GET_MODE_SIZE (submode));
6071dc7f
RH
3191
3192 /* Scan backwards for the set of the real and
3193 imaginary parts. */
3194 for (sinsn = linsn; sinsn != 0;
3195 sinsn = prev_nonnote_insn (sinsn))
3196 {
3197 set = single_set (sinsn);
3198 if (set == 0)
3199 continue;
3200
3201 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3202 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3203 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3204 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3205 }
6071dc7f 3206 }
7543f918
JR
3207 else
3208 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
6071dc7f
RH
3209 }
3210
3211 /* For pointer data type, suggest pointer register. */
3212 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3213 mark_reg_pointer (parmreg,
3214 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3215}
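/* Editor's note (illustration only, not part of the original sources):
   a minimal sketch of the net effect of assign_parm_setup_reg, assuming
   a hypothetical target that promotes sub-word arguments to word-sized
   registers; the register numbers are made up.

       short f (short x);

       DECL_INCOMING_RTL (x):  (reg:SI 4)    hard reg, promoted mode
       DECL_RTL (x):           (reg:HI 90)   pseudo in the nominal mode

   When the parm also has a fixed stack home, the copy into the pseudo
   carries a REG_EQUIV note naming that slot, which is what later lets
   the register allocator fall back to the memory copy if the pseudo
   never receives a hard register.  */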
3216
3217/* A subroutine of assign_parms. Allocate stack space to hold the current
3218 parameter. Get it there. Perform all ABI specified conversions. */
3219
3220static void
3221assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3222 struct assign_parm_data_one *data)
3223{
3224 /* Value must be stored in the stack slot STACK_PARM during function
3225 execution. */
bfc45551 3226 bool to_conversion = false;
6071dc7f 3227
4d2a9850
DJ
3228 assign_parm_remove_parallels (data);
3229
6071dc7f
RH
3230 if (data->promoted_mode != data->nominal_mode)
3231 {
3232 /* Conversion is required. */
3233 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3234
1a8cb155 3235 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
6071dc7f 3236
bb27eeda 3237 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3238 to_conversion = true;
3239
6071dc7f
RH
3240 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3241 TYPE_UNSIGNED (TREE_TYPE (parm)));
3242
3243 if (data->stack_parm)
dd67163f
JJ
3244 {
3245 int offset = subreg_lowpart_offset (data->nominal_mode,
3246 GET_MODE (data->stack_parm));
3247 /* ??? This may need a big-endian conversion on sparc64. */
3248 data->stack_parm
3249 = adjust_address (data->stack_parm, data->nominal_mode, 0);
527210c4 3250 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
dd67163f 3251 set_mem_offset (data->stack_parm,
527210c4 3252 MEM_OFFSET (data->stack_parm) + offset);
dd67163f 3253 }
6071dc7f
RH
3254 }
3255
3256 if (data->entry_parm != data->stack_parm)
3257 {
bfc45551
AM
3258 rtx src, dest;
3259
6071dc7f
RH
3260 if (data->stack_parm == 0)
3261 {
3a695389
UW
3262 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3263 GET_MODE (data->entry_parm),
3264 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3265 data->stack_parm
3266 = assign_stack_local (GET_MODE (data->entry_parm),
3267 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3268 align);
6071dc7f 3269 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3270 }
6071dc7f 3271
1a8cb155
RS
3272 dest = validize_mem (copy_rtx (data->stack_parm));
3273 src = validize_mem (copy_rtx (data->entry_parm));
bfc45551
AM
3274
3275 if (MEM_P (src))
6f086dfc 3276 {
bfc45551
AM
3277 /* Use a block move to handle potentially misaligned entry_parm. */
3278 if (!to_conversion)
bb27eeda
SE
3279 push_to_sequence2 (all->first_conversion_insn,
3280 all->last_conversion_insn);
bfc45551
AM
3281 to_conversion = true;
3282
3283 emit_block_move (dest, src,
3284 GEN_INT (int_size_in_bytes (data->passed_type)),
3285 BLOCK_OP_NORMAL);
6071dc7f
RH
3286 }
3287 else
bfc45551
AM
3288 emit_move_insn (dest, src);
3289 }
3290
3291 if (to_conversion)
3292 {
bb27eeda
SE
3293 all->first_conversion_insn = get_insns ();
3294 all->last_conversion_insn = get_last_insn ();
bfc45551 3295 end_sequence ();
6071dc7f 3296 }
6f086dfc 3297
6071dc7f
RH
3298 SET_DECL_RTL (parm, data->stack_parm);
3299}
3412b298 3300
6071dc7f
RH
3301/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3302 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3303
6071dc7f 3304static void
3b3f318a 3305assign_parms_unsplit_complex (struct assign_parm_data_all *all,
9771b263 3306 vec<tree> fnargs)
6071dc7f
RH
3307{
3308 tree parm;
6ccd356e 3309 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3310 unsigned i = 0;
f4ef873c 3311
3b3f318a 3312 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3313 {
3314 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3315 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3316 {
3317 rtx tmp, real, imag;
3318 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3319
9771b263
DN
3320 real = DECL_RTL (fnargs[i]);
3321 imag = DECL_RTL (fnargs[i + 1]);
6071dc7f 3322 if (inner != GET_MODE (real))
6f086dfc 3323 {
6071dc7f
RH
3324 real = gen_lowpart_SUBREG (inner, real);
3325 imag = gen_lowpart_SUBREG (inner, imag);
3326 }
6ccd356e
AM
3327
3328 if (TREE_ADDRESSABLE (parm))
3329 {
3330 rtx rmem, imem;
3331 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3332 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3333 DECL_MODE (parm),
3334 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3335
3336 /* split_complex_arg put the real and imag parts in
3337 pseudos. Move them to memory. */
3a695389 3338 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3339 set_mem_attributes (tmp, parm, 1);
3340 rmem = adjust_address_nv (tmp, inner, 0);
3341 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3342 push_to_sequence2 (all->first_conversion_insn,
3343 all->last_conversion_insn);
6ccd356e
AM
3344 emit_move_insn (rmem, real);
3345 emit_move_insn (imem, imag);
bb27eeda
SE
3346 all->first_conversion_insn = get_insns ();
3347 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3348 end_sequence ();
3349 }
3350 else
3351 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3352 SET_DECL_RTL (parm, tmp);
7e41ffa2 3353
9771b263
DN
3354 real = DECL_INCOMING_RTL (fnargs[i]);
3355 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
6071dc7f
RH
3356 if (inner != GET_MODE (real))
3357 {
3358 real = gen_lowpart_SUBREG (inner, real);
3359 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3360 }
6071dc7f 3361 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3362 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3363 i++;
6f086dfc 3364 }
6f086dfc 3365 }
6071dc7f
RH
3366}
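/* Editor's note (hypothetical illustration): for an ABI whose
   split_complex_arg hook splits complex arguments, a parameter such as

       double _Complex f (double _Complex z);

   arrives as two DFmode halves in the augmented argument list; the
   unsplit step above recombines them so the original parm ends up with

       DECL_RTL (z) = (concat:DC (reg:DF 90) (reg:DF 91))

   unless z is addressable, in which case both halves are stored into a
   single stack slot and that MEM becomes DECL_RTL instead.  */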
3367
3368/* Assign RTL expressions to the function's parameters. This may involve
3369 copying them into registers and using those registers as the DECL_RTL. */
3370
6fe79279 3371static void
6071dc7f
RH
3372assign_parms (tree fndecl)
3373{
3374 struct assign_parm_data_all all;
3b3f318a 3375 tree parm;
9771b263 3376 vec<tree> fnargs;
3b3f318a 3377 unsigned i;
6f086dfc 3378
38173d38 3379 crtl->args.internal_arg_pointer
150cdc9e 3380 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3381
3382 assign_parms_initialize_all (&all);
3383 fnargs = assign_parms_augmented_arg_list (&all);
3384
9771b263 3385 FOR_EACH_VEC_ELT (fnargs, i, parm)
ded9bf77 3386 {
6071dc7f
RH
3387 struct assign_parm_data_one data;
3388
3389 /* Extract the type of PARM; adjust it according to ABI. */
3390 assign_parm_find_data_types (&all, parm, &data);
3391
3392 /* Early out for errors and void parameters. */
3393 if (data.passed_mode == VOIDmode)
ded9bf77 3394 {
6071dc7f
RH
3395 SET_DECL_RTL (parm, const0_rtx);
3396 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3397 continue;
3398 }
196c42cd 3399
2e3f842f
L
3400 /* Estimate stack alignment from parameter alignment. */
3401 if (SUPPORTS_STACK_ALIGNMENT)
3402 {
c2ed6cf8
NF
3403 unsigned int align
3404 = targetm.calls.function_arg_boundary (data.promoted_mode,
3405 data.passed_type);
ae58e548
JJ
3406 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3407 align);
2e3f842f 3408 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3409 align = MINIMUM_ALIGNMENT (data.nominal_type,
3410 TYPE_MODE (data.nominal_type),
3411 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3412 if (crtl->stack_alignment_estimated < align)
3413 {
3414 gcc_assert (!crtl->stack_realign_processed);
3415 crtl->stack_alignment_estimated = align;
3416 }
3417 }
b8698a0f 3418
910ad8de 3419 if (cfun->stdarg && !DECL_CHAIN (parm))
8117c488 3420 assign_parms_setup_varargs (&all, &data, false);
196c42cd 3421
6071dc7f
RH
3422 /* Find out where the parameter arrives in this function. */
3423 assign_parm_find_entry_rtl (&all, &data);
3424
3425 /* Find out where stack space for this parameter might be. */
3426 if (assign_parm_is_stack_parm (&all, &data))
3427 {
3428 assign_parm_find_stack_rtl (parm, &data);
3429 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3430 }
6071dc7f
RH
3431
3432 /* Record permanently how this parm was passed. */
a82ff31f
JJ
3433 if (data.passed_pointer)
3434 {
3435 rtx incoming_rtl
3436 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3437 data.entry_parm);
3438 set_decl_incoming_rtl (parm, incoming_rtl, true);
3439 }
3440 else
3441 set_decl_incoming_rtl (parm, data.entry_parm, false);
6071dc7f
RH
3442
3443 /* Update info on where next arg arrives in registers. */
d5cc9181 3444 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3445 data.passed_type, data.named_arg);
6071dc7f
RH
3446
3447 assign_parm_adjust_stack_rtl (&data);
3448
3449 if (assign_parm_setup_block_p (&data))
27e29549 3450 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
3451 else if (data.passed_pointer || use_register_for_decl (parm))
3452 assign_parm_setup_reg (&all, parm, &data);
3453 else
3454 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3455 }
3456
3b3f318a 3457 if (targetm.calls.split_complex_arg)
6ccd356e 3458 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3459
9771b263 3460 fnargs.release ();
3b3f318a 3461
3412b298
JW
3462 /* Output all parameter conversion instructions (possibly including calls)
3463 now that all parameters have been copied out of hard registers. */
bb27eeda 3464 emit_insn (all.first_conversion_insn);
3412b298 3465
2e3f842f
L
3466 /* Estimate reload stack alignment from scalar return mode. */
3467 if (SUPPORTS_STACK_ALIGNMENT)
3468 {
3469 if (DECL_RESULT (fndecl))
3470 {
3471 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3472 enum machine_mode mode = TYPE_MODE (type);
3473
3474 if (mode != BLKmode
3475 && mode != VOIDmode
3476 && !AGGREGATE_TYPE_P (type))
3477 {
3478 unsigned int align = GET_MODE_ALIGNMENT (mode);
3479 if (crtl->stack_alignment_estimated < align)
3480 {
3481 gcc_assert (!crtl->stack_realign_processed);
3482 crtl->stack_alignment_estimated = align;
3483 }
3484 }
b8698a0f 3485 }
2e3f842f
L
3486 }
3487
b36a8cc2
OH
3488 /* If we are receiving a struct value address as the first argument, set up
3489 the RTL for the function result. As this might require code to convert
3490 the transmitted address to Pmode, we do this here to ensure that possible
3491 preliminary conversions of the address have been emitted already. */
6071dc7f 3492 if (all.function_result_decl)
b36a8cc2 3493 {
6071dc7f
RH
3494 tree result = DECL_RESULT (current_function_decl);
3495 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3496 rtx x;
fa8db1f7 3497
cc77ae10 3498 if (DECL_BY_REFERENCE (result))
8dcfef8f
AO
3499 {
3500 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3501 x = addr;
3502 }
cc77ae10
JM
3503 else
3504 {
8dcfef8f
AO
3505 SET_DECL_VALUE_EXPR (result,
3506 build1 (INDIRECT_REF, TREE_TYPE (result),
3507 all.function_result_decl));
cc77ae10
JM
3508 addr = convert_memory_address (Pmode, addr);
3509 x = gen_rtx_MEM (DECL_MODE (result), addr);
3510 set_mem_attributes (x, result, 1);
3511 }
8dcfef8f
AO
3512
3513 DECL_HAS_VALUE_EXPR_P (result) = 1;
3514
b36a8cc2
OH
3515 SET_DECL_RTL (result, x);
3516 }
3517
53c428d0 3518 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3519 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3520 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3521 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3522
3523 /* Adjust function incoming argument size for alignment and
3524 minimum length. */
3525
2e4ceca5 3526 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
38173d38 3527 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3528 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3529
6f086dfc 3530#ifdef ARGS_GROW_DOWNWARD
38173d38 3531 crtl->args.arg_offset_rtx
477eff96 3532 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3533 : expand_expr (size_diffop (all.stack_args_size.var,
3534 size_int (-all.stack_args_size.constant)),
bbbbb16a 3535 NULL_RTX, VOIDmode, EXPAND_NORMAL));
6f086dfc 3536#else
38173d38 3537 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3538#endif
3539
3540 /* See how many bytes, if any, of its args a function should try to pop
3541 on return. */
3542
079e7538
NF
3543 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3544 TREE_TYPE (fndecl),
3545 crtl->args.size);
6f086dfc 3546
3b69d50e
RK
3547 /* For a stdarg.h function, save info about the
3548 regs and stack space used by the named args. */
6f086dfc 3549
d5cc9181 3550 crtl->args.info = all.args_so_far_v;
6f086dfc
RS
3551
3552 /* Set the rtx used for the function return value. Put this in its
3553 own variable so any optimizers that need this information don't have
3554 to include tree.h. Do this here so it gets done when an inlined
3555 function gets output. */
3556
38173d38 3557 crtl->return_rtx
19e7881c
MM
3558 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3559 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3560
3561 /* If scalar return value was computed in a pseudo-reg, or was a named
3562 return value that got dumped to the stack, copy that to the hard
3563 return register. */
3564 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3565 {
3566 tree decl_result = DECL_RESULT (fndecl);
3567 rtx decl_rtl = DECL_RTL (decl_result);
3568
3569 if (REG_P (decl_rtl)
3570 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3571 : DECL_REGISTER (decl_result))
3572 {
3573 rtx real_decl_rtl;
3574
1d636cc6
RG
3575 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3576 fndecl, true);
ce5e43d0 3577 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3578 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3579 holds the hard register containing the return value, not a
3580 temporary pseudo. */
38173d38 3581 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3582 }
3583 }
6f086dfc 3584}
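/* Editor's note: a small worked example of the rounding of
   crtl->args.size above, assuming a hypothetical target with
   PARM_BOUNDARY == 64 (8-byte argument slots) and no reserved
   reg_parm_stack_space:

       all.stack_args_size.constant == 20 bytes of stack arguments
       crtl->args.size = CEIL_ROUND (20, 8) == 24

   i.e. the incoming argument block is padded out to a whole number of
   PARM_BOUNDARY units.  */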
4744afba
RH
3585
3586/* A subroutine of gimplify_parameters, invoked via walk_tree.
3587 For all seen types, gimplify their sizes. */
3588
3589static tree
3590gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3591{
3592 tree t = *tp;
3593
3594 *walk_subtrees = 0;
3595 if (TYPE_P (t))
3596 {
3597 if (POINTER_TYPE_P (t))
3598 *walk_subtrees = 1;
ad50bc8d
RH
3599 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3600 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3601 {
726a989a 3602 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3603 *walk_subtrees = 1;
3604 }
3605 }
3606
3607 return NULL;
3608}
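/* Editor's note: a hypothetical declaration showing the kind of type the
   walker above exists for:

       void g (int n, double a[n][n]);

   The parameter 'a' is a pointer to a variable-length array type, so the
   pointed-to TYPE_SIZE is not a compile-time constant; gimplify_type_sizes
   turns the size computations into explicit statements emitted at the
   start of the function.  */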
3609
3610/* Gimplify the parameter list for current_function_decl. This involves
3611 evaluating SAVE_EXPRs of variable sized parameters and generating code
726a989a
RB
3612 to implement callee copies of reference parameters. Returns a sequence of
3613 statements to add to the beginning of the function. */
4744afba 3614
726a989a 3615gimple_seq
4744afba
RH
3616gimplify_parameters (void)
3617{
3618 struct assign_parm_data_all all;
3b3f318a 3619 tree parm;
726a989a 3620 gimple_seq stmts = NULL;
9771b263 3621 vec<tree> fnargs;
3b3f318a 3622 unsigned i;
4744afba
RH
3623
3624 assign_parms_initialize_all (&all);
3625 fnargs = assign_parms_augmented_arg_list (&all);
3626
9771b263 3627 FOR_EACH_VEC_ELT (fnargs, i, parm)
4744afba
RH
3628 {
3629 struct assign_parm_data_one data;
3630
3631 /* Extract the type of PARM; adjust it according to ABI. */
3632 assign_parm_find_data_types (&all, parm, &data);
3633
3634 /* Early out for errors and void parameters. */
3635 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3636 continue;
3637
3638 /* Update info on where next arg arrives in registers. */
d5cc9181 3639 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3640 data.passed_type, data.named_arg);
4744afba
RH
3641
3642 /* ??? Once upon a time variable_size stuffed parameter list
3643 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3644 turned out to be less than manageable in the gimple world.
3645 Now we have to hunt them down ourselves. */
3646 walk_tree_without_duplicates (&data.passed_type,
3647 gimplify_parm_type, &stmts);
3648
b38f3813 3649 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3650 {
3651 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3652 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3653 }
3654
3655 if (data.passed_pointer)
3656 {
3657 tree type = TREE_TYPE (data.passed_type);
d5cc9181 3658 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4744afba
RH
3659 type, data.named_arg))
3660 {
3661 tree local, t;
3662
b38f3813 3663 /* For constant-sized objects, this is trivial; for
4744afba 3664 variable-sized objects, we have to play games. */
b38f3813
EB
3665 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3666 && !(flag_stack_check == GENERIC_STACK_CHECK
3667 && compare_tree_int (DECL_SIZE_UNIT (parm),
3668 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba 3669 {
5dac1dae 3670 local = create_tmp_var (type, get_name (parm));
4744afba 3671 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3672 /* If PARM was addressable, move that flag over
3673 to the local copy, as its address will be taken,
37609bf0
RG
3674 not the PARM's. Keep the parm's address-taken flag as
3675 we'll query that flag during gimplification. */
04487a2f 3676 if (TREE_ADDRESSABLE (parm))
37609bf0 3677 TREE_ADDRESSABLE (local) = 1;
5dac1dae
JJ
3678 else if (TREE_CODE (type) == COMPLEX_TYPE
3679 || TREE_CODE (type) == VECTOR_TYPE)
3680 DECL_GIMPLE_REG_P (local) = 1;
4744afba
RH
3681 }
3682 else
3683 {
5039610b 3684 tree ptr_type, addr;
4744afba
RH
3685
3686 ptr_type = build_pointer_type (type);
c98b08ff 3687 addr = create_tmp_reg (ptr_type, get_name (parm));
4744afba
RH
3688 DECL_IGNORED_P (addr) = 0;
3689 local = build_fold_indirect_ref (addr);
3690
e79983f4 3691 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
c28f4b5c 3692 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
13e49da9
TV
3693 size_int (DECL_ALIGN (parm)));
3694
d3c12306 3695 /* The call has been built for a variable-sized object. */
63d2a353 3696 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3697 t = fold_convert (ptr_type, t);
726a989a 3698 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3699 gimplify_and_add (t, &stmts);
3700 }
3701
726a989a 3702 gimplify_assign (local, parm, &stmts);
4744afba 3703
833b3afe
DB
3704 SET_DECL_VALUE_EXPR (parm, local);
3705 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3706 }
3707 }
3708 }
3709
9771b263 3710 fnargs.release ();
3b3f318a 3711
4744afba
RH
3712 return stmts;
3713}
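/* Editor's sketch (assumptions: an ABI that passes large aggregates by
   invisible reference and reports them as callee-copied; all names below
   are illustrative).  For

       void h (struct big s) { use (s); }

   the loop above conceptually prepends statements equivalent to

       struct big s.1;        constant size: an ordinary temporary
       s.1 = *s_ptr;          the copy is made by the callee
       ... the body then refers to s.1 through DECL_VALUE_EXPR ...

   and when 'struct big' has variable size the temporary is obtained with
   __builtin_alloca_with_align before the copy is gimplified.  */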
75dc3319 3714\f
6f086dfc
RS
3715/* Compute the size and offset from the start of the stacked arguments for a
3716 parm passed in mode PASSED_MODE and with type TYPE.
3717
3718 INITIAL_OFFSET_PTR points to the current offset into the stacked
3719 arguments.
3720
e7949876
AM
3721 The starting offset and size for this parm are returned in
3722 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3723 nonzero, the offset is that of stack slot, which is returned in
3724 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3725 padding required from the initial offset ptr to the stack slot.
6f086dfc 3726
cc2902df 3727 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3728 never be set if REG_PARM_STACK_SPACE is not defined.
3729
2e4ceca5
UW
3730 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3731 for arguments which are passed in registers.
3732
6f086dfc
RS
3733 FNDECL is the function in which the argument was defined.
3734
3735 There are two types of rounding that are done. The first, controlled by
c2ed6cf8
NF
3736 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3737 argument list to be aligned to the specific boundary (in bits). This
3738 rounding affects the initial and starting offsets, but not the argument
3739 size.
6f086dfc
RS
3740
3741 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3742 optionally rounds the size of the parm to PARM_BOUNDARY. The
3743 initial offset is not affected by this rounding, while the size always
3744 is and the starting offset may be. */
3745
e7949876
AM
3746/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3747 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3748 callers pass in the total size of args so far as
e7949876 3749 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3750
6f086dfc 3751void
fa8db1f7 3752locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
2e4ceca5
UW
3753 int reg_parm_stack_space, int partial,
3754 tree fndecl ATTRIBUTE_UNUSED,
fa8db1f7
AJ
3755 struct args_size *initial_offset_ptr,
3756 struct locate_and_pad_arg_data *locate)
6f086dfc 3757{
e7949876
AM
3758 tree sizetree;
3759 enum direction where_pad;
123148b5 3760 unsigned int boundary, round_boundary;
e7949876 3761 int part_size_in_regs;
6f086dfc 3762
6f086dfc
RS
3763 /* If we have found a stack parm before we reach the end of the
3764 area reserved for registers, skip that area. */
3765 if (! in_regs)
3766 {
6f086dfc
RS
3767 if (reg_parm_stack_space > 0)
3768 {
3769 if (initial_offset_ptr->var)
3770 {
3771 initial_offset_ptr->var
3772 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3773 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3774 initial_offset_ptr->constant = 0;
3775 }
3776 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3777 initial_offset_ptr->constant = reg_parm_stack_space;
3778 }
3779 }
6f086dfc 3780
78a52f11 3781 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
3782
3783 sizetree
3784 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3785 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
c2ed6cf8 3786 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
123148b5
BS
3787 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3788 type);
6e985040 3789 locate->where_pad = where_pad;
2e3f842f
L
3790
3791 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3792 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3793 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3794
bfc45551 3795 locate->boundary = boundary;
6f086dfc 3796
2e3f842f
L
3797 if (SUPPORTS_STACK_ALIGNMENT)
3798 {
3799 /* stack_alignment_estimated can't change after stack has been
3800 realigned. */
3801 if (crtl->stack_alignment_estimated < boundary)
3802 {
3803 if (!crtl->stack_realign_processed)
3804 crtl->stack_alignment_estimated = boundary;
3805 else
3806 {
3807 /* If stack is realigned and stack alignment value
3808 hasn't been finalized, it is OK not to increase
3809 stack_alignment_estimated. The bigger alignment
3810 requirement is recorded in stack_alignment_needed
3811 below. */
3812 gcc_assert (!crtl->stack_realign_finalized
3813 && crtl->stack_realign_needed);
3814 }
3815 }
3816 }
3817
c7e777b5
RH
3818 /* Remember if the outgoing parameter requires extra alignment on the
3819 calling function side. */
cb91fab0
JH
3820 if (crtl->stack_alignment_needed < boundary)
3821 crtl->stack_alignment_needed = boundary;
2e3f842f
L
3822 if (crtl->preferred_stack_boundary < boundary)
3823 crtl->preferred_stack_boundary = boundary;
c7e777b5 3824
6f086dfc 3825#ifdef ARGS_GROW_DOWNWARD
e7949876 3826 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 3827 if (initial_offset_ptr->var)
e7949876
AM
3828 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3829 initial_offset_ptr->var);
9dff28ab 3830
e7949876
AM
3831 {
3832 tree s2 = sizetree;
3833 if (where_pad != none
cc269bb6 3834 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 3835 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 3836 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
e7949876
AM
3837 SUB_PARM_SIZE (locate->slot_offset, s2);
3838 }
3839
3840 locate->slot_offset.constant += part_size_in_regs;
9dff28ab 3841
2e4ceca5 3842 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
3843 pad_to_arg_alignment (&locate->slot_offset, boundary,
3844 &locate->alignment_pad);
9dff28ab 3845
e7949876
AM
3846 locate->size.constant = (-initial_offset_ptr->constant
3847 - locate->slot_offset.constant);
6f086dfc 3848 if (initial_offset_ptr->var)
e7949876
AM
3849 locate->size.var = size_binop (MINUS_EXPR,
3850 size_binop (MINUS_EXPR,
3851 ssize_int (0),
3852 initial_offset_ptr->var),
3853 locate->slot_offset.var);
3854
3855 /* Pad_below needs the pre-rounded size to know how much to pad
3856 below. */
3857 locate->offset = locate->slot_offset;
3858 if (where_pad == downward)
3859 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 3860
6f086dfc 3861#else /* !ARGS_GROW_DOWNWARD */
2e4ceca5 3862 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
3863 pad_to_arg_alignment (initial_offset_ptr, boundary,
3864 &locate->alignment_pad);
3865 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
3866
3867#ifdef PUSH_ROUNDING
3868 if (passed_mode != BLKmode)
3869 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3870#endif
3871
d4b0a7a0
DE
3872 /* Pad_below needs the pre-rounded size to know how much to pad below
3873 so this must be done before rounding up. */
e7949876
AM
3874 locate->offset = locate->slot_offset;
3875 if (where_pad == downward)
3876 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 3877
6f086dfc 3878 if (where_pad != none
cc269bb6 3879 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 3880 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 3881 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
6f086dfc 3882
e7949876
AM
3883 ADD_PARM_SIZE (locate->size, sizetree);
3884
3885 locate->size.constant -= part_size_in_regs;
6f086dfc 3886#endif /* ARGS_GROW_DOWNWARD */
099590dc
MM
3887
3888#ifdef FUNCTION_ARG_OFFSET
3889 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3890#endif
6f086dfc
RS
3891}
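/* Editor's note: a worked example of the two roundings described above,
   for a hypothetical target where arguments grow upward, PARM_BOUNDARY
   and the argument's function_arg_boundary are both 32 bits,
   STACK_POINTER_OFFSET is 0, padding is upward, and the argument is a
   5-byte BLKmode object passed entirely on the stack:

       *initial_offset_ptr   = 14   running size of earlier arguments
       locate->slot_offset   = 16   14 rounded up to the 4-byte boundary
       locate->offset        = 16   no pad_below for upward padding
       locate->size          =  8   5 rounded up to PARM_BOUNDARY

   so the following argument starts at offset 24.  */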
3892
e16c591a
RS
3893/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3894 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3895
6f086dfc 3896static void
fa8db1f7
AJ
3897pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3898 struct args_size *alignment_pad)
6f086dfc 3899{
a544cfd2
KG
3900 tree save_var = NULL_TREE;
3901 HOST_WIDE_INT save_constant = 0;
a751cd5b 3902 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
3903 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3904
3905#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
3906 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3907 the real alignment of %sp. However, when it does this, the
3908 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
3909 if (SPARC_STACK_BOUNDARY_HACK)
3910 sp_offset = 0;
3911#endif
4fc026cd 3912
6f6b8f81 3913 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
3914 {
3915 save_var = offset_ptr->var;
3916 save_constant = offset_ptr->constant;
3917 }
3918
3919 alignment_pad->var = NULL_TREE;
3920 alignment_pad->constant = 0;
4fc026cd 3921
6f086dfc
RS
3922 if (boundary > BITS_PER_UNIT)
3923 {
3924 if (offset_ptr->var)
3925 {
a594a19c
GK
3926 tree sp_offset_tree = ssize_int (sp_offset);
3927 tree offset = size_binop (PLUS_EXPR,
3928 ARGS_SIZE_TREE (*offset_ptr),
3929 sp_offset_tree);
6f086dfc 3930#ifdef ARGS_GROW_DOWNWARD
a594a19c 3931 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 3932#else
a594a19c 3933 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 3934#endif
a594a19c
GK
3935
3936 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
3937 /* ARGS_SIZE_TREE includes constant term. */
3938 offset_ptr->constant = 0;
6f6b8f81 3939 if (boundary > PARM_BOUNDARY)
dd3f0101 3940 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 3941 save_var);
6f086dfc
RS
3942 }
3943 else
718fe406 3944 {
a594a19c 3945 offset_ptr->constant = -sp_offset +
6f086dfc 3946#ifdef ARGS_GROW_DOWNWARD
a594a19c 3947 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3948#else
a594a19c 3949 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3950#endif
6f6b8f81 3951 if (boundary > PARM_BOUNDARY)
718fe406
KH
3952 alignment_pad->constant = offset_ptr->constant - save_constant;
3953 }
6f086dfc
RS
3954 }
3955}
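/* Editor's note: the rounding above is applied to the address
   sp + STACK_POINTER_OFFSET, not to the raw offset.  With a hypothetical
   STACK_POINTER_OFFSET of 4 and an 8-byte boundary, an incoming constant
   offset of 10 becomes CEIL_ROUND (10 + 4, 8) - 4 == 12, so that the
   resulting argument address, not merely the offset, is 8-byte
   aligned.  */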
3956
3957static void
fa8db1f7 3958pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
6f086dfc
RS
3959{
3960 if (passed_mode != BLKmode)
3961 {
3962 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3963 offset_ptr->constant
3964 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3965 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3966 - GET_MODE_SIZE (passed_mode));
3967 }
3968 else
3969 {
3970 if (TREE_CODE (sizetree) != INTEGER_CST
3971 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3972 {
3973 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3974 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3975 /* Add it in. */
3976 ADD_PARM_SIZE (*offset_ptr, s2);
3977 SUB_PARM_SIZE (*offset_ptr, sizetree);
3978 }
3979 }
3980}
6f086dfc 3981\f
6f086dfc 3982
6fb5fa3c
DB
3983/* True if register REGNO was alive at a place where `setjmp' was
3984 called and was set more than once or is an argument. Such regs may
3985 be clobbered by `longjmp'. */
3986
3987static bool
3988regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3989{
3990 /* There appear to be cases where some local vars never reach the
3991 backend but have bogus regnos. */
3992 if (regno >= max_reg_num ())
3993 return false;
3994
3995 return ((REG_N_SETS (regno) > 1
fefa31b5
DM
3996 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3997 regno))
6fb5fa3c
DB
3998 && REGNO_REG_SET_P (setjmp_crosses, regno));
3999}
4000
4001/* Walk the tree of blocks describing the binding levels within a
4002 function and warn about variables that might be killed by setjmp or
4003 vfork. This is done after calling flow analysis and before register
4004 allocation, since register allocation will map the pseudo-regs to hard
4005 regs. */
4006
4007static void
4008setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 4009{
b3694847 4010 tree decl, sub;
6de9cd9a 4011
910ad8de 4012 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 4013 {
6de9cd9a 4014 if (TREE_CODE (decl) == VAR_DECL
bc41842b 4015 && DECL_RTL_SET_P (decl)
f8cfc6aa 4016 && REG_P (DECL_RTL (decl))
6fb5fa3c 4017 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4018 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 4019 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 4020 }
6de9cd9a 4021
87caf699 4022 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 4023 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
4024}
4025
6de9cd9a 4026/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
4027 but for arguments instead of local variables. */
4028
6fb5fa3c
DB
4029static void
4030setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 4031{
b3694847 4032 tree decl;
6f086dfc 4033 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 4034 decl; decl = DECL_CHAIN (decl))
6f086dfc 4035 if (DECL_RTL (decl) != 0
f8cfc6aa 4036 && REG_P (DECL_RTL (decl))
6fb5fa3c 4037 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4038 warning (OPT_Wclobbered,
2b001724 4039 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 4040 decl);
6f086dfc
RS
4041}
4042
6fb5fa3c
DB
4043/* Generate warning messages for variables live across setjmp. */
4044
b8698a0f 4045void
6fb5fa3c
DB
4046generate_setjmp_warnings (void)
4047{
4048 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4049
0cae8d31 4050 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
6fb5fa3c
DB
4051 || bitmap_empty_p (setjmp_crosses))
4052 return;
4053
4054 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4055 setjmp_args_warning (setjmp_crosses);
4056}
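#if 0  /* Editor's illustration only -- hypothetical user code showing the
          pattern the warnings above are aimed at; not part of function.c.  */
#include <setjmp.h>

static jmp_buf env;

int
example (void)
{
  int i = 0;                 /* may be kept in a pseudo-register */
  if (setjmp (env) == 0)
    {
      i = 1;                 /* modified between setjmp and longjmp ... */
      longjmp (env, 1);
    }
  return i;                  /* ... so the value read here is unreliable
                                unless 'i' is volatile; with -Wclobbered
                                GCC may warn that 'i' might be clobbered
                                by 'longjmp' or 'vfork'.  */
}
#endif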
4057
6f086dfc 4058\f
3373692b 4059/* Reverse the order of elements in the fragment chain T of blocks,
1e3c1d95
JJ
4060 and return the new head of the chain (old last element).
4061 In addition to that clear BLOCK_SAME_RANGE flags when needed
4062 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4063 its super fragment origin. */
3373692b
JJ
4064
4065static tree
4066block_fragments_nreverse (tree t)
4067{
1e3c1d95
JJ
4068 tree prev = 0, block, next, prev_super = 0;
4069 tree super = BLOCK_SUPERCONTEXT (t);
4070 if (BLOCK_FRAGMENT_ORIGIN (super))
4071 super = BLOCK_FRAGMENT_ORIGIN (super);
3373692b
JJ
4072 for (block = t; block; block = next)
4073 {
4074 next = BLOCK_FRAGMENT_CHAIN (block);
4075 BLOCK_FRAGMENT_CHAIN (block) = prev;
1e3c1d95
JJ
4076 if ((prev && !BLOCK_SAME_RANGE (prev))
4077 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4078 != prev_super))
4079 BLOCK_SAME_RANGE (block) = 0;
4080 prev_super = BLOCK_SUPERCONTEXT (block);
4081 BLOCK_SUPERCONTEXT (block) = super;
3373692b
JJ
4082 prev = block;
4083 }
1e3c1d95
JJ
4084 t = BLOCK_FRAGMENT_ORIGIN (t);
4085 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4086 != prev_super)
4087 BLOCK_SAME_RANGE (t) = 0;
4088 BLOCK_SUPERCONTEXT (t) = super;
3373692b
JJ
4089 return prev;
4090}
4091
4092/* Reverse the order of elements in the chain T of blocks,
4093 and return the new head of the chain (old last element).
4094 Also do the same on subblocks and reverse the order of elements
4095 in BLOCK_FRAGMENT_CHAIN as well. */
4096
4097static tree
4098blocks_nreverse_all (tree t)
4099{
4100 tree prev = 0, block, next;
4101 for (block = t; block; block = next)
4102 {
4103 next = BLOCK_CHAIN (block);
4104 BLOCK_CHAIN (block) = prev;
3373692b
JJ
4105 if (BLOCK_FRAGMENT_CHAIN (block)
4106 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
1e3c1d95
JJ
4107 {
4108 BLOCK_FRAGMENT_CHAIN (block)
4109 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4110 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4111 BLOCK_SAME_RANGE (block) = 0;
4112 }
4113 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
3373692b
JJ
4114 prev = block;
4115 }
4116 return prev;
4117}
4118
4119
a20612aa
RH
4120/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4121 and create duplicate blocks. */
4122/* ??? Need an option to either create block fragments or to create
4123 abstract origin duplicates of a source block. It really depends
4124 on what optimization has been performed. */
467456d0 4125
116eebd6 4126void
fa8db1f7 4127reorder_blocks (void)
467456d0 4128{
116eebd6 4129 tree block = DECL_INITIAL (current_function_decl);
467456d0 4130
1a4450c7 4131 if (block == NULL_TREE)
116eebd6 4132 return;
fc289cd1 4133
00f96dc9 4134 auto_vec<tree, 10> block_stack;
18c038b9 4135
a20612aa 4136 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4137 clear_block_marks (block);
a20612aa 4138
116eebd6
MM
4139 /* Prune the old trees away, so that they don't get in the way. */
4140 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4141 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4142
a20612aa 4143 /* Recreate the block tree from the note nesting. */
116eebd6 4144 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4145 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
467456d0
RS
4146}
4147
a20612aa 4148/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4149
6de9cd9a
DN
4150void
4151clear_block_marks (tree block)
cc1fe44f 4152{
a20612aa 4153 while (block)
cc1fe44f 4154 {
a20612aa 4155 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4156 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4157 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4158 }
4159}
4160
0a1c58a2 4161static void
691fe203
DM
4162reorder_blocks_1 (rtx_insn *insns, tree current_block,
4163 vec<tree> *p_block_stack)
0a1c58a2 4164{
691fe203 4165 rtx_insn *insn;
1e3c1d95 4166 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
0a1c58a2
JL
4167
4168 for (insn = insns; insn; insn = NEXT_INSN (insn))
4169 {
4b4bf941 4170 if (NOTE_P (insn))
0a1c58a2 4171 {
a38e7aa5 4172 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4173 {
4174 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4175 tree origin;
4176
3373692b
JJ
4177 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4178 origin = block;
a20612aa 4179
1e3c1d95
JJ
4180 if (prev_end)
4181 BLOCK_SAME_RANGE (prev_end) = 0;
4182 prev_end = NULL_TREE;
4183
a20612aa
RH
4184 /* If we have seen this block before, that means it now
4185 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4186 if (TREE_ASM_WRITTEN (block))
4187 {
a20612aa 4188 tree new_block = copy_node (block);
a20612aa 4189
1e3c1d95 4190 BLOCK_SAME_RANGE (new_block) = 0;
a20612aa
RH
4191 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4192 BLOCK_FRAGMENT_CHAIN (new_block)
4193 = BLOCK_FRAGMENT_CHAIN (origin);
4194 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4195
4196 NOTE_BLOCK (insn) = new_block;
4197 block = new_block;
0a1c58a2 4198 }
a20612aa 4199
1e3c1d95
JJ
4200 if (prev_beg == current_block && prev_beg)
4201 BLOCK_SAME_RANGE (block) = 1;
4202
4203 prev_beg = origin;
4204
0a1c58a2
JL
4205 BLOCK_SUBBLOCKS (block) = 0;
4206 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4207 /* When there's only one block for the entire function,
4208 current_block == block and we mustn't do this, it
4209 will cause infinite recursion. */
4210 if (block != current_block)
4211 {
1e3c1d95 4212 tree super;
51b7d006 4213 if (block != origin)
1e3c1d95
JJ
4214 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4215 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4216 (origin))
4217 == current_block);
9771b263 4218 if (p_block_stack->is_empty ())
1e3c1d95
JJ
4219 super = current_block;
4220 else
4221 {
9771b263 4222 super = p_block_stack->last ();
1e3c1d95
JJ
4223 gcc_assert (super == current_block
4224 || BLOCK_FRAGMENT_ORIGIN (super)
4225 == current_block);
4226 }
4227 BLOCK_SUPERCONTEXT (block) = super;
339a28b9
ZW
4228 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4229 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4230 current_block = origin;
339a28b9 4231 }
9771b263 4232 p_block_stack->safe_push (block);
0a1c58a2 4233 }
a38e7aa5 4234 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4235 {
9771b263 4236 NOTE_BLOCK (insn) = p_block_stack->pop ();
0a1c58a2 4237 current_block = BLOCK_SUPERCONTEXT (current_block);
1e3c1d95
JJ
4238 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4239 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4240 prev_beg = NULL_TREE;
4241 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4242 ? NOTE_BLOCK (insn) : NULL_TREE;
0a1c58a2
JL
4243 }
4244 }
1e3c1d95
JJ
4245 else
4246 {
4247 prev_beg = NULL_TREE;
4248 if (prev_end)
4249 BLOCK_SAME_RANGE (prev_end) = 0;
4250 prev_end = NULL_TREE;
4251 }
0a1c58a2
JL
4252 }
4253}
4254
467456d0
RS
4255/* Reverse the order of elements in the chain T of blocks,
4256 and return the new head of the chain (old last element). */
4257
6de9cd9a 4258tree
fa8db1f7 4259blocks_nreverse (tree t)
467456d0 4260{
3373692b
JJ
4261 tree prev = 0, block, next;
4262 for (block = t; block; block = next)
467456d0 4263 {
3373692b
JJ
4264 next = BLOCK_CHAIN (block);
4265 BLOCK_CHAIN (block) = prev;
4266 prev = block;
467456d0
RS
4267 }
4268 return prev;
4269}
4270
61e46a7d
NF
4271/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4272 by modifying the last node in chain 1 to point to chain 2. */
4273
4274tree
4275block_chainon (tree op1, tree op2)
4276{
4277 tree t1;
4278
4279 if (!op1)
4280 return op2;
4281 if (!op2)
4282 return op1;
4283
4284 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4285 continue;
4286 BLOCK_CHAIN (t1) = op2;
4287
4288#ifdef ENABLE_TREE_CHECKING
4289 {
4290 tree t2;
4291 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4292 gcc_assert (t2 != t1);
4293 }
4294#endif
4295
4296 return op1;
4297}
4298
18c038b9
MM
4299/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4300 non-NULL, list them all into VECTOR, in a depth-first preorder
4301 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4302 blocks. */
467456d0
RS
4303
4304static int
fa8db1f7 4305all_blocks (tree block, tree *vector)
467456d0 4306{
b2a59b15
MS
4307 int n_blocks = 0;
4308
a84efb51
JO
4309 while (block)
4310 {
4311 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4312
a84efb51
JO
4313 /* Record this block. */
4314 if (vector)
4315 vector[n_blocks] = block;
b2a59b15 4316
a84efb51 4317 ++n_blocks;
718fe406 4318
a84efb51
JO
4319 /* Record the subblocks, and their subblocks... */
4320 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4321 vector ? vector + n_blocks : 0);
4322 block = BLOCK_CHAIN (block);
4323 }
467456d0
RS
4324
4325 return n_blocks;
4326}
18c038b9
MM
4327
4328/* Return a vector containing all the blocks rooted at BLOCK. The
4329 number of elements in the vector is stored in N_BLOCKS_P. The
4330 vector is dynamically allocated; it is the caller's responsibility
4331 to call `free' on the pointer returned. */
718fe406 4332
18c038b9 4333static tree *
fa8db1f7 4334get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4335{
4336 tree *block_vector;
4337
4338 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4339 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4340 all_blocks (block, block_vector);
4341
4342 return block_vector;
4343}
4344
f83b236e 4345static GTY(()) int next_block_index = 2;
18c038b9
MM
4346
4347/* Set BLOCK_NUMBER for all the blocks in FN. */
4348
4349void
fa8db1f7 4350number_blocks (tree fn)
18c038b9
MM
4351{
4352 int i;
4353 int n_blocks;
4354 tree *block_vector;
4355
4356 /* For SDB and XCOFF debugging output, we start numbering the blocks
4357 from 1 within each function, rather than keeping a running
4358 count. */
4359#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4360 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4361 next_block_index = 1;
18c038b9
MM
4362#endif
4363
4364 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4365
4366 /* The top-level BLOCK isn't numbered at all. */
4367 for (i = 1; i < n_blocks; ++i)
4368 /* We number the blocks from two. */
4369 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4370
4371 free (block_vector);
4372
4373 return;
4374}
df8992f8
RH
4375
4376/* If VAR is present in a subblock of BLOCK, return the subblock. */
4377
24e47c76 4378DEBUG_FUNCTION tree
fa8db1f7 4379debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4380{
4381 tree t;
4382
4383 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4384 if (t == var)
4385 return block;
4386
4387 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4388 {
4389 tree ret = debug_find_var_in_block_tree (var, t);
4390 if (ret)
4391 return ret;
4392 }
4393
4394 return NULL_TREE;
4395}
467456d0 4396\f
db2960f4
SL
4397/* Keep track of whether we're in a dummy function context. If we are,
4398 we don't want to invoke the set_current_function hook, because we'll
4399 get into trouble if the hook calls target_reinit () recursively or
4400 when the initial initialization is not yet complete. */
4401
4402static bool in_dummy_function;
4403
ab442df7
MM
4404/* Invoke the target hook when setting cfun. Update the optimization options
4405 if the function uses different options than the default. */
db2960f4
SL
4406
4407static void
4408invoke_set_current_function_hook (tree fndecl)
4409{
4410 if (!in_dummy_function)
ab442df7
MM
4411 {
4412 tree opts = ((fndecl)
4413 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4414 : optimization_default_node);
4415
4416 if (!opts)
4417 opts = optimization_default_node;
4418
4419 /* Change optimization options if needed. */
4420 if (optimization_current_node != opts)
4421 {
4422 optimization_current_node = opts;
46625112 4423 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
ab442df7
MM
4424 }
4425
892c4745 4426 targetm.set_current_function (fndecl);
4b1baac8 4427 this_fn_optabs = this_target_optabs;
135204dd 4428
4b1baac8 4429 if (opts != optimization_default_node)
135204dd 4430 {
4b1baac8
RS
4431 init_tree_optimization_optabs (opts);
4432 if (TREE_OPTIMIZATION_OPTABS (opts))
4433 this_fn_optabs = (struct target_optabs *)
4434 TREE_OPTIMIZATION_OPTABS (opts);
135204dd 4435 }
ab442df7 4436 }
db2960f4
SL
4437}
4438
4439/* cfun should never be set directly; use this function. */
4440
4441void
4442set_cfun (struct function *new_cfun)
4443{
4444 if (cfun != new_cfun)
4445 {
4446 cfun = new_cfun;
4447 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4448 }
4449}
4450
db2960f4
SL
4451/* Initialized with NOGC, making this poisonous to the garbage collector. */
4452
9771b263 4453static vec<function_p> cfun_stack;
db2960f4 4454
af16bc76
MJ
4455/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4456 current_function_decl accordingly. */
db2960f4
SL
4457
4458void
4459push_cfun (struct function *new_cfun)
4460{
af16bc76
MJ
4461 gcc_assert ((!cfun && !current_function_decl)
4462 || (cfun && current_function_decl == cfun->decl));
9771b263 4463 cfun_stack.safe_push (cfun);
af16bc76 4464 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4
SL
4465 set_cfun (new_cfun);
4466}
4467
af16bc76 4468/* Pop cfun from the stack. Also set current_function_decl accordingly. */
db2960f4
SL
4469
4470void
4471pop_cfun (void)
4472{
9771b263 4473 struct function *new_cfun = cfun_stack.pop ();
af16bc76
MJ
4474 /* When in_dummy_function, we do have a cfun but current_function_decl is
4475 NULL. We also allow pushing NULL cfun and subsequently changing
4476 current_function_decl to something else and have both restored by
4477 pop_cfun. */
4478 gcc_checking_assert (in_dummy_function
4479 || !cfun
4480 || current_function_decl == cfun->decl);
38d34676 4481 set_cfun (new_cfun);
af16bc76 4482 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4 4483}
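/* Editor's note: the usual idiom for the pair above when some pass must
   temporarily operate in the context of another function ('callee_decl'
   is a hypothetical FUNCTION_DECL):

       push_cfun (DECL_STRUCT_FUNCTION (callee_decl));
       ... inspect or modify that function's cfun state ...
       pop_cfun ();

   push_cfun saves the current cfun on cfun_stack and switches both cfun
   and current_function_decl; pop_cfun restores the saved values.  */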
3e87758a
RL
4484
4485/* Return the current value of funcdef_no and increment it. */
4486int
b8698a0f 4487get_next_funcdef_no (void)
3e87758a
RL
4488{
4489 return funcdef_no++;
4490}
4491
903d1e67
XDL
4492/* Return the current value of funcdef_no. */
4493int
4494get_last_funcdef_no (void)
4495{
4496 return funcdef_no;
4497}
4498
3a70d621 4499/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4500 to the defaults. Set cfun to the newly-allocated object.
4501 Some of the helper functions invoked during initialization assume
4502 that cfun has already been set. Therefore, assign the new object
4503 directly into cfun and invoke the back end hook explicitly at the
4504 very end, rather than initializing a temporary and calling set_cfun
4505 on it.
182e0d71
AK
4506
4507 ABSTRACT_P is true if this is a function that will never be seen by
4508 the middle-end. Such functions are front-end concepts (like C++
4509 function templates) that do not correspond directly to functions
4510 placed in object files. */
7a80cf9a 4511
3a70d621 4512void
182e0d71 4513allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4514{
6de9cd9a 4515 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4516
766090c2 4517 cfun = ggc_cleared_alloc<function> ();
b384405b 4518
3a70d621 4519 init_eh_for_function ();
6f086dfc 4520
3a70d621
RH
4521 if (init_machine_status)
4522 cfun->machine = (*init_machine_status) ();
e2ecd91c 4523
7c800926
KT
4524#ifdef OVERRIDE_ABI_FORMAT
4525 OVERRIDE_ABI_FORMAT (fndecl);
4526#endif
4527
81464b2c 4528 if (fndecl != NULL_TREE)
3a70d621 4529 {
db2960f4
SL
4530 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4531 cfun->decl = fndecl;
70cf5bc1 4532 current_function_funcdef_no = get_next_funcdef_no ();
5b9db1bc
MJ
4533 }
4534
4535 invoke_set_current_function_hook (fndecl);
db2960f4 4536
5b9db1bc
MJ
4537 if (fndecl != NULL_TREE)
4538 {
4539 tree result = DECL_RESULT (fndecl);
182e0d71 4540 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4541 {
3a70d621 4542#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4543 cfun->returns_pcc_struct = 1;
3a70d621 4544#endif
e3b5732b 4545 cfun->returns_struct = 1;
db2960f4
SL
4546 }
4547
f38958e8 4548 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4549
db2960f4
SL
4550 /* Assume all registers in stdarg functions need to be saved. */
4551 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4552 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4553
4554 /* ??? This could be set on a per-function basis by the front-end
4555 but is this worth the hassle? */
4556 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
d764963b 4557 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
0b37ba8a
AK
4558
4559 if (!profile_flag && !flag_instrument_function_entry_exit)
4560 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
3a70d621 4561 }
db2960f4
SL
4562}
4563
4564/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4565 instead of just setting it. */
9d30f3c1 4566
db2960f4
SL
4567void
4568push_struct_function (tree fndecl)
4569{
af16bc76
MJ
4570 /* When in_dummy_function we might be in the middle of a pop_cfun and
4571 current_function_decl and cfun may not match. */
4572 gcc_assert (in_dummy_function
4573 || (!cfun && !current_function_decl)
4574 || (cfun && current_function_decl == cfun->decl));
9771b263 4575 cfun_stack.safe_push (cfun);
af16bc76 4576 current_function_decl = fndecl;
182e0d71 4577 allocate_struct_function (fndecl, false);
3a70d621 4578}
6f086dfc 4579
8f4f502f 4580/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4581 appropriate for emitting rtl at the start of a function. */
6f086dfc 4582
3a70d621 4583static void
db2960f4 4584prepare_function_start (void)
3a70d621 4585{
3e029763 4586 gcc_assert (!crtl->emit.x_last_insn);
fb0703f7 4587 init_temp_slots ();
0de456a5 4588 init_emit ();
bd60bab2 4589 init_varasm_status ();
0de456a5 4590 init_expr ();
bf08ebeb 4591 default_rtl_profile ();
6f086dfc 4592
a11e0df4 4593 if (flag_stack_usage_info)
d3c12306 4594 {
766090c2 4595 cfun->su = ggc_cleared_alloc<stack_usage> ();
d3c12306
EB
4596 cfun->su->static_stack_size = -1;
4597 }
4598
3a70d621 4599 cse_not_expected = ! optimize;
6f086dfc 4600
3a70d621
RH
4601 /* Caller save not needed yet. */
4602 caller_save_needed = 0;
6f086dfc 4603
3a70d621
RH
4604 /* We haven't done register allocation yet. */
4605 reg_renumber = 0;
6f086dfc 4606
b384405b
BS
4607 /* Indicate that we have not instantiated virtual registers yet. */
4608 virtuals_instantiated = 0;
4609
1b3d8f8a
GK
4610 /* Indicate that we want CONCATs now. */
4611 generating_concat_p = 1;
4612
b384405b
BS
4613 /* Indicate we have no need of a frame pointer yet. */
4614 frame_pointer_needed = 0;
b384405b
BS
4615}
4616
4617/* Initialize the rtl expansion mechanism so that we can do simple things
4618 like generate sequences. This is used to provide a context during global
db2960f4
SL
4619 initialization of some passes. You must call expand_dummy_function_end
4620 to exit this context. */
4621
b384405b 4622void
fa8db1f7 4623init_dummy_function_start (void)
b384405b 4624{
db2960f4
SL
4625 gcc_assert (!in_dummy_function);
4626 in_dummy_function = true;
4627 push_struct_function (NULL_TREE);
4628 prepare_function_start ();
b384405b
BS
4629}
4630
4631/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4632 and initialize static variables for generating RTL for the statements
4633 of the function. */
4634
4635void
fa8db1f7 4636init_function_start (tree subr)
b384405b 4637{
db2960f4
SL
4638 if (subr && DECL_STRUCT_FUNCTION (subr))
4639 set_cfun (DECL_STRUCT_FUNCTION (subr));
4640 else
182e0d71 4641 allocate_struct_function (subr, false);
b9b5f433
JH
4642
4643 /* Initialize backend, if needed. */
4644 initialize_rtl ();
4645
db2960f4 4646 prepare_function_start ();
2c7eebae 4647 decide_function_section (subr);
b384405b 4648
6f086dfc
RS
4649 /* Warn if this value is an aggregate type,
4650 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4651 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4652 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4653}
5c7675e9 4654
7d69de61
RH
4655/* Expand code to verify the stack_protect_guard. This is invoked at
4656 the end of a function to be protected. */
4657
4658#ifndef HAVE_stack_protect_test
b76be05e
JJ
4659# define HAVE_stack_protect_test 0
4660# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
7d69de61
RH
4661#endif
4662
b755446c 4663void
7d69de61
RH
4664stack_protect_epilogue (void)
4665{
4666 tree guard_decl = targetm.stack_protect_guard ();
19f8b229 4667 rtx_code_label *label = gen_label_rtx ();
7d69de61
RH
4668 rtx x, y, tmp;
4669
08d4cc33
RH
4670 x = expand_normal (crtl->stack_protect_guard);
4671 y = expand_normal (guard_decl);
7d69de61
RH
4672
4673 /* Allow the target to compare Y with X without leaking either into
4674 a register. */
fedfecef 4675 switch ((int) (HAVE_stack_protect_test != 0))
7d69de61
RH
4676 {
4677 case 1:
3aebbe5f 4678 tmp = gen_stack_protect_test (x, y, label);
7d69de61
RH
4679 if (tmp)
4680 {
4681 emit_insn (tmp);
7d69de61
RH
4682 break;
4683 }
4684 /* FALLTHRU */
4685
4686 default:
4687 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4688 break;
4689 }
4690
4691 /* The noreturn predictor has been moved to the tree level. The rtl-level
4692 predictors estimate this branch about 20%, which isn't enough to get
4693 things moved out of line. Since this is the only extant case of adding
4694 a noreturn function at the rtl level, it doesn't seem worth doing ought
4695 except adding the prediction by hand. */
4696 tmp = get_last_insn ();
4697 if (JUMP_P (tmp))
9f215bf5 4698 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
7d69de61 4699
b3c144a3
SB
4700 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4701 free_temp_slots ();
7d69de61
RH
4702 emit_label (label);
4703}
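/* A hedged, conceptual sketch (not part of the sources): with
   -fstack-protector the RTL emitted above corresponds roughly to

     if (saved_canary != __guard)     // saved_canary: crtl->stack_protect_guard
       __stack_chk_fail ();           // targetm.stack_protect_fail () by default

   where the names are illustrative; the guard symbol and the failure routine
   are whatever the target hooks supply.  */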
4704\f
6f086dfc
RS
4705/* Start the RTL for a new function, and set variables used for
4706 emitting RTL.
4707 SUBR is the FUNCTION_DECL node.
4708 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4709 the function's parameters, which must be run at any return statement. */
4710
4711void
b79c5284 4712expand_function_start (tree subr)
6f086dfc 4713{
6f086dfc
RS
4714 /* Make sure volatile mem refs aren't considered
4715 valid operands of arithmetic insns. */
4716 init_recog_no_volatile ();
4717
e3b5732b 4718 crtl->profile
70f4f91c
WC
4719 = (profile_flag
4720 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4721
e3b5732b 4722 crtl->limit_stack
a157febd
GK
4723 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4724
52a11cbf
RH
4725 /* Make the label for return statements to jump to. Do not special
4726 case machines with special return instructions -- they will be
4727 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4728 return_label = gen_label_rtx ();
6f086dfc
RS
4729
4730 /* Initialize rtx used to return the value. */
4731 /* Do this before assign_parms so that we copy the struct value address
4732 before any library calls that assign parms might generate. */
4733
4734 /* Decide whether to return the value in memory or in a register. */
61f71b34 4735 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4736 {
4737 /* Returning something that won't go in a register. */
b3694847 4738 rtx value_address = 0;
6f086dfc
RS
4739
4740#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4741 if (cfun->returns_pcc_struct)
6f086dfc
RS
4742 {
4743 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4744 value_address = assemble_static_space (size);
4745 }
4746 else
4747#endif
4748 {
2225b57c 4749 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4750 /* Expect to be passed the address of a place to store the value.
4751 If it is passed as an argument, assign_parms will take care of
4752 it. */
61f71b34 4753 if (sv)
6f086dfc
RS
4754 {
4755 value_address = gen_reg_rtx (Pmode);
61f71b34 4756 emit_move_insn (value_address, sv);
6f086dfc
RS
4757 }
4758 }
4759 if (value_address)
ccdecf58 4760 {
01c98570
JM
4761 rtx x = value_address;
4762 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4763 {
4764 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4765 set_mem_attributes (x, DECL_RESULT (subr), 1);
4766 }
abde42f7 4767 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4768 }
6f086dfc
RS
4769 }
4770 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4771 /* If return mode is void, this decl rtl should not be used. */
19e7881c 4772 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4773 else
a53e14c0 4774 {
d5bf1143
RH
4775 /* Compute the return values into a pseudo reg, which we will copy
4776 into the true return register after the cleanups are done. */
bef5d8b6
RS
4777 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4778 if (TYPE_MODE (return_type) != BLKmode
4779 && targetm.calls.return_in_msb (return_type))
4780 /* expand_function_end will insert the appropriate padding in
4781 this case. Use the return value's natural (unpadded) mode
4782 within the function proper. */
4783 SET_DECL_RTL (DECL_RESULT (subr),
4784 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 4785 else
0bccc606 4786 {
bef5d8b6
RS
4787 /* In order to figure out what mode to use for the pseudo, we
4788 figure out what the mode of the eventual return register will
4789 actually be, and use that. */
1d636cc6 4790 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
4791
4792 /* Structures that are returned in registers are not
4793 aggregate_value_p, so we may see a PARALLEL or a REG. */
4794 if (REG_P (hard_reg))
4795 SET_DECL_RTL (DECL_RESULT (subr),
4796 gen_reg_rtx (GET_MODE (hard_reg)));
4797 else
4798 {
4799 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4800 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4801 }
0bccc606 4802 }
a53e14c0 4803
084a1106
JDA
4804 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4805 result to the real return register(s). */
4806 DECL_REGISTER (DECL_RESULT (subr)) = 1;
a53e14c0 4807 }
6f086dfc
RS
4808
4809 /* Initialize rtx for parameters and local variables.
4810 In some cases this requires emitting insns. */
0d1416c6 4811 assign_parms (subr);
6f086dfc 4812
6de9cd9a
DN
4813 /* If function gets a static chain arg, store it. */
4814 if (cfun->static_chain_decl)
4815 {
7e140280 4816 tree parm = cfun->static_chain_decl;
531ca746 4817 rtx local, chain, insn;
7e140280 4818
531ca746
RH
4819 local = gen_reg_rtx (Pmode);
4820 chain = targetm.calls.static_chain (current_function_decl, true);
4821
4822 set_decl_incoming_rtl (parm, chain, false);
7e140280 4823 SET_DECL_RTL (parm, local);
7e140280 4824 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 4825
531ca746
RH
4826 insn = emit_move_insn (local, chain);
4827
4828 /* Mark the register as eliminable, similar to parameters. */
4829 if (MEM_P (chain)
4830 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
7543f918 4831 set_dst_reg_note (insn, REG_EQUIV, chain, local);
3fd48b12
EB
4832
4833 /* If we aren't optimizing, save the static chain onto the stack. */
4834 if (!optimize)
4835 {
4836 tree saved_static_chain_decl
4837 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
4838 DECL_NAME (parm), TREE_TYPE (parm));
4839 rtx saved_static_chain_rtx
4840 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4841 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
4842 emit_move_insn (saved_static_chain_rtx, chain);
4843 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
4844 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4845 }
6de9cd9a
DN
4846 }
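/* A hedged illustration (not part of the sources): the static chain handled
   above arises, for instance, for a GNU C nested function that refers to a
   local of its enclosing function:

     int outer (int x)
     {
       int inner (void) { return x; }   // gets outer's frame via the chain
       return inner ();
     }

   At -O0 the chain is additionally spilled to a stack slot (the
   saved_static_chain_decl created above) so that debuggers can find it.
   outer and inner are made-up names.  */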
4847
4848 /* If the function receives a non-local goto, then store the
4849 bits we need to restore the frame pointer. */
4850 if (cfun->nonlocal_goto_save_area)
4851 {
4852 tree t_save;
4853 rtx r_save;
4854
4846b435 4855 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
ca5f4331 4856 gcc_assert (DECL_RTL_SET_P (var));
6de9cd9a 4857
6bbec3e1
L
4858 t_save = build4 (ARRAY_REF,
4859 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
3244e67d
RS
4860 cfun->nonlocal_goto_save_area,
4861 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 4862 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6bbec3e1 4863 gcc_assert (GET_MODE (r_save) == Pmode);
f0c51a1e 4864
88280cf9 4865 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
4866 update_nonlocal_goto_save_area ();
4867 }
f0c51a1e 4868
6f086dfc
RS
4869 /* The following was moved from init_function_start.
4870 The move is supposed to make sdb output more accurate. */
4871 /* Indicate the beginning of the function body,
4872 as opposed to parm setup. */
2e040219 4873 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 4874
ede497cf
SB
4875 gcc_assert (NOTE_P (get_last_insn ()));
4876
6f086dfc
RS
4877 parm_birth_insn = get_last_insn ();
4878
e3b5732b 4879 if (crtl->profile)
f6f315fe 4880 {
f6f315fe 4881#ifdef PROFILE_HOOK
df696a75 4882 PROFILE_HOOK (current_function_funcdef_no);
411707f4 4883#endif
f6f315fe 4884 }
411707f4 4885
6d3cc8f0
EB
4886 /* If we are doing generic stack checking, the probe should go here. */
4887 if (flag_stack_check == GENERIC_STACK_CHECK)
ede497cf 4888 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
4889}
4890\f
49ad7cfa
BS
4891/* Undo the effects of init_dummy_function_start. */
4892void
fa8db1f7 4893expand_dummy_function_end (void)
49ad7cfa 4894{
db2960f4
SL
4895 gcc_assert (in_dummy_function);
4896
49ad7cfa
BS
4897 /* End any sequences that failed to be closed due to syntax errors. */
4898 while (in_sequence_p ())
4899 end_sequence ();
4900
4901 /* Outside function body, can't compute type's actual size
4902 until next function's body starts. */
fa51b01b 4903
01d939e8
BS
4904 free_after_parsing (cfun);
4905 free_after_compilation (cfun);
db2960f4
SL
4906 pop_cfun ();
4907 in_dummy_function = false;
49ad7cfa
BS
4908}
4909
c13fde05
RH
4910/* Call DOIT for each hard register used as a return value from
4911 the current function. */
bd695e1e
RH
4912
4913void
fa8db1f7 4914diddle_return_value (void (*doit) (rtx, void *), void *arg)
bd695e1e 4915{
38173d38 4916 rtx outgoing = crtl->return_rtx;
c13fde05
RH
4917
4918 if (! outgoing)
4919 return;
bd695e1e 4920
f8cfc6aa 4921 if (REG_P (outgoing))
c13fde05
RH
4922 (*doit) (outgoing, arg);
4923 else if (GET_CODE (outgoing) == PARALLEL)
4924 {
4925 int i;
bd695e1e 4926
c13fde05
RH
4927 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4928 {
4929 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4930
f8cfc6aa 4931 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 4932 (*doit) (x, arg);
bd695e1e
RH
4933 }
4934 }
4935}
4936
c13fde05 4937static void
fa8db1f7 4938do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4939{
c41c1387 4940 emit_clobber (reg);
c13fde05
RH
4941}
4942
4943void
fa8db1f7 4944clobber_return_register (void)
c13fde05
RH
4945{
4946 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
4947
4948 /* In case we do use pseudo to return value, clobber it too. */
4949 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4950 {
4951 tree decl_result = DECL_RESULT (current_function_decl);
4952 rtx decl_rtl = DECL_RTL (decl_result);
4953 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4954 {
4955 do_clobber_return_reg (decl_rtl, NULL);
4956 }
4957 }
c13fde05
RH
4958}
4959
4960static void
fa8db1f7 4961do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4962{
c41c1387 4963 emit_use (reg);
c13fde05
RH
4964}
4965
0bf8477d 4966static void
fa8db1f7 4967use_return_register (void)
c13fde05
RH
4968{
4969 diddle_return_value (do_use_return_reg, NULL);
4970}
4971
902edd36
JH
4972/* Possibly warn about unused parameters. */
4973void
4974do_warn_unused_parameter (tree fn)
4975{
4976 tree decl;
4977
4978 for (decl = DECL_ARGUMENTS (fn);
910ad8de 4979 decl; decl = DECL_CHAIN (decl))
902edd36 4980 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
534fd534
DF
4981 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4982 && !TREE_NO_WARNING (decl))
b9b8dde3 4983 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
902edd36
JH
4984}
4985
862d0b35
DN
4986/* Set the location of the insn chain starting at INSN to LOC. */
4987
4988static void
dc01c3d1 4989set_insn_locations (rtx_insn *insn, int loc)
862d0b35 4990{
dc01c3d1 4991 while (insn != NULL)
862d0b35
DN
4992 {
4993 if (INSN_P (insn))
4994 INSN_LOCATION (insn) = loc;
4995 insn = NEXT_INSN (insn);
4996 }
4997}
4998
71c0e7fc 4999/* Generate RTL for the end of the current function. */
6f086dfc
RS
5000
5001void
fa8db1f7 5002expand_function_end (void)
6f086dfc 5003{
932f0847 5004 rtx clobber_after;
6f086dfc 5005
964be02f
RH
5006 /* If arg_pointer_save_area was referenced only from a nested
5007 function, we will not have initialized it yet. Do that now. */
e3b5732b 5008 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 5009 get_arg_pointer_save_area ();
964be02f 5010
b38f3813 5011 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
5012 do a stack probe at the start of the function to ensure we have enough
5013 space for another stack frame. */
b38f3813 5014 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66 5015 {
691fe203 5016 rtx_insn *insn, *seq;
11044f66
RK
5017
5018 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 5019 if (CALL_P (insn))
11044f66 5020 {
c35af30f 5021 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 5022 start_sequence ();
c35af30f
EB
5023 if (STACK_CHECK_MOVING_SP)
5024 anti_adjust_stack_and_probe (max_frame_size, true);
5025 else
5026 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
5027 seq = get_insns ();
5028 end_sequence ();
5368224f 5029 set_insn_locations (seq, prologue_location);
ede497cf 5030 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
5031 break;
5032 }
5033 }
5034
6f086dfc
RS
5035 /* End any sequences that failed to be closed due to syntax errors. */
5036 while (in_sequence_p ())
5f4f0e22 5037 end_sequence ();
6f086dfc 5038
6f086dfc
RS
5039 clear_pending_stack_adjust ();
5040 do_pending_stack_adjust ();
5041
6f086dfc
RS
5042 /* Output a linenumber for the end of the function.
5043 SDB depends on this. */
5368224f 5044 set_curr_insn_location (input_location);
6f086dfc 5045
fbffc70a 5046 /* Before the return label (if any), clobber the return
a1f300c0 5047 registers so that they are not propagated live to the rest of
fbffc70a
GK
5048 the function. This can only happen with functions that drop
5049 through; if there had been a return statement, there would
932f0847
JH
5050 have either been a return rtx, or a jump to the return label.
5051
5052 We delay actual code generation until after the current_function_value_rtx
5053 is computed. */
5054 clobber_after = get_last_insn ();
fbffc70a 5055
526c334b
KH
5056 /* Output the label for the actual return from the function. */
5057 emit_label (return_label);
6f086dfc 5058
677f3fa8 5059 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
815eb8f0
AM
5060 {
5061 /* Let except.c know where it should emit the call to unregister
5062 the function context for sjlj exceptions. */
5063 if (flag_exceptions)
5064 sjlj_emit_function_exit_after (get_last_insn ());
5065 }
6fb5fa3c
DB
5066 else
5067 {
5068 /* We want to ensure that instructions that may trap are not
5069 moved into the epilogue by scheduling, because we don't
5070 always emit unwind information for the epilogue. */
8f4f502f 5071 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
5072 emit_insn (gen_blockage ());
5073 }
0b59e81e 5074
652b0932
RH
5075 /* If this is an implementation of throw, do what's necessary to
5076 communicate between __builtin_eh_return and the epilogue. */
5077 expand_eh_return ();
5078
3e4eac3f
RH
5079 /* If scalar return value was computed in a pseudo-reg, or was a named
5080 return value that got dumped to the stack, copy that to the hard
5081 return register. */
19e7881c 5082 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 5083 {
3e4eac3f
RH
5084 tree decl_result = DECL_RESULT (current_function_decl);
5085 rtx decl_rtl = DECL_RTL (decl_result);
5086
5087 if (REG_P (decl_rtl)
5088 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5089 : DECL_REGISTER (decl_result))
5090 {
38173d38 5091 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 5092
ce5e43d0 5093 /* This should be set in assign_parms. */
0bccc606 5094 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
5095
5096 /* If this is a BLKmode structure being returned in registers,
5097 then use the mode computed in expand_return. Note that if
797a6ac1 5098 decl_rtl is memory, then its mode may have been changed,
38173d38 5099 but that crtl->return_rtx has not. */
3e4eac3f 5100 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 5101 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 5102
bef5d8b6
RS
5103 /* If a non-BLKmode return value should be padded at the least
5104 significant end of the register, shift it left by the appropriate
5105 amount. BLKmode results are handled using the group load/store
5106 machinery. */
5107 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
66de4d7c 5108 && REG_P (real_decl_rtl)
bef5d8b6
RS
5109 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5110 {
5111 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5112 REGNO (real_decl_rtl)),
5113 decl_rtl);
5114 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5115 }
3e4eac3f 5116 /* If a named return value dumped decl_return to memory, then
797a6ac1 5117 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 5118 extension. */
bef5d8b6 5119 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 5120 {
8df83eae 5121 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
5122 promote_function_mode (TREE_TYPE (decl_result),
5123 GET_MODE (decl_rtl), &unsignedp,
5124 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
5125
5126 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5127 }
aa570f54 5128 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
5129 {
5130 /* If expand_function_start has created a PARALLEL for decl_rtl,
5131 move the result to the real return registers. Otherwise, do
5132 a group load from decl_rtl for a named return. */
5133 if (GET_CODE (decl_rtl) == PARALLEL)
5134 emit_group_move (real_decl_rtl, decl_rtl);
5135 else
5136 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 5137 TREE_TYPE (decl_result),
084a1106
JDA
5138 int_size_in_bytes (TREE_TYPE (decl_result)));
5139 }
652b0932
RH
5140 /* In the case of complex integer modes smaller than a word, we'll
5141 need to generate some non-trivial bitfield insertions. Do that
5142 on a pseudo and not the hard register. */
5143 else if (GET_CODE (decl_rtl) == CONCAT
5144 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5145 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5146 {
5147 int old_generating_concat_p;
5148 rtx tmp;
5149
5150 old_generating_concat_p = generating_concat_p;
5151 generating_concat_p = 0;
5152 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5153 generating_concat_p = old_generating_concat_p;
5154
5155 emit_move_insn (tmp, decl_rtl);
5156 emit_move_insn (real_decl_rtl, tmp);
5157 }
3e4eac3f
RH
5158 else
5159 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 5160 }
6f086dfc
RS
5161 }
5162
5163 /* If returning a structure, arrange to return the address of the value
5164 in a place where debuggers expect to find it.
5165
5166 If returning a structure PCC style,
5167 the caller also depends on this value.
e3b5732b
JH
5168 And cfun->returns_pcc_struct is not necessarily set. */
5169 if (cfun->returns_struct
5170 || cfun->returns_pcc_struct)
6f086dfc 5171 {
cc77ae10 5172 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5173 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5174 rtx outgoing;
5175
5176 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5177 type = TREE_TYPE (type);
5178 else
5179 value_address = XEXP (value_address, 0);
5180
1d636cc6
RG
5181 outgoing = targetm.calls.function_value (build_pointer_type (type),
5182 current_function_decl, true);
6f086dfc
RS
5183
5184 /* Mark this as a function return value so integrate will delete the
5185 assignment and USE below when inlining this function. */
5186 REG_FUNCTION_VALUE_P (outgoing) = 1;
5187
d1608933 5188 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5189 value_address = convert_memory_address (GET_MODE (outgoing),
5190 value_address);
d1608933 5191
6f086dfc 5192 emit_move_insn (outgoing, value_address);
d1608933
RK
5193
5194 /* Show return register used to hold result (in this case the address
5195 of the result). */
38173d38 5196 crtl->return_rtx = outgoing;
6f086dfc
RS
5197 }
5198
79c7fda6
JJ
5199 /* Emit the actual code to clobber the return register. Don't emit
5200 it if clobber_after is a barrier, in which case the previous basic block
5201 certainly doesn't fall thru into the exit block. */
5202 if (!BARRIER_P (clobber_after))
5203 {
5204 rtx seq;
797a6ac1 5205
79c7fda6
JJ
5206 start_sequence ();
5207 clobber_return_register ();
5208 seq = get_insns ();
5209 end_sequence ();
932f0847 5210
79c7fda6
JJ
5211 emit_insn_after (seq, clobber_after);
5212 }
932f0847 5213
609c3937 5214 /* Output the label for the naked return from the function. */
4c33221c
UW
5215 if (naked_return_label)
5216 emit_label (naked_return_label);
6e3077c6 5217
25108646
AH
5218 /* @@@ This is a kludge. We want to ensure that instructions that
5219 may trap are not moved into the epilogue by scheduling, because
56d17681 5220 we don't always emit unwind information for the epilogue. */
f0a0390e 5221 if (cfun->can_throw_non_call_exceptions
677f3fa8 5222 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
56d17681 5223 emit_insn (gen_blockage ());
25108646 5224
7d69de61 5225 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5226 if (crtl->stack_protect_guard)
7d69de61
RH
5227 stack_protect_epilogue ();
5228
40184445
BS
5229 /* If we had calls to alloca, and this machine needs
5230 an accurate stack pointer to exit the function,
5231 insert some code to save and restore the stack pointer. */
5232 if (! EXIT_IGNORE_STACK
e3b5732b 5233 && cfun->calls_alloca)
40184445 5234 {
9eac0f2a 5235 rtx tem = 0, seq;
40184445 5236
9eac0f2a
RH
5237 start_sequence ();
5238 emit_stack_save (SAVE_FUNCTION, &tem);
5239 seq = get_insns ();
5240 end_sequence ();
5241 emit_insn_before (seq, parm_birth_insn);
5242
5243 emit_stack_restore (SAVE_FUNCTION, tem);
40184445
BS
5244 }
5245
c13fde05
RH
5246 /* ??? This should no longer be necessary since stupid is no longer with
5247 us, but there are some parts of the compiler (eg reload_combine, and
5248 sh mach_dep_reorg) that still try and compute their own lifetime info
5249 instead of using the general framework. */
5250 use_return_register ();
6f086dfc 5251}
278ed218
RH
5252
5253rtx
bd60bab2 5254get_arg_pointer_save_area (void)
278ed218 5255{
bd60bab2 5256 rtx ret = arg_pointer_save_area;
278ed218
RH
5257
5258 if (! ret)
5259 {
bd60bab2
JH
5260 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5261 arg_pointer_save_area = ret;
964be02f
RH
5262 }
5263
e3b5732b 5264 if (! crtl->arg_pointer_save_area_init)
964be02f
RH
5265 {
5266 rtx seq;
278ed218 5267
797a6ac1 5268 /* Save the arg pointer at the beginning of the function. The
964be02f 5269 generated stack slot may not be a valid memory address, so we
278ed218
RH
5270 have to check it and fix it if necessary. */
5271 start_sequence ();
1a8cb155 5272 emit_move_insn (validize_mem (copy_rtx (ret)),
2e3f842f 5273 crtl->args.internal_arg_pointer);
2f937369 5274 seq = get_insns ();
278ed218
RH
5275 end_sequence ();
5276
964be02f 5277 push_topmost_sequence ();
1cb2fc7b 5278 emit_insn_after (seq, entry_of_function ());
964be02f 5279 pop_topmost_sequence ();
c1d9a70a
ILT
5280
5281 crtl->arg_pointer_save_area_init = true;
278ed218
RH
5282 }
5283
5284 return ret;
5285}
bdac5f58 5286\f
cd9c1ca8
RH
5287/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5288 for the first time. */
bdac5f58 5289
0a1c58a2 5290static void
dc01c3d1 5291record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
bdac5f58 5292{
dc01c3d1 5293 rtx_insn *tmp;
cd9c1ca8 5294 htab_t hash = *hashp;
0a1c58a2 5295
cd9c1ca8
RH
5296 if (hash == NULL)
5297 *hashp = hash
5298 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5299
5300 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5301 {
5302 void **slot = htab_find_slot (hash, tmp, INSERT);
5303 gcc_assert (*slot == NULL);
5304 *slot = tmp;
5305 }
5306}
5307
cd400280
RH
5308/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5309 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5310 insn, then record COPY as well. */
cd9c1ca8
RH
5311
5312void
cd400280 5313maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
cd9c1ca8 5314{
cd400280 5315 htab_t hash;
cd9c1ca8
RH
5316 void **slot;
5317
cd400280
RH
5318 hash = epilogue_insn_hash;
5319 if (!hash || !htab_find (hash, insn))
5320 {
5321 hash = prologue_insn_hash;
5322 if (!hash || !htab_find (hash, insn))
5323 return;
5324 }
cd9c1ca8 5325
cd400280 5326 slot = htab_find_slot (hash, copy, INSERT);
cd9c1ca8
RH
5327 gcc_assert (*slot == NULL);
5328 *slot = copy;
bdac5f58
TW
5329}
5330
cd9c1ca8
RH
5331/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5332 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5333
cd9c1ca8
RH
5334static bool
5335contains (const_rtx insn, htab_t hash)
bdac5f58 5336{
cd9c1ca8
RH
5337 if (hash == NULL)
5338 return false;
bdac5f58 5339
cd9c1ca8 5340 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5341 {
e0944870 5342 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
cd9c1ca8 5343 int i;
e0944870
DM
5344 for (i = seq->len () - 1; i >= 0; i--)
5345 if (htab_find (hash, seq->element (i)))
cd9c1ca8
RH
5346 return true;
5347 return false;
bdac5f58 5348 }
cd9c1ca8
RH
5349
5350 return htab_find (hash, insn) != NULL;
bdac5f58 5351}
5c7675e9
RH
5352
5353int
4f588890 5354prologue_epilogue_contains (const_rtx insn)
5c7675e9 5355{
cd9c1ca8 5356 if (contains (insn, prologue_insn_hash))
5c7675e9 5357 return 1;
cd9c1ca8 5358 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5359 return 1;
5360 return 0;
5361}
bdac5f58 5362
170d8157 5363#ifdef HAVE_return
4c029f40
TV
5364/* Insert use of return register before the end of BB. */
5365
5366static void
5367emit_use_return_register_into_block (basic_block bb)
5368{
1e1b18c1 5369 rtx seq, insn;
4c029f40
TV
5370 start_sequence ();
5371 use_return_register ();
5372 seq = get_insns ();
5373 end_sequence ();
1e1b18c1
EB
5374 insn = BB_END (bb);
5375#ifdef HAVE_cc0
5376 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5377 insn = prev_cc0_setter (insn);
5378#endif
5379 emit_insn_before (seq, insn);
4c029f40
TV
5380}
5381
484db665
BS
5382
5383/* Create a return pattern, either simple_return or return, depending on
5384 simple_p. */
5385
5386static rtx
5387gen_return_pattern (bool simple_p)
5388{
5389#ifdef HAVE_simple_return
5390 return simple_p ? gen_simple_return () : gen_return ();
5391#else
5392 gcc_assert (!simple_p);
5393 return gen_return ();
5394#endif
5395}
5396
5397/* Insert an appropriate return pattern at the end of block BB. This
5398 also means updating block_for_insn appropriately. SIMPLE_P is
5399 the same as in gen_return_pattern and passed to it. */
69732dcb 5400
f30e25a3 5401void
484db665 5402emit_return_into_block (bool simple_p, basic_block bb)
69732dcb 5403{
484db665
BS
5404 rtx jump, pat;
5405 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5406 pat = PATTERN (jump);
26898771
BS
5407 if (GET_CODE (pat) == PARALLEL)
5408 pat = XVECEXP (pat, 0, 0);
5409 gcc_assert (ANY_RETURN_P (pat));
5410 JUMP_LABEL (jump) = pat;
69732dcb 5411}
484db665 5412#endif
69732dcb 5413
387748de
AM
5414/* Set JUMP_LABEL for a return insn. */
5415
5416void
5417set_return_jump_label (rtx returnjump)
5418{
5419 rtx pat = PATTERN (returnjump);
5420 if (GET_CODE (pat) == PARALLEL)
5421 pat = XVECEXP (pat, 0, 0);
5422 if (ANY_RETURN_P (pat))
5423 JUMP_LABEL (returnjump) = pat;
5424 else
5425 JUMP_LABEL (returnjump) = ret_rtx;
5426}
5427
ffe14686
AM
5428#if defined (HAVE_return) || defined (HAVE_simple_return)
5429/* Return true if there are any active insns between HEAD and TAIL. */
f30e25a3 5430bool
ffd80b43 5431active_insn_between (rtx_insn *head, rtx_insn *tail)
39d52ae5 5432{
ffe14686
AM
5433 while (tail)
5434 {
5435 if (active_insn_p (tail))
5436 return true;
5437 if (tail == head)
5438 return false;
5439 tail = PREV_INSN (tail);
5440 }
5441 return false;
5442}
5443
5444/* LAST_BB is a block that exits and is empty of active instructions.
5445 Examine its predecessors for jumps that can be converted to
5446 (conditional) returns. */
f30e25a3 5447vec<edge>
ffe14686 5448convert_jumps_to_returns (basic_block last_bb, bool simple_p,
9771b263 5449 vec<edge> unconverted ATTRIBUTE_UNUSED)
ffe14686
AM
5450{
5451 int i;
5452 basic_block bb;
39d52ae5 5453 rtx label;
ffe14686
AM
5454 edge_iterator ei;
5455 edge e;
ef062b13 5456 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
39d52ae5 5457
ffe14686 5458 FOR_EACH_EDGE (e, ei, last_bb->preds)
fefa31b5 5459 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
9771b263 5460 src_bbs.quick_push (e->src);
ffe14686
AM
5461
5462 label = BB_HEAD (last_bb);
5463
9771b263 5464 FOR_EACH_VEC_ELT (src_bbs, i, bb)
39d52ae5 5465 {
68a1a6c0 5466 rtx_insn *jump = BB_END (bb);
ffe14686
AM
5467
5468 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5469 continue;
5470
5471 e = find_edge (bb, last_bb);
5472
5473 /* If we have an unconditional jump, we can replace that
5474 with a simple return instruction. */
5475 if (simplejump_p (jump))
5476 {
5477 /* The use of the return register might be present in the exit
5478 fallthru block. Either:
5479 - removing the use is safe, and we should remove the use in
5480 the exit fallthru block, or
5481 - removing the use is not safe, and we should add it here.
5482 For now, we conservatively choose the latter. Either of the
5483 2 helps in crossjumping. */
5484 emit_use_return_register_into_block (bb);
5485
5486 emit_return_into_block (simple_p, bb);
5487 delete_insn (jump);
5488 }
5489
5490 /* If we have a conditional jump branching to the last
5491 block, we can try to replace that with a conditional
5492 return instruction. */
5493 else if (condjump_p (jump))
5494 {
5495 rtx dest;
5496
5497 if (simple_p)
5498 dest = simple_return_rtx;
5499 else
5500 dest = ret_rtx;
5501 if (!redirect_jump (jump, dest, 0))
5502 {
5503#ifdef HAVE_simple_return
5504 if (simple_p)
5505 {
5506 if (dump_file)
5507 fprintf (dump_file,
5508 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5509 unconverted.safe_push (e);
ffe14686
AM
5510 }
5511#endif
5512 continue;
5513 }
5514
5515 /* See comment in simplejump_p case above. */
5516 emit_use_return_register_into_block (bb);
5517
5518 /* If this block has only one successor, it both jumps
5519 and falls through to the fallthru block, so we can't
5520 delete the edge. */
5521 if (single_succ_p (bb))
5522 continue;
5523 }
5524 else
5525 {
5526#ifdef HAVE_simple_return
5527 if (simple_p)
5528 {
5529 if (dump_file)
5530 fprintf (dump_file,
5531 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5532 unconverted.safe_push (e);
ffe14686
AM
5533 }
5534#endif
5535 continue;
5536 }
5537
5538 /* Fix up the CFG for the successful change we just made. */
fefa31b5 5539 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
d3b623c7 5540 e->flags &= ~EDGE_CROSSING;
39d52ae5 5541 }
9771b263 5542 src_bbs.release ();
ffe14686 5543 return unconverted;
39d52ae5
BS
5544}
5545
ffe14686 5546/* Emit a return insn for the exit fallthru block. */
f30e25a3 5547basic_block
ffe14686
AM
5548emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5549{
5550 basic_block last_bb = exit_fallthru_edge->src;
5551
5552 if (JUMP_P (BB_END (last_bb)))
5553 {
5554 last_bb = split_edge (exit_fallthru_edge);
5555 exit_fallthru_edge = single_succ_edge (last_bb);
5556 }
5557 emit_barrier_after (BB_END (last_bb));
5558 emit_return_into_block (simple_p, last_bb);
5559 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5560 return last_bb;
5561}
5562#endif
5563
5564
9faa82d8 5565/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58 5566 this into place with notes indicating where the prologue ends and where
484db665
BS
5567 the epilogue begins. Update the basic block information when possible.
5568
5569 Notes on epilogue placement:
5570 There are several kinds of edges to the exit block:
5571 * a single fallthru edge from LAST_BB
5572 * possibly, edges from blocks containing sibcalls
5573 * possibly, fake edges from infinite loops
5574
5575 The epilogue is always emitted on the fallthru edge from the last basic
5576 block in the function, LAST_BB, into the exit block.
5577
5578 If LAST_BB is empty except for a label, it is the target of every
5579 other basic block in the function that ends in a return. If a
5580 target has a return or simple_return pattern (possibly with
5581 conditional variants), these basic blocks can be changed so that a
5582 return insn is emitted into them, and their target is adjusted to
5583 the real exit block.
5584
5585 Notes on shrink wrapping: We implement a fairly conservative
5586 version of shrink-wrapping rather than the textbook one. We only
5587 generate a single prologue and a single epilogue. This is
5588 sufficient to catch a number of interesting cases involving early
5589 exits.
5590
5591 First, we identify the blocks that require the prologue to occur before
5592 them. These are the ones that modify a call-saved register, or reference
5593 any of the stack or frame pointer registers. To simplify things, we then
5594 mark everything reachable from these blocks as also requiring a prologue.
5595 This takes care of loops automatically, and avoids the need to examine
5596 whether MEMs reference the frame, since it is sufficient to check for
5597 occurrences of the stack or frame pointer.
5598
5599 We then compute the set of blocks for which the need for a prologue
5600 is anticipatable (borrowing terminology from the shrink-wrapping
5601 description in Muchnick's book). These are the blocks which either
5602 require a prologue themselves, or those that have only successors
5603 where the prologue is anticipatable. The prologue needs to be
5604 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5605 is not. For the moment, we ensure that only one such edge exists.
5606
5607 The epilogue is placed as described above, but we make a
5608 distinction between inserting return and simple_return patterns
5609 when modifying other blocks that end in a return. Blocks that end
5610 in a sibcall omit the sibcall_epilogue if the block is not in
5611 ANTIC. */
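/* A hedged illustration (not part of the sources) of the kind of function
   the conservative shrink-wrapping described above is aimed at:

     int f (int *p)
     {
       if (p == 0)
         return -1;          // early exit: touches no call-saved registers
       return g (*p) + 1;    // only this path needs the prologue/epilogue
     }

   The prologue is inserted on the single edge leading into the block that
   makes the call, and the early-exit path leaves through a simple_return
   that bypasses the epilogue.  f and g are made-up names.  */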
bdac5f58 5612
6fb5fa3c
DB
5613static void
5614thread_prologue_and_epilogue_insns (void)
bdac5f58 5615{
7458026b 5616 bool inserted;
484db665 5617#ifdef HAVE_simple_return
6e1aa848 5618 vec<edge> unconverted_simple_returns = vNULL;
ffe14686 5619 bitmap_head bb_flags;
484db665 5620#endif
9c8348cf 5621 rtx_insn *returnjump;
9c8348cf 5622 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
dc01c3d1 5623 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
484db665 5624 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
628f6a4e 5625 edge_iterator ei;
484db665
BS
5626
5627 df_analyze ();
e881bb1b 5628
fefa31b5 5629 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
7458026b
ILT
5630
5631 inserted = false;
9c8348cf
DM
5632 epilogue_end = NULL;
5633 returnjump = NULL;
7458026b
ILT
5634
5635 /* Can't deal with multiple successors of the entry block at the
5636 moment. Function should always have at least one entry
5637 point. */
fefa31b5
DM
5638 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5639 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
484db665
BS
5640 orig_entry_edge = entry_edge;
5641
dc01c3d1 5642 split_prologue_seq = NULL;
7458026b
ILT
5643 if (flag_split_stack
5644 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5645 == NULL))
5646 {
5647#ifndef HAVE_split_stack_prologue
5648 gcc_unreachable ();
5649#else
5650 gcc_assert (HAVE_split_stack_prologue);
5651
5652 start_sequence ();
5653 emit_insn (gen_split_stack_prologue ());
484db665 5654 split_prologue_seq = get_insns ();
7458026b
ILT
5655 end_sequence ();
5656
484db665 5657 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5368224f 5658 set_insn_locations (split_prologue_seq, prologue_location);
7458026b
ILT
5659#endif
5660 }
5661
dc01c3d1 5662 prologue_seq = NULL;
bdac5f58
TW
5663#ifdef HAVE_prologue
5664 if (HAVE_prologue)
5665 {
e881bb1b 5666 start_sequence ();
dc01c3d1 5667 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
e881bb1b 5668 emit_insn (seq);
bdac5f58 5669
b8698a0f 5670 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5671 if the profiling is on and the frame pointer is required. */
e3b5732b 5672 if (crtl->profile && frame_pointer_needed)
c41c1387 5673 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5674
bdac5f58 5675 /* Retain a map of the prologue insns. */
cd9c1ca8 5676 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5677 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5678
56d17681
UB
5679 /* Ensure that instructions are not moved into the prologue when
5680 profiling is on. The call to the profiling routine can be
5681 emitted within the live range of a call-clobbered register. */
3c5273a9 5682 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5683 emit_insn (gen_blockage ());
9185a8d5 5684
484db665 5685 prologue_seq = get_insns ();
e881bb1b 5686 end_sequence ();
5368224f 5687 set_insn_locations (prologue_seq, prologue_location);
484db665
BS
5688 }
5689#endif
e881bb1b 5690
ffe14686 5691#ifdef HAVE_simple_return
484db665
BS
5692 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5693
484db665
BS
5694 /* Try to perform a kind of shrink-wrapping, making sure the
5695 prologue/epilogue is emitted only around those parts of the
5696 function that require it. */
5697
f30e25a3 5698 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
bdac5f58 5699#endif
bdac5f58 5700
484db665
BS
5701 if (split_prologue_seq != NULL_RTX)
5702 {
f4b31a33 5703 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
484db665
BS
5704 inserted = true;
5705 }
5706 if (prologue_seq != NULL_RTX)
5707 {
5708 insert_insn_on_edge (prologue_seq, entry_edge);
5709 inserted = true;
5710 }
5711
19d3c25c
RH
5712 /* If the exit block has no non-fake predecessors, we don't need
5713 an epilogue. */
fefa31b5 5714 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
19d3c25c
RH
5715 if ((e->flags & EDGE_FAKE) == 0)
5716 break;
5717 if (e == NULL)
5718 goto epilogue_done;
5719
fefa31b5 5720 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
484db665 5721
fefa31b5 5722 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
ffe14686 5723
ffe14686
AM
5724#ifdef HAVE_simple_return
5725 if (entry_edge != orig_entry_edge)
f30e25a3
ZC
5726 exit_fallthru_edge
5727 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5728 &unconverted_simple_returns,
5729 &returnjump);
484db665 5730#endif
ffe14686
AM
5731#ifdef HAVE_return
5732 if (HAVE_return)
5733 {
5734 if (exit_fallthru_edge == NULL)
5735 goto epilogue_done;
69732dcb 5736
ffe14686
AM
5737 if (optimize)
5738 {
5739 basic_block last_bb = exit_fallthru_edge->src;
484db665 5740
ffe14686
AM
5741 if (LABEL_P (BB_HEAD (last_bb))
5742 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6e1aa848 5743 convert_jumps_to_returns (last_bb, false, vNULL);
ffe14686 5744
1ff2fd21
AM
5745 if (EDGE_COUNT (last_bb->preds) != 0
5746 && single_succ_p (last_bb))
484db665 5747 {
ffe14686
AM
5748 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5749 epilogue_end = returnjump = BB_END (last_bb);
484db665 5750#ifdef HAVE_simple_return
ffe14686
AM
5751 /* Emitting the return may add a basic block.
5752 Fix bb_flags for the added block. */
5753 if (last_bb != exit_fallthru_edge->src)
5754 bitmap_set_bit (&bb_flags, last_bb->index);
484db665 5755#endif
ffe14686 5756 goto epilogue_done;
69732dcb 5757 }
2dd8bc01 5758 }
69732dcb
RH
5759 }
5760#endif
cd9c1ca8
RH
5761
5762 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5763 this marker for the splits of EH_RETURN patterns, and nothing else
5764 uses the flag in the meantime. */
5765 epilogue_completed = 1;
5766
5767#ifdef HAVE_eh_return
5768 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5769 some targets, these get split to a special version of the epilogue
5770 code. In order to be able to properly annotate these with unwind
5771 info, try to split them now. If we get a valid split, drop an
5772 EPILOGUE_BEG note and mark the insns as epilogue insns. */
fefa31b5 5773 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
cd9c1ca8 5774 {
691fe203 5775 rtx_insn *prev, *last, *trial;
cd9c1ca8
RH
5776
5777 if (e->flags & EDGE_FALLTHRU)
5778 continue;
5779 last = BB_END (e->src);
5780 if (!eh_returnjump_p (last))
5781 continue;
5782
5783 prev = PREV_INSN (last);
5784 trial = try_split (PATTERN (last), last, 1);
5785 if (trial == last)
5786 continue;
5787
5788 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5789 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5790 }
5791#endif
5792
484db665
BS
5793 /* If nothing falls through into the exit block, we don't need an
5794 epilogue. */
623a66fa 5795
484db665 5796 if (exit_fallthru_edge == NULL)
623a66fa
R
5797 goto epilogue_done;
5798
bdac5f58
TW
5799#ifdef HAVE_epilogue
5800 if (HAVE_epilogue)
5801 {
19d3c25c 5802 start_sequence ();
2e040219 5803 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
dc01c3d1 5804 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
55c623b5
UW
5805 if (seq)
5806 emit_jump_insn (seq);
bdac5f58 5807
19d3c25c 5808 /* Retain a map of the epilogue insns. */
cd9c1ca8 5809 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 5810 set_insn_locations (seq, epilogue_location);
bdac5f58 5811
2f937369 5812 seq = get_insns ();
484db665 5813 returnjump = get_last_insn ();
718fe406 5814 end_sequence ();
e881bb1b 5815
484db665 5816 insert_insn_on_edge (seq, exit_fallthru_edge);
7458026b 5817 inserted = true;
dc0ff1c8
BS
5818
5819 if (JUMP_P (returnjump))
387748de 5820 set_return_jump_label (returnjump);
bdac5f58 5821 }
623a66fa 5822 else
bdac5f58 5823#endif
623a66fa
R
5824 {
5825 basic_block cur_bb;
5826
484db665 5827 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
623a66fa
R
5828 goto epilogue_done;
5829 /* We have a fall-through edge to the exit block, the source is not
5830 at the end of the function, and there will be an assembler epilogue
5831 at the end of the function.
5832 We can't use force_nonfallthru here, because that would try to
484db665 5833 use return. Inserting a jump 'by hand' is extremely messy, so
623a66fa 5834 we take advantage of cfg_layout_finalize using
484db665 5835 fixup_fallthru_exit_predecessor. */
35b6b437 5836 cfg_layout_initialize (0);
11cd3bed 5837 FOR_EACH_BB_FN (cur_bb, cfun)
24bd1a0b
DB
5838 if (cur_bb->index >= NUM_FIXED_BLOCKS
5839 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 5840 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
5841 cfg_layout_finalize ();
5842 }
cf103ca4 5843
19d3c25c 5844epilogue_done:
484db665 5845
a8ba47cb 5846 default_rtl_profile ();
e881bb1b 5847
ca1117cc 5848 if (inserted)
30a873c3 5849 {
cf103ca4
EB
5850 sbitmap blocks;
5851
30a873c3
ZD
5852 commit_edge_insertions ();
5853
cf103ca4 5854 /* Look for basic blocks within the prologue insns. */
8b1c6fd7 5855 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 5856 bitmap_clear (blocks);
d7c028c0
LC
5857 bitmap_set_bit (blocks, entry_edge->dest->index);
5858 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
cf103ca4
EB
5859 find_many_sub_basic_blocks (blocks);
5860 sbitmap_free (blocks);
5861
30a873c3
ZD
5862 /* The epilogue insns we inserted may cause the exit edge to no longer
5863 be fallthru. */
fefa31b5 5864 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
30a873c3
ZD
5865 {
5866 if (((e->flags & EDGE_FALLTHRU) != 0)
5867 && returnjump_p (BB_END (e->src)))
5868 e->flags &= ~EDGE_FALLTHRU;
5869 }
5870 }
0a1c58a2 5871
484db665 5872#ifdef HAVE_simple_return
f30e25a3
ZC
5873 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
5874 unconverted_simple_returns);
484db665
BS
5875#endif
5876
0a1c58a2
JL
5877#ifdef HAVE_sibcall_epilogue
5878 /* Emit sibling epilogues before any sibling call sites. */
fefa31b5
DM
5879 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
5880 ei_safe_edge (ei));
5881 )
0a1c58a2
JL
5882 {
5883 basic_block bb = e->src;
691fe203 5884 rtx_insn *insn = BB_END (bb);
484db665 5885 rtx ep_seq;
0a1c58a2 5886
4b4bf941 5887 if (!CALL_P (insn)
484db665 5888 || ! SIBLING_CALL_P (insn)
ffe14686 5889#ifdef HAVE_simple_return
484db665 5890 || (entry_edge != orig_entry_edge
ffe14686
AM
5891 && !bitmap_bit_p (&bb_flags, bb->index))
5892#endif
5893 )
628f6a4e
BE
5894 {
5895 ei_next (&ei);
5896 continue;
5897 }
0a1c58a2 5898
484db665
BS
5899 ep_seq = gen_sibcall_epilogue ();
5900 if (ep_seq)
5901 {
5902 start_sequence ();
5903 emit_note (NOTE_INSN_EPILOGUE_BEG);
5904 emit_insn (ep_seq);
dc01c3d1 5905 rtx_insn *seq = get_insns ();
484db665 5906 end_sequence ();
0a1c58a2 5907
484db665
BS
5908 /* Retain a map of the epilogue insns. Used in life analysis to
5909 avoid getting rid of sibcall epilogue insns. Do this before we
5910 actually emit the sequence. */
5911 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 5912 set_insn_locations (seq, epilogue_location);
2f937369 5913
484db665
BS
5914 emit_insn_before (seq, insn);
5915 }
628f6a4e 5916 ei_next (&ei);
0a1c58a2
JL
5917 }
5918#endif
ca1117cc 5919
86c82654
RH
5920#ifdef HAVE_epilogue
5921 if (epilogue_end)
5922 {
9c8348cf 5923 rtx_insn *insn, *next;
86c82654
RH
5924
5925 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 5926 There is no need, however, to be quite so anal about the existence
071a42f9 5927 of such a note. Also possibly move
84c1fa24
UW
5928 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5929 info generation. */
718fe406 5930 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
5931 {
5932 next = NEXT_INSN (insn);
b8698a0f 5933 if (NOTE_P (insn)
a38e7aa5 5934 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
5935 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5936 }
5937 }
5938#endif
6fb5fa3c 5939
ffe14686 5940#ifdef HAVE_simple_return
484db665 5941 bitmap_clear (&bb_flags);
ffe14686 5942#endif
484db665 5943
6fb5fa3c
DB
5944 /* Threading the prologue and epilogue changes the artificial refs
5945 in the entry and exit blocks. */
5946 epilogue_completed = 1;
5947 df_update_entry_exit_and_calls ();
bdac5f58
TW
5948}
5949
cd9c1ca8
RH
5950/* Reposition the prologue-end and epilogue-begin notes after
5951 instruction scheduling. */
bdac5f58
TW
5952
5953void
6fb5fa3c 5954reposition_prologue_and_epilogue_notes (void)
bdac5f58 5955{
cd9c1ca8
RH
5956#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5957 || defined (HAVE_sibcall_epilogue)
cd9c1ca8
RH
5958 /* Since the hash table is created on demand, the fact that it is
5959 non-null is a signal that it is non-empty. */
5960 if (prologue_insn_hash != NULL)
bdac5f58 5961 {
cd9c1ca8 5962 size_t len = htab_elements (prologue_insn_hash);
691fe203 5963 rtx_insn *insn, *last = NULL, *note = NULL;
bdac5f58 5964
cd9c1ca8
RH
5965 /* Scan from the beginning until we reach the last prologue insn. */
5966 /* ??? While we do have the CFG intact, there are two problems:
5967 (1) The prologue can contain loops (typically probing the stack),
5968 which means that the end of the prologue isn't in the first bb.
5969 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6fb5fa3c 5970 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
bdac5f58 5971 {
4b4bf941 5972 if (NOTE_P (insn))
9392c110 5973 {
a38e7aa5 5974 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
0a1c58a2
JL
5975 note = insn;
5976 }
cd9c1ca8 5977 else if (contains (insn, prologue_insn_hash))
0a1c58a2 5978 {
9f53e965
RH
5979 last = insn;
5980 if (--len == 0)
5981 break;
5982 }
5983 }
797a6ac1 5984
9f53e965
RH
5985 if (last)
5986 {
cd9c1ca8 5987 if (note == NULL)
9f53e965 5988 {
cd9c1ca8
RH
5989 /* Scan forward looking for the PROLOGUE_END note. It should
5990 be right at the beginning of the block, possibly with other
5991 insn notes that got moved there. */
5992 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
5993 {
5994 if (NOTE_P (note)
5995 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5996 break;
5997 }
9f53e965 5998 }
c93b03c2 5999
9f53e965 6000 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 6001 if (LABEL_P (last))
9f53e965
RH
6002 last = NEXT_INSN (last);
6003 reorder_insns (note, note, last);
bdac5f58 6004 }
0a1c58a2
JL
6005 }
6006
cd9c1ca8 6007 if (epilogue_insn_hash != NULL)
0a1c58a2 6008 {
cd9c1ca8
RH
6009 edge_iterator ei;
6010 edge e;
bdac5f58 6011
fefa31b5 6012 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
bdac5f58 6013 {
691fe203 6014 rtx_insn *insn, *first = NULL, *note = NULL;
997704f1 6015 basic_block bb = e->src;
c93b03c2 6016
997704f1 6017 /* Scan from the beginning until we reach the first epilogue insn. */
cd9c1ca8 6018 FOR_BB_INSNS (bb, insn)
9f53e965 6019 {
cd9c1ca8
RH
6020 if (NOTE_P (insn))
6021 {
6022 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6023 {
6024 note = insn;
997704f1 6025 if (first != NULL)
cd9c1ca8
RH
6026 break;
6027 }
6028 }
997704f1 6029 else if (first == NULL && contains (insn, epilogue_insn_hash))
cd9c1ca8 6030 {
997704f1 6031 first = insn;
cd9c1ca8
RH
6032 if (note != NULL)
6033 break;
6034 }
9392c110 6035 }
997704f1
RH
6036
6037 if (note)
6038 {
6039 /* If the function has a single basic block, and no real
b8698a0f 6040 epilogue insns (e.g. sibcall with no cleanup), the
997704f1
RH
6041 epilogue note can get scheduled before the prologue
6042 note. If we have frame related prologue insns, having
6043 them scanned during the epilogue will result in a crash.
6044 In this case re-order the epilogue note to just before
6045 the last insn in the block. */
6046 if (first == NULL)
6047 first = BB_END (bb);
6048
6049 if (PREV_INSN (first) != note)
6050 reorder_insns (note, note, PREV_INSN (first));
6051 }
bdac5f58
TW
6052 }
6053 }
6054#endif /* HAVE_prologue or HAVE_epilogue */
6055}
87ff9c8e 6056
df92c640
SB
6057/* Returns the name of function declared by FNDECL. */
6058const char *
6059fndecl_name (tree fndecl)
6060{
6061 if (fndecl == NULL)
6062 return "(nofn)";
6063 return lang_hooks.decl_printable_name (fndecl, 2);
6064}
6065
532aafad
SB
6066/* Returns the name of function FN. */
6067const char *
6068function_name (struct function *fn)
6069{
df92c640
SB
6070 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6071 return fndecl_name (fndecl);
532aafad
SB
6072}
6073
faed5cc3
SB
6074/* Returns the name of the current function. */
6075const char *
6076current_function_name (void)
6077{
532aafad 6078 return function_name (cfun);
faed5cc3 6079}
ef330312
PB
6080\f
6081
c2924966 6082static unsigned int
ef330312
PB
6083rest_of_handle_check_leaf_regs (void)
6084{
6085#ifdef LEAF_REGISTERS
416ff32e 6086 crtl->uses_only_leaf_regs
ef330312
PB
6087 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6088#endif
c2924966 6089 return 0;
ef330312
PB
6090}
6091
8d8d1a28 6092/* Insert a TYPE into the used types hash table of CFUN. */
b646ba3f 6093
8d8d1a28
AH
6094static void
6095used_types_insert_helper (tree type, struct function *func)
33c9159e 6096{
8d8d1a28 6097 if (type != NULL && func != NULL)
33c9159e 6098 {
33c9159e 6099 if (func->used_types_hash == NULL)
b086d530
TS
6100 func->used_types_hash = hash_set<tree>::create_ggc (37);
6101
6102 func->used_types_hash->add (type);
33c9159e
AH
6103 }
6104}
6105
8d8d1a28
AH
6106/* Given a type, insert it into the used hash table in cfun. */
6107void
6108used_types_insert (tree t)
6109{
6110 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
095c7b3c
JJ
6111 if (TYPE_NAME (t))
6112 break;
6113 else
6114 t = TREE_TYPE (t);
29ce73cb
PB
6115 if (TREE_CODE (t) == ERROR_MARK)
6116 return;
095c7b3c
JJ
6117 if (TYPE_NAME (t) == NULL_TREE
6118 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6119 t = TYPE_MAIN_VARIANT (t);
8d8d1a28 6120 if (debug_info_level > DINFO_LEVEL_NONE)
b646ba3f
DS
6121 {
6122 if (cfun)
6123 used_types_insert_helper (t, cfun);
6124 else
9771b263
DN
6125 {
6126 /* So this might be a type referenced by a global variable.
6127 Record that type so that we can later decide to emit its
6128 debug information. */
6129 vec_safe_push (types_used_by_cur_var_decl, t);
6130 }
b646ba3f
DS
6131 }
6132}
6133
6134/* Helper to Hash a struct types_used_by_vars_entry. */
6135
6136static hashval_t
6137hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6138{
6139 gcc_assert (entry && entry->var_decl && entry->type);
6140
6141 return iterative_hash_object (entry->type,
6142 iterative_hash_object (entry->var_decl, 0));
6143}
6144
6145/* Hash function of the types_used_by_vars_entry hash table. */
6146
6147hashval_t
6148types_used_by_vars_do_hash (const void *x)
6149{
6150 const struct types_used_by_vars_entry *entry =
6151 (const struct types_used_by_vars_entry *) x;
6152
6153 return hash_types_used_by_vars_entry (entry);
6154}
6155
6156/*Equality function of the types_used_by_vars_entry hash table. */
6157
6158int
6159types_used_by_vars_eq (const void *x1, const void *x2)
6160{
6161 const struct types_used_by_vars_entry *e1 =
6162 (const struct types_used_by_vars_entry *) x1;
6163 const struct types_used_by_vars_entry *e2 =
6164 (const struct types_used_by_vars_entry *)x2;
6165
6166 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6167}
6168
6169/* Inserts an entry into the types_used_by_vars_hash hash table. */
6170
6171void
6172types_used_by_var_decl_insert (tree type, tree var_decl)
6173{
6174 if (type != NULL && var_decl != NULL)
6175 {
6176 void **slot;
6177 struct types_used_by_vars_entry e;
6178 e.var_decl = var_decl;
6179 e.type = type;
6180 if (types_used_by_vars_hash == NULL)
6181 types_used_by_vars_hash =
6182 htab_create_ggc (37, types_used_by_vars_do_hash,
6183 types_used_by_vars_eq, NULL);
6184 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6185 hash_types_used_by_vars_entry (&e), INSERT);
6186 if (*slot == NULL)
6187 {
6188 struct types_used_by_vars_entry *entry;
766090c2 6189 entry = ggc_alloc<types_used_by_vars_entry> ();
b646ba3f
DS
6190 entry->type = type;
6191 entry->var_decl = var_decl;
6192 *slot = entry;
6193 }
6194 }
8d8d1a28
AH
6195}
6196
27a4cd48
DM
6197namespace {
6198
6199const pass_data pass_data_leaf_regs =
6200{
6201 RTL_PASS, /* type */
6202 "*leaf_regs", /* name */
6203 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
6204 TV_NONE, /* tv_id */
6205 0, /* properties_required */
6206 0, /* properties_provided */
6207 0, /* properties_destroyed */
6208 0, /* todo_flags_start */
6209 0, /* todo_flags_finish */
ef330312
PB
6210};
6211
27a4cd48
DM
6212class pass_leaf_regs : public rtl_opt_pass
6213{
6214public:
c3284718
RS
6215 pass_leaf_regs (gcc::context *ctxt)
6216 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
27a4cd48
DM
6217 {}
6218
6219 /* opt_pass methods: */
be55bfe6
TS
6220 virtual unsigned int execute (function *)
6221 {
6222 return rest_of_handle_check_leaf_regs ();
6223 }
27a4cd48
DM
6224
6225}; // class pass_leaf_regs
6226
6227} // anon namespace
6228
6229rtl_opt_pass *
6230make_pass_leaf_regs (gcc::context *ctxt)
6231{
6232 return new pass_leaf_regs (ctxt);
6233}
6234
6fb5fa3c
DB
6235static unsigned int
6236rest_of_handle_thread_prologue_and_epilogue (void)
6237{
6238 if (optimize)
6239 cleanup_cfg (CLEANUP_EXPENSIVE);
d3c12306 6240
6fb5fa3c
DB
6241 /* On some machines, the prologue and epilogue code, or parts thereof,
6242 can be represented as RTL. Doing so lets us schedule insns between
6243 it and the rest of the code and also allows delayed branch
6244 scheduling to operate in the epilogue. */
6fb5fa3c 6245 thread_prologue_and_epilogue_insns ();
d3c12306 6246
bdc6e1ae
SB
6247 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6248 see PR57320. */
6249 cleanup_cfg (0);
6250
d3c12306 6251 /* The stack usage info is finalized during prologue expansion. */
a11e0df4 6252 if (flag_stack_usage_info)
d3c12306
EB
6253 output_stack_usage ();
6254
6fb5fa3c
DB
6255 return 0;
6256}
6257
27a4cd48
DM
6258namespace {
6259
6260const pass_data pass_data_thread_prologue_and_epilogue =
6261{
6262 RTL_PASS, /* type */
6263 "pro_and_epilogue", /* name */
6264 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
6265 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6266 0, /* properties_required */
6267 0, /* properties_provided */
6268 0, /* properties_destroyed */
3bea341f
RB
6269 0, /* todo_flags_start */
6270 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6fb5fa3c 6271};
27a4cd48
DM
6272
6273class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6274{
6275public:
c3284718
RS
6276 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6277 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
27a4cd48
DM
6278 {}
6279
6280 /* opt_pass methods: */
be55bfe6
TS
6281 virtual unsigned int execute (function *)
6282 {
6283 return rest_of_handle_thread_prologue_and_epilogue ();
6284 }
27a4cd48
DM
6285
6286}; // class pass_thread_prologue_and_epilogue
6287
6288} // anon namespace
6289
6290rtl_opt_pass *
6291make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6292{
6293 return new pass_thread_prologue_and_epilogue (ctxt);
6294}
d8d72314
PB
6295\f
6296
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
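
/* A compilable illustration (not from the GCC sources) of the situation
   handled here; the function name is made up and the empty asm template
   stands in for any target-specific code:

     int
     f (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return inout + orig;
     }

   Because 'orig' keeps the original value live across the asm, SSA-based
   copy propagation gives the "+mr" operand distinct input and output
   names, and this pass reinserts the copy so the matching constraint is
   satisfied again before reload; the result can be inspected in the RTL
   dump of this pass (-fdump-rtl-asmcons).  */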

static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change 'in' once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 too complicated a problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we replaced only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints

unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"