/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
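/* A worked example (editor's illustration, not in the original source):
   with ALIGN == 8, FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16.
   The masking also behaves correctly for negative frame offsets, where C
   division would round toward zero: FLOOR_ROUND (-13, 8) == -16.  */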

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));
void (*free_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PROTO((struct function *));
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));
void (*free_lang_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

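/* Illustrative timeline (editor's sketch, not in the original source):

     push_temp_slots ();            // temp_slot_level becomes N+1
     t = assign_stack_temp (...);   // t's slot is at level N+1
     ... use t ...
     preserve_temp_slots (result);  // moves result's slot down to level N
     pop_temp_slots ();             // frees the remaining level-N+1 slots

   Slots whose `keep' flag is set survive free_temp_slots.  */
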
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
                                        int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
                                              int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int,
                                      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
                                  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int, struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PROTO((rtx)) ATTRIBUTE_UNUSED;
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
                                        struct hash_table *));
static int is_addressof PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
                                                       struct hash_table *,
                                                       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PROTO ((struct temp_slot *));
static void mark_function_status PROTO ((struct function *));
static void mark_function_chain PROTO ((void *));
static void prepare_function_start PROTO ((void));

\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
                      ? current_function
                      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  current_function = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
                      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != current_function)
    push_obstacks (function->function_obstack,
                   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == current_function && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != current_function)
    pop_obstacks ();

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */
rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, current_function);
}
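
/* Usage sketch (editor's illustration, not in the original source):
   a pass that needs a scratch word on the frame might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where ALIGN == 0 requests alignment according to the mode.  Before
   virtual register instantiation the returned address is expressed in
   terms of virtual_stack_vars_rtx; afterwards it is relative to
   frame_pointer_rtx.  */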
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && (!flag_strict_aliasing
            || (alias_set && p->alias_set == alias_set))
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
          /* We can't split slots if -fstrict-aliasing because the
             information about the alias set for the new slot will be
             lost.  */
          && !flag_strict_aliasing)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
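
/* Usage sketch (editor's illustration, not in the original source):
   expansion code that needs a word-sized scratch that dies with the
   current statement could do

     rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

   KEEP == 0 lets the slot be reclaimed by the next free_temp_slots
   and reused by later statements at the same level.  */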
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
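
/* Usage sketch (editor's illustration, not in the original source):
   callers holding a tree type rather than a bare mode prefer

     rtx t = assign_temp (type, 1, 0, 0);

   which hands back a (possibly promoted) pseudo register when the type
   fits in one and MEMORY_REQUIRED is 0, or a stack temporary otherwise.  */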
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
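
/* Worked example (editor's illustration, not in the original source):
   two free BLKmode slots with base_offset/full_size of 0/16 and 16/8
   satisfy p->base_offset + p->full_size == q->base_offset, so they are
   merged into one 24-byte slot that a later, larger request can reuse.  */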
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if both OLD and NEW are a PLUS and if
     there is a register in common between them.  If so, try a recursive
     call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS || GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
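
/* Editor's note (not in the original source): after several aliases have
   been recorded, p->address is an EXPR_LIST chain with the newest alias
   at the head, e.g.

     (expr_list NEW2 (expr_list NEW1 (expr_list FIRST nil)))

   and find_temp_slot_from_address walks that chain when matching X.  */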

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
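
/* Motivating example (editor's illustration, not in the original source):
   in the GNU C statement expression

     struct S s = ({ struct S tmp = f (); tmp; });

   the value of the ({...}) may live in a temporary made at the inner
   level; pretending it was allocated one level up keeps free_temp_slots
   from reclaiming it before the enclosing statement is done with it.  */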

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
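
/* Typical pairing (editor's sketch, not in the original source):

     push_temp_slots ();
     ... expand a statement, allocating temporaries ...
     preserve_temp_slots (result);   // if the value must outlive the level
     pop_temp_slots ();

   Each pop frees the in-use slots of the level being left and calls
   combine_temp_slots to coalesce adjacent free BLKmode slots.  */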

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                            0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : current_function;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
                        stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
                                0);
          end_sequence ();
        }
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
                        0, 0);
  end_sequence ();
}
\f
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
1526
1527/* Scan the insn-chain starting with INSN for refs to VAR
1528 and fix them up. TOPLEVEL is nonzero if this chain is the
1529 main chain of insns for the current function. */
1530
1531static void
fe9b4957 1532fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
6f086dfc 1533 rtx var;
00d8a4c1
RK
1534 enum machine_mode promoted_mode;
1535 int unsignedp;
6f086dfc
RS
1536 rtx insn;
1537 int toplevel;
fe9b4957 1538 struct hash_table *ht;
6f086dfc 1539{
02a10449 1540 rtx call_dest = 0;
07444f1d 1541 rtx insn_list = NULL_RTX;
fe9b4957
MM
1542
1543 /* If we already know which INSNs reference VAR there's no need
1544 to walk the entire instruction chain. */
1545 if (ht)
1546 {
1547 insn_list = ((struct insns_for_mem_entry *)
1548 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1549 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1550 insn_list = XEXP (insn_list, 1);
1551 }
02a10449 1552
6f086dfc
RS
1553 while (insn)
1554 {
1555 rtx next = NEXT_INSN (insn);
e5e809f4 1556 rtx set, prev, prev_set;
6f086dfc 1557 rtx note;
e5e809f4 1558
e15762df 1559 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6f086dfc 1560 {
ef178af3
ZW
1561 /* Remember the notes in case we delete the insn. */
1562 note = REG_NOTES (insn);
1563
63770d6a
RK
1564 /* If this is a CLOBBER of VAR, delete it.
1565
1566 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1567 and REG_RETVAL notes too. */
926d1ca5 1568 if (GET_CODE (PATTERN (insn)) == CLOBBER
07362cb3
JW
1569 && (XEXP (PATTERN (insn), 0) == var
1570 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1571 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1572 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
63770d6a
RK
1573 {
1574 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1575 /* The REG_LIBCALL note will go away since we are going to
1576 turn INSN into a NOTE, so just delete the
1577 corresponding REG_RETVAL note. */
1578 remove_note (XEXP (note, 0),
1579 find_reg_note (XEXP (note, 0), REG_RETVAL,
1580 NULL_RTX));
1581
 1582 /* In unoptimized compilation we shouldn't call delete_insn
 1583 (only jump.c does, when emitting warnings), so turn INSN into a NOTE. */
1584 PUT_CODE (insn, NOTE);
1585 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1586 NOTE_SOURCE_FILE (insn) = 0;
1587 }
1588
6f086dfc 1589 /* The insn to load VAR from a home in the arglist
e5e809f4
JL
1590 is now a no-op. When we see it, just delete it.
1591 Similarly if this is storing VAR from a register from which
1592 it was loaded in the previous insn. This will occur
1593 when an ADDRESSOF was made for an arglist slot. */
63770d6a 1594 else if (toplevel
e5e809f4
JL
1595 && (set = single_set (insn)) != 0
1596 && SET_DEST (set) == var
63770d6a
RK
1597 /* If this represents the result of an insn group,
1598 don't delete the insn. */
1599 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
e5e809f4
JL
1600 && (rtx_equal_p (SET_SRC (set), var)
1601 || (GET_CODE (SET_SRC (set)) == REG
1602 && (prev = prev_nonnote_insn (insn)) != 0
1603 && (prev_set = single_set (prev)) != 0
1604 && SET_DEST (prev_set) == SET_SRC (set)
1605 && rtx_equal_p (SET_SRC (prev_set), var))))
6f086dfc 1606 {
b4ff474c
RS
 1607 /* In unoptimized compilation we shouldn't call delete_insn
 1608 (only jump.c does, when emitting warnings), so turn INSN into a NOTE. */
1609 PUT_CODE (insn, NOTE);
1610 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1611 NOTE_SOURCE_FILE (insn) = 0;
6f086dfc
RS
1612 if (insn == last_parm_insn)
1613 last_parm_insn = PREV_INSN (next);
1614 }
1615 else
1616 {
02a10449
RK
1617 struct fixup_replacement *replacements = 0;
1618 rtx next_insn = NEXT_INSN (insn);
1619
e9a25f70
JL
1620 if (SMALL_REGISTER_CLASSES)
1621 {
1622 /* If the insn that copies the results of a CALL_INSN
1623 into a pseudo now references VAR, we have to use an
1624 intermediate pseudo since we want the life of the
1625 return value register to be only a single insn.
02a10449 1626
e9a25f70
JL
 1627 If we don't use an intermediate pseudo, such things as
 1628 address computations, needed to make the address of VAR valid
 1629 if it is not already, can be placed between the CALL_INSN and INSN.
02a10449 1630
e9a25f70
JL
1631 To make sure this doesn't happen, we record the destination
1632 of the CALL_INSN and see if the next insn uses both that
1633 and VAR. */
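/* Concretely (register numbers hypothetical): given
     (call_insn (set (reg:SI 0) (call ...)))
     (insn (set VAR (reg:SI 0)))
   where VAR is now a MEM whose address may need fixing, we emit
     (set (reg:SI 70) (reg:SI 0))
   right before the second insn and make it use (reg:SI 70) instead,
   so the return-value register stays live for only a single insn.  */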
02a10449 1634
f95182a4
ILT
1635 if (call_dest != 0 && GET_CODE (insn) == INSN
1636 && reg_mentioned_p (var, PATTERN (insn))
1637 && reg_mentioned_p (call_dest, PATTERN (insn)))
1638 {
1639 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
02a10449 1640
f95182a4 1641 emit_insn_before (gen_move_insn (temp, call_dest), insn);
02a10449 1642
f95182a4
ILT
1643 PATTERN (insn) = replace_rtx (PATTERN (insn),
1644 call_dest, temp);
1645 }
02a10449 1646
f95182a4
ILT
1647 if (GET_CODE (insn) == CALL_INSN
1648 && GET_CODE (PATTERN (insn)) == SET)
1649 call_dest = SET_DEST (PATTERN (insn));
1650 else if (GET_CODE (insn) == CALL_INSN
1651 && GET_CODE (PATTERN (insn)) == PARALLEL
1652 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1653 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1654 else
1655 call_dest = 0;
1656 }
02a10449 1657
6f086dfc
RS
1658 /* See if we have to do anything to INSN now that VAR is in
1659 memory. If it needs to be loaded into a pseudo, use a single
1660 pseudo for the entire insn in case there is a MATCH_DUP
1661 between two operands. We pass a pointer to the head of
1662 a list of struct fixup_replacements. If fixup_var_refs_1
1663 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1664 it will record them in this list.
1665
1666 If it allocated a pseudo for any replacement, we copy into
1667 it here. */
1668
00d8a4c1
RK
1669 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1670 &replacements);
6f086dfc 1671
77121fee
JW
1672 /* If this is last_parm_insn, and any instructions were output
1673 after it to fix it up, then we must set last_parm_insn to
1674 the last such instruction emitted. */
1675 if (insn == last_parm_insn)
1676 last_parm_insn = PREV_INSN (next_insn);
1677
6f086dfc
RS
1678 while (replacements)
1679 {
1680 if (GET_CODE (replacements->new) == REG)
1681 {
1682 rtx insert_before;
00d8a4c1 1683 rtx seq;
6f086dfc
RS
1684
1685 /* OLD might be a (subreg (mem)). */
1686 if (GET_CODE (replacements->old) == SUBREG)
1687 replacements->old
1688 = fixup_memory_subreg (replacements->old, insn, 0);
1689 else
1690 replacements->old
1691 = fixup_stack_1 (replacements->old, insn);
1692
5fa7422b 1693 insert_before = insn;
6f086dfc 1694
00d8a4c1
RK
1695 /* If we are changing the mode, do a conversion.
1696 This might be wasteful, but combine.c will
1697 eliminate much of the waste. */
1698
1699 if (GET_MODE (replacements->new)
1700 != GET_MODE (replacements->old))
1701 {
1702 start_sequence ();
1703 convert_move (replacements->new,
1704 replacements->old, unsignedp);
1705 seq = gen_sequence ();
1706 end_sequence ();
1707 }
1708 else
1709 seq = gen_move_insn (replacements->new,
1710 replacements->old);
1711
1712 emit_insn_before (seq, insert_before);
6f086dfc
RS
1713 }
1714
1715 replacements = replacements->next;
1716 }
1717 }
1718
1719 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1720 But don't touch other insns referred to by reg-notes;
1721 we will get them elsewhere. */
ef178af3
ZW
1722 while (note)
1723 {
1724 if (GET_CODE (note) != INSN_LIST)
1725 XEXP (note, 0)
1726 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1727 note = XEXP (note, 1);
1728 }
6f086dfc 1729 }
fe9b4957
MM
1730
1731 if (!ht)
1732 insn = next;
1733 else if (insn_list)
1734 {
1735 insn = XEXP (insn_list, 0);
1736 insn_list = XEXP (insn_list, 1);
1737 }
1738 else
1739 insn = NULL_RTX;
6f086dfc
RS
1740 }
1741}
1742\f
00d8a4c1
RK
1743/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1744 See if the rtx expression at *LOC in INSN needs to be changed.
6f086dfc
RS
1745
1746 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1747 contain a list of original rtx's and replacements. If we find that we need
1748 to modify this insn by replacing a memory reference with a pseudo or by
1749 making a new MEM to implement a SUBREG, we consult that list to see if
1750 we have already chosen a replacement. If none has already been allocated,
1751 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1752 or the SUBREG, as appropriate, to the pseudo. */
1753
1754static void
00d8a4c1 1755fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
6f086dfc 1756 register rtx var;
00d8a4c1 1757 enum machine_mode promoted_mode;
6f086dfc
RS
1758 register rtx *loc;
1759 rtx insn;
1760 struct fixup_replacement **replacements;
1761{
1762 register int i;
1763 register rtx x = *loc;
1764 RTX_CODE code = GET_CODE (x);
6f7d635c 1765 register const char *fmt;
6f086dfc
RS
1766 register rtx tem, tem1;
1767 struct fixup_replacement *replacement;
1768
1769 switch (code)
1770 {
e9a25f70
JL
1771 case ADDRESSOF:
1772 if (XEXP (x, 0) == var)
1773 {
956d6950
JL
1774 /* Prevent sharing of rtl that might lose. */
1775 rtx sub = copy_rtx (XEXP (var, 0));
1776
956d6950
JL
1777 if (! validate_change (insn, loc, sub, 0))
1778 {
5f98f7c4
RH
1779 rtx y = gen_reg_rtx (GET_MODE (sub));
1780 rtx seq, new_insn;
1781
1782 /* We should be able to replace with a register or all is lost.
1783 Note that we can't use validate_change to verify this, since
 1784 it would not replace all duplicates simultaneously. */
1785 if (! validate_replace_rtx (*loc, y, insn))
1786 abort ();
1787
1788 /* Careful! First try to recognize a direct move of the
1789 value, mimicking how things are done in gen_reload wrt
1790 PLUS. Consider what happens when insn is a conditional
1791 move instruction and addsi3 clobbers flags. */
1792
1793 start_sequence ();
1794 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1795 seq = gen_sequence ();
1796 end_sequence ();
1797
1798 if (recog_memoized (new_insn) < 0)
1799 {
1800 /* That failed. Fall back on force_operand and hope. */
956d6950 1801
5f98f7c4
RH
1802 start_sequence ();
1803 force_operand (sub, y);
1804 seq = gen_sequence ();
1805 end_sequence ();
1806 }
956d6950 1807
5f98f7c4
RH
1808#ifdef HAVE_cc0
1809 /* Don't separate setter from user. */
1810 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1811 insn = PREV_INSN (insn);
1812#endif
1813
1814 emit_insn_before (seq, insn);
1815 }
e9a25f70
JL
1816 }
1817 return;
1818
6f086dfc
RS
1819 case MEM:
1820 if (var == x)
1821 {
1822 /* If we already have a replacement, use it. Otherwise,
1823 try to fix up this address in case it is invalid. */
1824
2740a678 1825 replacement = find_fixup_replacement (replacements, var);
6f086dfc
RS
1826 if (replacement->new)
1827 {
1828 *loc = replacement->new;
1829 return;
1830 }
1831
1832 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1833
00d8a4c1
RK
1834 /* Unless we are forcing memory to register or we changed the mode,
1835 we can leave things the way they are if the insn is valid. */
6f086dfc
RS
1836
1837 INSN_CODE (insn) = -1;
00d8a4c1
RK
1838 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1839 && recog_memoized (insn) >= 0)
6f086dfc
RS
1840 return;
1841
00d8a4c1 1842 *loc = replacement->new = gen_reg_rtx (promoted_mode);
6f086dfc
RS
1843 return;
1844 }
1845
1846 /* If X contains VAR, we need to unshare it here so that we update
1847 each occurrence separately. But all identical MEMs in one insn
1848 must be replaced with the same rtx because of the possibility of
1849 MATCH_DUPs. */
1850
1851 if (reg_mentioned_p (var, x))
1852 {
2740a678 1853 replacement = find_fixup_replacement (replacements, x);
6f086dfc
RS
1854 if (replacement->new == 0)
1855 replacement->new = copy_most_rtx (x, var);
1856
1857 *loc = x = replacement->new;
1858 }
1859 break;
1860
1861 case REG:
1862 case CC0:
1863 case PC:
1864 case CONST_INT:
1865 case CONST:
1866 case SYMBOL_REF:
1867 case LABEL_REF:
1868 case CONST_DOUBLE:
1869 return;
1870
1871 case SIGN_EXTRACT:
1872 case ZERO_EXTRACT:
1873 /* Note that in some cases those types of expressions are altered
1874 by optimize_bit_field, and do not survive to get here. */
1875 if (XEXP (x, 0) == var
1876 || (GET_CODE (XEXP (x, 0)) == SUBREG
1877 && SUBREG_REG (XEXP (x, 0)) == var))
1878 {
1879 /* Get TEM as a valid MEM in the mode presently in the insn.
1880
1881 We don't worry about the possibility of MATCH_DUP here; it
1882 is highly unlikely and would be tricky to handle. */
1883
1884 tem = XEXP (x, 0);
1885 if (GET_CODE (tem) == SUBREG)
0e09cc26
RK
1886 {
1887 if (GET_MODE_BITSIZE (GET_MODE (tem))
1888 > GET_MODE_BITSIZE (GET_MODE (var)))
1889 {
1890 replacement = find_fixup_replacement (replacements, var);
1891 if (replacement->new == 0)
1892 replacement->new = gen_reg_rtx (GET_MODE (var));
1893 SUBREG_REG (tem) = replacement->new;
1894 }
ef933d26
RK
1895 else
1896 tem = fixup_memory_subreg (tem, insn, 0);
0e09cc26
RK
1897 }
1898 else
1899 tem = fixup_stack_1 (tem, insn);
6f086dfc
RS
1900
1901 /* Unless we want to load from memory, get TEM into the proper mode
1902 for an extract from memory. This can only be done if the
1903 extract is at a constant position and length. */
1904
1905 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1906 && GET_CODE (XEXP (x, 2)) == CONST_INT
1907 && ! mode_dependent_address_p (XEXP (tem, 0))
1908 && ! MEM_VOLATILE_P (tem))
1909 {
1910 enum machine_mode wanted_mode = VOIDmode;
1911 enum machine_mode is_mode = GET_MODE (tem);
e5e809f4 1912 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
6f086dfc
RS
1913
1914#ifdef HAVE_extzv
1915 if (GET_CODE (x) == ZERO_EXTRACT)
0d8e55d8 1916 {
a995e389
RH
1917 wanted_mode
1918 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
0d8e55d8
JL
1919 if (wanted_mode == VOIDmode)
1920 wanted_mode = word_mode;
1921 }
6f086dfc
RS
1922#endif
1923#ifdef HAVE_extv
1924 if (GET_CODE (x) == SIGN_EXTRACT)
0d8e55d8 1925 {
a995e389 1926 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
0d8e55d8
JL
1927 if (wanted_mode == VOIDmode)
1928 wanted_mode = word_mode;
1929 }
6f086dfc 1930#endif
6dc42e49 1931 /* If we have a narrower mode, we can do something. */
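		  /* A worked example (hypothetical, assuming 8-bit units and
		     matching bit/byte endianness): extracting 8 bits at bit 17
		     of (mem:SI ADDR) when the extv/extzv pattern wants QImode
		     gives offset 17/8 = 2 and pos 17%8 = 1, so the extraction
		     is retried as 8 bits at bit 1 of
		     (mem:QI (plus ADDR (const_int 2))).  */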
6f086dfc
RS
1932 if (wanted_mode != VOIDmode
1933 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1934 {
e5e809f4 1935 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
6f086dfc
RS
1936 rtx old_pos = XEXP (x, 2);
1937 rtx newmem;
1938
1939 /* If the bytes and bits are counted differently, we
1940 must adjust the offset. */
f76b9db2
ILT
1941 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1942 offset = (GET_MODE_SIZE (is_mode)
1943 - GET_MODE_SIZE (wanted_mode) - offset);
6f086dfc
RS
1944
1945 pos %= GET_MODE_BITSIZE (wanted_mode);
1946
38a448ca
RH
1947 newmem = gen_rtx_MEM (wanted_mode,
1948 plus_constant (XEXP (tem, 0), offset));
6f086dfc 1949 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
c6df88cb 1950 MEM_COPY_ATTRIBUTES (newmem, tem);
6f086dfc
RS
1951
1952 /* Make the change and see if the insn remains valid. */
1953 INSN_CODE (insn) = -1;
1954 XEXP (x, 0) = newmem;
5f4f0e22 1955 XEXP (x, 2) = GEN_INT (pos);
6f086dfc
RS
1956
1957 if (recog_memoized (insn) >= 0)
1958 return;
1959
1960 /* Otherwise, restore old position. XEXP (x, 0) will be
1961 restored later. */
1962 XEXP (x, 2) = old_pos;
1963 }
1964 }
1965
1966 /* If we get here, the bitfield extract insn can't accept a memory
1967 reference. Copy the input into a register. */
1968
1969 tem1 = gen_reg_rtx (GET_MODE (tem));
1970 emit_insn_before (gen_move_insn (tem1, tem), insn);
1971 XEXP (x, 0) = tem1;
1972 return;
1973 }
1974 break;
1975
1976 case SUBREG:
1977 if (SUBREG_REG (x) == var)
1978 {
00d8a4c1
RK
1979 /* If this is a special SUBREG made because VAR was promoted
1980 from a wider mode, replace it with VAR and call ourself
1981 recursively, this time saying that the object previously
1982 had its current mode (by virtue of the SUBREG). */
1983
1984 if (SUBREG_PROMOTED_VAR_P (x))
1985 {
1986 *loc = var;
1987 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1988 return;
1989 }
1990
6f086dfc
RS
1991 /* If this SUBREG makes VAR wider, it has become a paradoxical
1992 SUBREG with VAR in memory, but these aren't allowed at this
1993 stage of the compilation. So load VAR into a pseudo and take
1994 a SUBREG of that pseudo. */
1995 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1996 {
2740a678 1997 replacement = find_fixup_replacement (replacements, var);
6f086dfc
RS
1998 if (replacement->new == 0)
1999 replacement->new = gen_reg_rtx (GET_MODE (var));
2000 SUBREG_REG (x) = replacement->new;
2001 return;
2002 }
2003
2004 /* See if we have already found a replacement for this SUBREG.
2005 If so, use it. Otherwise, make a MEM and see if the insn
2006 is recognized. If not, or if we should force MEM into a register,
2007 make a pseudo for this SUBREG. */
2740a678 2008 replacement = find_fixup_replacement (replacements, x);
6f086dfc
RS
2009 if (replacement->new)
2010 {
2011 *loc = replacement->new;
2012 return;
2013 }
2014
2015 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2016
f898f031 2017 INSN_CODE (insn) = -1;
6f086dfc
RS
2018 if (! flag_force_mem && recog_memoized (insn) >= 0)
2019 return;
2020
2021 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2022 return;
2023 }
2024 break;
2025
2026 case SET:
2027 /* First do special simplification of bit-field references. */
2028 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2029 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2030 optimize_bit_field (x, insn, 0);
2031 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2032 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
5f4f0e22 2033 optimize_bit_field (x, insn, NULL_PTR);
6f086dfc 2034
0e09cc26
RK
2035 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2036 into a register and then store it back out. */
2037 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2038 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2039 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2040 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2041 > GET_MODE_SIZE (GET_MODE (var))))
2042 {
2043 replacement = find_fixup_replacement (replacements, var);
2044 if (replacement->new == 0)
2045 replacement->new = gen_reg_rtx (GET_MODE (var));
2046
2047 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2048 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2049 }
2050
6f086dfc 2051 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
0f41302f 2052 insn into a pseudo and store the low part of the pseudo into VAR. */
6f086dfc
RS
2053 if (GET_CODE (SET_DEST (x)) == SUBREG
2054 && SUBREG_REG (SET_DEST (x)) == var
2055 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2056 > GET_MODE_SIZE (GET_MODE (var))))
2057 {
2058 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2059 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2060 tem)),
2061 insn);
2062 break;
2063 }
2064
2065 {
2066 rtx dest = SET_DEST (x);
2067 rtx src = SET_SRC (x);
29a82058 2068#ifdef HAVE_insv
6f086dfc 2069 rtx outerdest = dest;
29a82058 2070#endif
6f086dfc
RS
2071
2072 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2073 || GET_CODE (dest) == SIGN_EXTRACT
2074 || GET_CODE (dest) == ZERO_EXTRACT)
2075 dest = XEXP (dest, 0);
2076
2077 if (GET_CODE (src) == SUBREG)
2078 src = XEXP (src, 0);
2079
 2080 /* If VAR does not appear at the top level of the SET,
2081 just scan the lower levels of the tree. */
2082
2083 if (src != var && dest != var)
2084 break;
2085
2086 /* We will need to rerecognize this insn. */
2087 INSN_CODE (insn) = -1;
2088
2089#ifdef HAVE_insv
2090 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2091 {
2092 /* Since this case will return, ensure we fixup all the
2093 operands here. */
00d8a4c1
RK
2094 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2095 insn, replacements);
2096 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2097 insn, replacements);
2098 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2099 insn, replacements);
6f086dfc
RS
2100
2101 tem = XEXP (outerdest, 0);
2102
2103 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2104 that may appear inside a ZERO_EXTRACT.
2105 This was legitimate when the MEM was a REG. */
2106 if (GET_CODE (tem) == SUBREG
2107 && SUBREG_REG (tem) == var)
0e09cc26 2108 tem = fixup_memory_subreg (tem, insn, 0);
6f086dfc
RS
2109 else
2110 tem = fixup_stack_1 (tem, insn);
2111
2112 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2113 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2114 && ! mode_dependent_address_p (XEXP (tem, 0))
2115 && ! MEM_VOLATILE_P (tem))
2116 {
0d8e55d8 2117 enum machine_mode wanted_mode;
6f086dfc 2118 enum machine_mode is_mode = GET_MODE (tem);
e5e809f4 2119 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
6f086dfc 2120
a995e389 2121 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
0d8e55d8
JL
2122 if (wanted_mode == VOIDmode)
2123 wanted_mode = word_mode;
2124
6dc42e49 2125 /* If we have a narrower mode, we can do something. */
6f086dfc
RS
2126 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2127 {
e5e809f4 2128 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
6f086dfc
RS
2129 rtx old_pos = XEXP (outerdest, 2);
2130 rtx newmem;
2131
f76b9db2
ILT
2132 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2133 offset = (GET_MODE_SIZE (is_mode)
2134 - GET_MODE_SIZE (wanted_mode) - offset);
6f086dfc
RS
2135
2136 pos %= GET_MODE_BITSIZE (wanted_mode);
2137
38a448ca 2138 newmem = gen_rtx_MEM (wanted_mode,
c5c76735
JL
2139 plus_constant (XEXP (tem, 0),
2140 offset));
6f086dfc 2141 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
c6df88cb 2142 MEM_COPY_ATTRIBUTES (newmem, tem);
6f086dfc
RS
2143
2144 /* Make the change and see if the insn remains valid. */
2145 INSN_CODE (insn) = -1;
2146 XEXP (outerdest, 0) = newmem;
5f4f0e22 2147 XEXP (outerdest, 2) = GEN_INT (pos);
6f086dfc
RS
2148
2149 if (recog_memoized (insn) >= 0)
2150 return;
2151
2152 /* Otherwise, restore old position. XEXP (x, 0) will be
2153 restored later. */
2154 XEXP (outerdest, 2) = old_pos;
2155 }
2156 }
2157
2158 /* If we get here, the bit-field store doesn't allow memory
2159 or isn't located at a constant position. Load the value into
2160 a register, do the store, and put it back into memory. */
2161
2162 tem1 = gen_reg_rtx (GET_MODE (tem));
2163 emit_insn_before (gen_move_insn (tem1, tem), insn);
2164 emit_insn_after (gen_move_insn (tem, tem1), insn);
2165 XEXP (outerdest, 0) = tem1;
2166 return;
2167 }
2168#endif
2169
2170 /* STRICT_LOW_PART is a no-op on memory references
2171 and it can cause combinations to be unrecognizable,
2172 so eliminate it. */
2173
2174 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2175 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2176
2177 /* A valid insn to copy VAR into or out of a register
2178 must be left alone, to avoid an infinite loop here.
2179 If the reference to VAR is by a subreg, fix that up,
2180 since SUBREG is not valid for a memref.
e15762df
RK
2181 Also fix up the address of the stack slot.
2182
2183 Note that we must not try to recognize the insn until
2184 after we know that we have valid addresses and no
2185 (subreg (mem ...) ...) constructs, since these interfere
2186 with determining the validity of the insn. */
6f086dfc
RS
2187
2188 if ((SET_SRC (x) == var
2189 || (GET_CODE (SET_SRC (x)) == SUBREG
2190 && SUBREG_REG (SET_SRC (x)) == var))
2191 && (GET_CODE (SET_DEST (x)) == REG
2192 || (GET_CODE (SET_DEST (x)) == SUBREG
2193 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1d273bf5 2194 && GET_MODE (var) == promoted_mode
c46722a7 2195 && x == single_set (insn))
6f086dfc 2196 {
e15762df
RK
2197 rtx pat;
2198
2740a678 2199 replacement = find_fixup_replacement (replacements, SET_SRC (x));
6f086dfc 2200 if (replacement->new)
6f086dfc 2201 SET_SRC (x) = replacement->new;
6f086dfc
RS
2202 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2203 SET_SRC (x) = replacement->new
2204 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2205 else
2206 SET_SRC (x) = replacement->new
2207 = fixup_stack_1 (SET_SRC (x), insn);
e15762df
RK
2208
2209 if (recog_memoized (insn) >= 0)
2210 return;
2211
2212 /* INSN is not valid, but we know that we want to
2213 copy SET_SRC (x) to SET_DEST (x) in some way. So
2214 we generate the move and see whether it requires more
2215 than one insn. If it does, we emit those insns and
 2216 delete INSN. Otherwise, we can just replace the pattern
 2217 of INSN; we have already verified above that INSN has
 2218 no function other than to do X. */
2219
2220 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2221 if (GET_CODE (pat) == SEQUENCE)
2222 {
2223 emit_insn_after (pat, insn);
2224 PUT_CODE (insn, NOTE);
2225 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2226 NOTE_SOURCE_FILE (insn) = 0;
2227 }
2228 else
2229 PATTERN (insn) = pat;
2230
6f086dfc
RS
2231 return;
2232 }
2233
2234 if ((SET_DEST (x) == var
2235 || (GET_CODE (SET_DEST (x)) == SUBREG
2236 && SUBREG_REG (SET_DEST (x)) == var))
2237 && (GET_CODE (SET_SRC (x)) == REG
2238 || (GET_CODE (SET_SRC (x)) == SUBREG
2239 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1d273bf5 2240 && GET_MODE (var) == promoted_mode
c46722a7 2241 && x == single_set (insn))
6f086dfc 2242 {
e15762df
RK
2243 rtx pat;
2244
6f086dfc
RS
2245 if (GET_CODE (SET_DEST (x)) == SUBREG)
2246 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2247 else
2248 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
e15762df
RK
2249
2250 if (recog_memoized (insn) >= 0)
2251 return;
2252
2253 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2254 if (GET_CODE (pat) == SEQUENCE)
2255 {
2256 emit_insn_after (pat, insn);
2257 PUT_CODE (insn, NOTE);
2258 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2259 NOTE_SOURCE_FILE (insn) = 0;
2260 }
2261 else
2262 PATTERN (insn) = pat;
2263
6f086dfc
RS
2264 return;
2265 }
2266
2267 /* Otherwise, storing into VAR must be handled specially
2268 by storing into a temporary and copying that into VAR
00d8a4c1
RK
2269 with a new insn after this one. Note that this case
2270 will be used when storing into a promoted scalar since
2271 the insn will now have different modes on the input
2272 and output and hence will be invalid (except for the case
2273 of setting it to a constant, which does not need any
2274 change if it is valid). We generate extra code in that case,
2275 but combine.c will eliminate it. */
6f086dfc
RS
2276
2277 if (dest == var)
2278 {
2279 rtx temp;
00d8a4c1
RK
2280 rtx fixeddest = SET_DEST (x);
2281
6f086dfc 2282 /* STRICT_LOW_PART can be discarded, around a MEM. */
00d8a4c1
RK
2283 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2284 fixeddest = XEXP (fixeddest, 0);
6f086dfc 2285 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
00d8a4c1 2286 if (GET_CODE (fixeddest) == SUBREG)
926d1ca5
RK
2287 {
2288 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2289 promoted_mode = GET_MODE (fixeddest);
2290 }
6f086dfc 2291 else
00d8a4c1
RK
2292 fixeddest = fixup_stack_1 (fixeddest, insn);
2293
926d1ca5 2294 temp = gen_reg_rtx (promoted_mode);
00d8a4c1
RK
2295
2296 emit_insn_after (gen_move_insn (fixeddest,
2297 gen_lowpart (GET_MODE (fixeddest),
2298 temp)),
2299 insn);
6f086dfc 2300
6f086dfc
RS
2301 SET_DEST (x) = temp;
2302 }
2303 }
e9a25f70
JL
2304
2305 default:
2306 break;
6f086dfc
RS
2307 }
2308
2309 /* Nothing special about this RTX; fix its operands. */
2310
2311 fmt = GET_RTX_FORMAT (code);
2312 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2313 {
2314 if (fmt[i] == 'e')
00d8a4c1 2315 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
6f086dfc
RS
2316 if (fmt[i] == 'E')
2317 {
2318 register int j;
2319 for (j = 0; j < XVECLEN (x, i); j++)
00d8a4c1
RK
2320 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2321 insn, replacements);
6f086dfc
RS
2322 }
2323 }
2324}
2325\f
2326/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2327 return an rtx (MEM:m1 newaddr) which is equivalent.
2328 If any insns must be emitted to compute NEWADDR, put them before INSN.
2329
2330 UNCRITICAL nonzero means accept paradoxical subregs.
0f41302f 2331 This is used for subregs found inside REG_NOTES. */
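/* Example (a sketch, assuming 4-byte words): on a little-endian target
   (subreg:QI (mem:SI ADDR) 0) becomes (mem:QI ADDR), while on a
   big-endian target the low-order byte sits at the other end of the
   word, so the adjustment below yields
   (mem:QI (plus ADDR (const_int 3))).  */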
6f086dfc
RS
2332
2333static rtx
2334fixup_memory_subreg (x, insn, uncritical)
2335 rtx x;
2336 rtx insn;
2337 int uncritical;
2338{
2339 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2340 rtx addr = XEXP (SUBREG_REG (x), 0);
2341 enum machine_mode mode = GET_MODE (x);
29a82058 2342 rtx result;
6f086dfc
RS
2343
2344 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2345 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2346 && ! uncritical)
2347 abort ();
2348
f76b9db2
ILT
2349 if (BYTES_BIG_ENDIAN)
2350 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2351 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
6f086dfc
RS
2352 addr = plus_constant (addr, offset);
2353 if (!flag_force_addr && memory_address_p (mode, addr))
2354 /* Shortcut if no insns need be emitted. */
2355 return change_address (SUBREG_REG (x), mode, addr);
2356 start_sequence ();
2357 result = change_address (SUBREG_REG (x), mode, addr);
2358 emit_insn_before (gen_sequence (), insn);
2359 end_sequence ();
2360 return result;
2361}
2362
2363/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2364 Replace subexpressions of X in place.
2365 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2366 Otherwise return X, with its contents possibly altered.
2367
ab6155b7
RK
2368 If any insns must be emitted to compute NEWADDR, put them before INSN.
2369
2370 UNCRITICAL is as in fixup_memory_subreg. */
6f086dfc
RS
2371
2372static rtx
ab6155b7 2373walk_fixup_memory_subreg (x, insn, uncritical)
6f086dfc
RS
2374 register rtx x;
2375 rtx insn;
ab6155b7 2376 int uncritical;
6f086dfc
RS
2377{
2378 register enum rtx_code code;
6f7d635c 2379 register const char *fmt;
6f086dfc
RS
2380 register int i;
2381
2382 if (x == 0)
2383 return 0;
2384
2385 code = GET_CODE (x);
2386
2387 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
ab6155b7 2388 return fixup_memory_subreg (x, insn, uncritical);
6f086dfc
RS
2389
2390 /* Nothing special about this RTX; fix its operands. */
2391
2392 fmt = GET_RTX_FORMAT (code);
2393 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2394 {
2395 if (fmt[i] == 'e')
ab6155b7 2396 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
6f086dfc
RS
2397 if (fmt[i] == 'E')
2398 {
2399 register int j;
2400 for (j = 0; j < XVECLEN (x, i); j++)
2401 XVECEXP (x, i, j)
ab6155b7 2402 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
6f086dfc
RS
2403 }
2404 }
2405 return x;
2406}
2407\f
6f086dfc
RS
2408/* For each memory ref within X, if it refers to a stack slot
2409 with an out of range displacement, put the address in a temp register
2410 (emitting new insns before INSN to load these registers)
2411 and alter the memory ref to use that register.
2412 Replace each such MEM rtx with a copy, to avoid clobberage. */
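/* For instance (hypothetical offsets): if the displacement in
   (mem:SI (plus (reg fp) (const_int 40000))) is out of range for the
   machine's addressing modes, we emit insns before INSN that copy the
   sum into a fresh pseudo, and rewrite the reference as
   (mem:SI (reg NEW)).  */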
2413
2414static rtx
2415fixup_stack_1 (x, insn)
2416 rtx x;
2417 rtx insn;
2418{
2419 register int i;
2420 register RTX_CODE code = GET_CODE (x);
6f7d635c 2421 register const char *fmt;
6f086dfc
RS
2422
2423 if (code == MEM)
2424 {
2425 register rtx ad = XEXP (x, 0);
2426 /* If we have address of a stack slot but it's not valid
2427 (displacement is too large), compute the sum in a register. */
2428 if (GET_CODE (ad) == PLUS
2429 && GET_CODE (XEXP (ad, 0)) == REG
40d05551
RK
2430 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2431 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
e9a25f70
JL
2432 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2433#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2434 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2435#endif
2436 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
956d6950 2437 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
40d05551 2438 || XEXP (ad, 0) == current_function_internal_arg_pointer)
6f086dfc
RS
2439 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2440 {
2441 rtx temp, seq;
2442 if (memory_address_p (GET_MODE (x), ad))
2443 return x;
2444
2445 start_sequence ();
2446 temp = copy_to_reg (ad);
2447 seq = gen_sequence ();
2448 end_sequence ();
2449 emit_insn_before (seq, insn);
2450 return change_address (x, VOIDmode, temp);
2451 }
2452 return x;
2453 }
2454
2455 fmt = GET_RTX_FORMAT (code);
2456 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2457 {
2458 if (fmt[i] == 'e')
2459 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2460 if (fmt[i] == 'E')
2461 {
2462 register int j;
2463 for (j = 0; j < XVECLEN (x, i); j++)
2464 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2465 }
2466 }
2467 return x;
2468}
2469\f
2470/* Optimization: a bit-field instruction whose field
2471 happens to be a byte or halfword in memory
2472 can be changed to a move instruction.
2473
2474 We call here when INSN is an insn to examine or store into a bit-field.
2475 BODY is the SET-rtx to be altered.
2476
2477 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2478 (Currently this is called only from function.c, and EQUIV_MEM
2479 is always 0.) */
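/* Illustration (a sketch; little-endian, with bits and bytes counted the
   same way): storing to (zero_extract:SI (mem:SI ADDR) (const_int 8)
   (const_int 8)) touches exactly the byte at ADDR+1, so the insn can be
   rewritten as a plain QImode move to
   (mem:QI (plus ADDR (const_int 1))).  */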
2480
2481static void
2482optimize_bit_field (body, insn, equiv_mem)
2483 rtx body;
2484 rtx insn;
2485 rtx *equiv_mem;
2486{
2487 register rtx bitfield;
2488 int destflag;
2489 rtx seq = 0;
2490 enum machine_mode mode;
2491
2492 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2493 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2494 bitfield = SET_DEST (body), destflag = 1;
2495 else
2496 bitfield = SET_SRC (body), destflag = 0;
2497
2498 /* First check that the field being stored has constant size and position
2499 and is in fact a byte or halfword suitably aligned. */
2500
2501 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2502 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2503 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2504 != BLKmode)
2505 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2506 {
2507 register rtx memref = 0;
2508
2509 /* Now check that the containing word is memory, not a register,
2510 and that it is safe to change the machine mode. */
2511
2512 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2513 memref = XEXP (bitfield, 0);
2514 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2515 && equiv_mem != 0)
2516 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2517 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2518 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2519 memref = SUBREG_REG (XEXP (bitfield, 0));
2520 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2521 && equiv_mem != 0
2522 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2523 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2524
2525 if (memref
2526 && ! mode_dependent_address_p (XEXP (memref, 0))
2527 && ! MEM_VOLATILE_P (memref))
2528 {
2529 /* Now adjust the address, first for any subreg'ing
2530 that we are now getting rid of,
2531 and then for which byte of the word is wanted. */
2532
e5e809f4 2533 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
b88a3142
RK
2534 rtx insns;
2535
6f086dfc 2536 /* Adjust OFFSET to count bits from low-address byte. */
f76b9db2
ILT
2537 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2538 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2539 - offset - INTVAL (XEXP (bitfield, 1)));
2540
6f086dfc
RS
2541 /* Adjust OFFSET to count bytes from low-address byte. */
2542 offset /= BITS_PER_UNIT;
2543 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2544 {
2545 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
f76b9db2
ILT
2546 if (BYTES_BIG_ENDIAN)
2547 offset -= (MIN (UNITS_PER_WORD,
2548 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2549 - MIN (UNITS_PER_WORD,
2550 GET_MODE_SIZE (GET_MODE (memref))));
6f086dfc
RS
2551 }
2552
b88a3142
RK
2553 start_sequence ();
2554 memref = change_address (memref, mode,
6f086dfc 2555 plus_constant (XEXP (memref, 0), offset));
b88a3142
RK
2556 insns = get_insns ();
2557 end_sequence ();
2558 emit_insns_before (insns, insn);
6f086dfc
RS
2559
2560 /* Store this memory reference where
2561 we found the bit field reference. */
2562
2563 if (destflag)
2564 {
2565 validate_change (insn, &SET_DEST (body), memref, 1);
2566 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2567 {
2568 rtx src = SET_SRC (body);
2569 while (GET_CODE (src) == SUBREG
2570 && SUBREG_WORD (src) == 0)
2571 src = SUBREG_REG (src);
2572 if (GET_MODE (src) != GET_MODE (memref))
2573 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2574 validate_change (insn, &SET_SRC (body), src, 1);
2575 }
2576 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2577 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2578 /* This shouldn't happen because anything that didn't have
2579 one of these modes should have got converted explicitly
2580 and then referenced through a subreg.
2581 This is so because the original bit-field was
2582 handled by agg_mode and so its tree structure had
2583 the same mode that memref now has. */
2584 abort ();
2585 }
2586 else
2587 {
2588 rtx dest = SET_DEST (body);
2589
2590 while (GET_CODE (dest) == SUBREG
4013a709
RK
2591 && SUBREG_WORD (dest) == 0
2592 && (GET_MODE_CLASS (GET_MODE (dest))
ab87f8c8
JL
2593 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2594 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2595 <= UNITS_PER_WORD))
6f086dfc
RS
2596 dest = SUBREG_REG (dest);
2597
2598 validate_change (insn, &SET_DEST (body), dest, 1);
2599
2600 if (GET_MODE (dest) == GET_MODE (memref))
2601 validate_change (insn, &SET_SRC (body), memref, 1);
2602 else
2603 {
2604 /* Convert the mem ref to the destination mode. */
2605 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2606
2607 start_sequence ();
2608 convert_move (newreg, memref,
2609 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2610 seq = get_insns ();
2611 end_sequence ();
2612
2613 validate_change (insn, &SET_SRC (body), newreg, 1);
2614 }
2615 }
2616
2617 /* See if we can convert this extraction or insertion into
2618 a simple move insn. We might not be able to do so if this
2619 was, for example, part of a PARALLEL.
2620
2621 If we succeed, write out any needed conversions. If we fail,
2622 it is hard to guess why we failed, so don't do anything
2623 special; just let the optimization be suppressed. */
2624
2625 if (apply_change_group () && seq)
2626 emit_insns_before (seq, insn);
2627 }
2628 }
2629}
2630\f
2631/* These routines are responsible for converting virtual register references
2632 to the actual hard register references once RTL generation is complete.
2633
2634 The following four variables are used for communication between the
2635 routines. They contain the offsets of the virtual registers from their
2636 respective hard registers. */
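/* For example, once var_offset is known, a reference such as
   (plus:SI (reg virtual-stack-vars) (const_int 8)) is replaced by
   (plus:SI (reg frame-pointer) (const_int 8 + var_offset)); the other
   virtual registers are handled likewise, each with its own offset and
   hard register.  (Illustrative; both are target-specific.)  */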
2637
2638static int in_arg_offset;
2639static int var_offset;
2640static int dynamic_offset;
2641static int out_arg_offset;
71038426 2642static int cfa_offset;
6f086dfc
RS
2643
2644/* In most machines, the stack pointer register is equivalent to the bottom
2645 of the stack. */
2646
2647#ifndef STACK_POINTER_OFFSET
2648#define STACK_POINTER_OFFSET 0
2649#endif
2650
2651/* If not defined, pick an appropriate default for the offset of dynamically
2652 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2653 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2654
2655#ifndef STACK_DYNAMIC_OFFSET
2656
2657#ifdef ACCUMULATE_OUTGOING_ARGS
2658/* The bottom of the stack points to the actual arguments. If
2659 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 2660 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2661 stack space for register parameters is not pushed by the caller, but
2662 rather part of the fixed stack areas and hence not included in
2663 `current_function_outgoing_args_size'. Nevertheless, we must allow
2664 for it when allocating stack dynamic objects. */
2665
2666#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2667#define STACK_DYNAMIC_OFFSET(FNDECL) \
2668(current_function_outgoing_args_size \
2669 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2670
2671#else
2672#define STACK_DYNAMIC_OFFSET(FNDECL) \
2673(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2674#endif
2675
2676#else
2677#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2678#endif
2679#endif
2680
71038426
RH
2681/* On a few machines, the CFA coincides with the arg pointer. */
2682
2683#ifndef ARG_POINTER_CFA_OFFSET
2684#define ARG_POINTER_CFA_OFFSET 0
2685#endif
2686
2687
e9a25f70
JL
2688/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2689 its address taken. DECL is the decl for the object stored in the
2690 register, for later use if we do need to force REG into the stack.
2691 REG is overwritten by the MEM like in put_reg_into_stack. */
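/* Sketch (register numbers hypothetical): for DECL `x' held in
   (reg:SI 57), we build R = (addressof (reg:SI 58) 57 <x>), where
   (reg:SI 58) is a fresh pseudo remembered for later use, and then
   overwrite (reg:SI 57) in place so that it reads (mem:SI R).  */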
2692
2693rtx
2694gen_mem_addressof (reg, decl)
2695 rtx reg;
2696 tree decl;
2697{
2698 tree type = TREE_TYPE (decl);
8f985ec4
ZW
2699 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2700 REGNO (reg), decl);
14a774a9 2701
95ca22f4 2702 /* If the original REG was a user-variable, then so is the REG whose
14a774a9 2703 address is being taken. Likewise for unchanging. */
95ca22f4 2704 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
14a774a9 2705 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
e9a25f70 2706
e9a25f70
JL
2707 PUT_CODE (reg, MEM);
2708 PUT_MODE (reg, DECL_MODE (decl));
ef178af3 2709 XEXP (reg, 0) = r;
e9a25f70 2710 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
c6df88cb 2711 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
41472af8 2712 MEM_ALIAS_SET (reg) = get_alias_set (decl);
e9a25f70 2713
e5e809f4 2714 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
fe9b4957 2715 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
e5e809f4 2716
e9a25f70
JL
2717 return reg;
2718}
2719
2720/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2721
ca3075bd 2722#if 0
e9a25f70
JL
2723void
2724flush_addressof (decl)
2725 tree decl;
2726{
2727 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2728 && DECL_RTL (decl) != 0
2729 && GET_CODE (DECL_RTL (decl)) == MEM
2730 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2731 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
fe9b4957 2732 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
e9a25f70 2733}
ca3075bd 2734#endif
e9a25f70
JL
2735
2736/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2737
2738static void
fe9b4957 2739put_addressof_into_stack (r, ht)
e9a25f70 2740 rtx r;
fe9b4957 2741 struct hash_table *ht;
e9a25f70
JL
2742{
2743 tree decl = ADDRESSOF_DECL (r);
2744 rtx reg = XEXP (r, 0);
2745
2746 if (GET_CODE (reg) != REG)
2747 abort ();
2748
2749 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2750 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
e5e809f4 2751 ADDRESSOF_REGNO (r),
fe9b4957 2752 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
e9a25f70
JL
2753}
2754
b5bd3b3c
AS
2755/* List of replacements made below in purge_addressof_1 when creating
2756 bitfield insertions. */
8b04083b
VM
2757static rtx purge_bitfield_addressof_replacements;
2758
2759/* List of replacements made below in purge_addressof_1 for patterns
2760 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
 2761 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
 2762 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
 2763 enough in complex cases, e.g. when some field values can be
 2764 extracted by using a MEM with a narrower mode. */
b5bd3b3c
AS
2765static rtx purge_addressof_replacements;
2766
e9a25f70
JL
2767/* Helper function for purge_addressof. See if the rtx expression at *LOC
2768 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
8c36698e
NC
2769 the stack. If the function returns FALSE then the replacement could not
2770 be made. */
e9a25f70 2771
8c36698e 2772static boolean
fe9b4957 2773purge_addressof_1 (loc, insn, force, store, ht)
e9a25f70
JL
2774 rtx *loc;
2775 rtx insn;
f7b6d104 2776 int force, store;
fe9b4957 2777 struct hash_table *ht;
e9a25f70
JL
2778{
2779 rtx x;
2780 RTX_CODE code;
2781 int i, j;
6f7d635c 2782 const char *fmt;
8c36698e 2783 boolean result = true;
e9a25f70
JL
2784
2785 /* Re-start here to avoid recursion in common cases. */
2786 restart:
2787
2788 x = *loc;
2789 if (x == 0)
8c36698e 2790 return true;
e9a25f70
JL
2791
2792 code = GET_CODE (x);
2793
c5c76735
JL
2794 /* If we don't return in any of the cases below, we will recurse inside
2795 the RTX, which will normally result in any ADDRESSOF being forced into
2796 memory. */
2797 if (code == SET)
2798 {
8c36698e
NC
2799 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2800 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2801 return result;
c5c76735
JL
2802 }
2803
2804 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
e9a25f70 2805 {
956d6950
JL
2806 /* We must create a copy of the rtx because it was created by
2807 overwriting a REG rtx which is always shared. */
2808 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
c5c76735 2809 rtx insns;
e9a25f70 2810
ab87f8c8
JL
2811 if (validate_change (insn, loc, sub, 0)
2812 || validate_replace_rtx (x, sub, insn))
8c36698e 2813 return true;
ab87f8c8 2814
e9a25f70 2815 start_sequence ();
ab87f8c8
JL
2816 sub = force_operand (sub, NULL_RTX);
2817 if (! validate_change (insn, loc, sub, 0)
2818 && ! validate_replace_rtx (x, sub, insn))
e9a25f70
JL
2819 abort ();
2820
f7b6d104 2821 insns = gen_sequence ();
e9a25f70 2822 end_sequence ();
18e765cb 2823 emit_insn_before (insns, insn);
8c36698e 2824 return true;
e9a25f70 2825 }
c5c76735 2826
e9a25f70
JL
2827 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2828 {
2829 rtx sub = XEXP (XEXP (x, 0), 0);
ab87f8c8 2830 rtx sub2;
e5e809f4 2831
6d8ccdbb 2832 if (GET_CODE (sub) == MEM)
ab87f8c8
JL
2833 {
2834 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2835 MEM_COPY_ATTRIBUTES (sub2, sub);
2836 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2837 sub = sub2;
2838 }
c5c76735
JL
2839 else if (GET_CODE (sub) == REG
2840 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2841 ;
e5e809f4 2842 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
e9a25f70 2843 {
f7b6d104
RH
2844 int size_x, size_sub;
2845
b5bd3b3c
AS
2846 if (!insn)
2847 {
2848 /* When processing REG_NOTES look at the list of
2849 replacements done on the insn to find the register that X
2850 was replaced by. */
2851 rtx tem;
2852
8b04083b
VM
2853 for (tem = purge_bitfield_addressof_replacements;
2854 tem != NULL_RTX;
b5bd3b3c 2855 tem = XEXP (XEXP (tem, 1), 1))
8b04083b
VM
2856 if (rtx_equal_p (x, XEXP (tem, 0)))
2857 {
2858 *loc = XEXP (XEXP (tem, 1), 0);
8c36698e 2859 return true;
8b04083b 2860 }
fbdfe39c 2861
8b04083b
VM
2862 /* See comment for purge_addressof_replacements. */
2863 for (tem = purge_addressof_replacements;
2864 tem != NULL_RTX;
2865 tem = XEXP (XEXP (tem, 1), 1))
2866 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2867 {
2868 rtx z = XEXP (XEXP (tem, 1), 0);
fbdfe39c 2869
8b04083b
VM
2870 if (GET_MODE (x) == GET_MODE (z)
2871 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2872 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2873 abort ();
2874
2875 /* It can happen that the note may speak of things
2876 in a wider (or just different) mode than the
2877 code did. This is especially true of
2878 REG_RETVAL. */
2879
2880 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2881 z = SUBREG_REG (z);
2882
2883 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2884 && (GET_MODE_SIZE (GET_MODE (x))
2885 > GET_MODE_SIZE (GET_MODE (z))))
2886 {
 2887 /* This can occur as a result of invalid
2888 pointer casts, e.g. float f; ...
2889 *(long long int *)&f.
2890 ??? We could emit a warning here, but
2891 without a line number that wouldn't be
2892 very helpful. */
2893 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2894 }
2895 else
2896 z = gen_lowpart (GET_MODE (x), z);
2897
2898 *loc = z;
aa608fe6 2899 return true;
8b04083b 2900 }
b5bd3b3c 2901
8c36698e
NC
2902 /* Sometimes we may not be able to find the replacement. For
2903 example when the original insn was a MEM in a wider mode,
2904 and the note is part of a sign extension of a narrowed
2905 version of that MEM. Gcc testcase compile/990829-1.c can
 2906 generate an example of this situation. Rather than complain,
2907 we return false, which will prompt our caller to remove the
2908 offending note. */
2909 return false;
b5bd3b3c
AS
2910 }
2911
f7b6d104
RH
2912 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2913 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2914
2915 /* Don't even consider working with paradoxical subregs,
2916 or the moral equivalent seen here. */
470032d7 2917 if (size_x <= size_sub
d006aa54 2918 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
e9a25f70 2919 {
f7b6d104
RH
2920 /* Do a bitfield insertion to mirror what would happen
2921 in memory. */
2922
f7b6d104
RH
2923 rtx val, seq;
2924
f7b6d104
RH
2925 if (store)
2926 {
fe9b4957 2927 rtx p = PREV_INSN (insn);
de0dd934 2928
f7b6d104
RH
2929 start_sequence ();
2930 val = gen_reg_rtx (GET_MODE (x));
2931 if (! validate_change (insn, loc, val, 0))
b5bd3b3c
AS
2932 {
2933 /* Discard the current sequence and put the
2934 ADDRESSOF on stack. */
2935 end_sequence ();
2936 goto give_up;
2937 }
f7b6d104
RH
2938 seq = gen_sequence ();
2939 end_sequence ();
2940 emit_insn_before (seq, insn);
fe9b4957
MM
2941 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2942 insn, ht);
f7b6d104
RH
2943
2944 start_sequence ();
47401c4d 2945 store_bit_field (sub, size_x, 0, GET_MODE (x),
f7b6d104
RH
2946 val, GET_MODE_SIZE (GET_MODE (sub)),
2947 GET_MODE_SIZE (GET_MODE (sub)));
2948
de0dd934
R
2949 /* Make sure to unshare any shared rtl that store_bit_field
2950 might have created. */
2951 for (p = get_insns(); p; p = NEXT_INSN (p))
2952 {
2953 reset_used_flags (PATTERN (p));
2954 reset_used_flags (REG_NOTES (p));
2955 reset_used_flags (LOG_LINKS (p));
2956 }
2957 unshare_all_rtl (get_insns ());
2958
f7b6d104
RH
2959 seq = gen_sequence ();
2960 end_sequence ();
fe9b4957
MM
2961 p = emit_insn_after (seq, insn);
2962 if (NEXT_INSN (insn))
2963 compute_insns_for_mem (NEXT_INSN (insn),
2964 p ? NEXT_INSN (p) : NULL_RTX,
2965 ht);
f7b6d104
RH
2966 }
2967 else
2968 {
fe9b4957
MM
2969 rtx p = PREV_INSN (insn);
2970
f7b6d104 2971 start_sequence ();
47401c4d 2972 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
f7b6d104
RH
2973 GET_MODE (x), GET_MODE (x),
2974 GET_MODE_SIZE (GET_MODE (sub)),
2975 GET_MODE_SIZE (GET_MODE (sub)));
2976
f7b6d104 2977 if (! validate_change (insn, loc, val, 0))
b5bd3b3c
AS
2978 {
2979 /* Discard the current sequence and put the
2980 ADDRESSOF on stack. */
2981 end_sequence ();
2982 goto give_up;
2983 }
f7b6d104
RH
2984
2985 seq = gen_sequence ();
2986 end_sequence ();
2987 emit_insn_before (seq, insn);
fe9b4957
MM
2988 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2989 insn, ht);
f7b6d104
RH
2990 }
2991
b5bd3b3c
AS
2992 /* Remember the replacement so that the same one can be done
2993 on the REG_NOTES. */
8b04083b 2994 purge_bitfield_addressof_replacements
b5bd3b3c 2995 = gen_rtx_EXPR_LIST (VOIDmode, x,
8b04083b
VM
2996 gen_rtx_EXPR_LIST
2997 (VOIDmode, val,
2998 purge_bitfield_addressof_replacements));
b5bd3b3c 2999
f7b6d104 3000 /* We replaced with a reg -- all done. */
8c36698e 3001 return true;
e9a25f70
JL
3002 }
3003 }
c5c76735 3004
e9a25f70 3005 else if (validate_change (insn, loc, sub, 0))
fbdfe39c
RH
3006 {
3007 /* Remember the replacement so that the same one can be done
3008 on the REG_NOTES. */
8b04083b
VM
3009 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3010 {
3011 rtx tem;
3012
3013 for (tem = purge_addressof_replacements;
3014 tem != NULL_RTX;
3015 tem = XEXP (XEXP (tem, 1), 1))
3016 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3017 {
3018 XEXP (XEXP (tem, 1), 0) = sub;
8c36698e 3019 return true;
8b04083b
VM
3020 }
3021 purge_addressof_replacements
3022 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3023 gen_rtx_EXPR_LIST (VOIDmode, sub,
3024 purge_addressof_replacements));
8c36698e 3025 return true;
8b04083b 3026 }
fbdfe39c
RH
3027 goto restart;
3028 }
b5bd3b3c 3029 give_up:;
e9a25f70
JL
3030 /* else give up and put it into the stack */
3031 }
c5c76735 3032
e9a25f70
JL
3033 else if (code == ADDRESSOF)
3034 {
fe9b4957 3035 put_addressof_into_stack (x, ht);
aa608fe6 3036 return true;
e9a25f70 3037 }
f7b6d104
RH
3038 else if (code == SET)
3039 {
8c36698e
NC
3040 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3041 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3042 return result;
f7b6d104 3043 }
e9a25f70
JL
3044
3045 /* Scan all subexpressions. */
3046 fmt = GET_RTX_FORMAT (code);
3047 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3048 {
3049 if (*fmt == 'e')
8c36698e 3050 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
e9a25f70
JL
3051 else if (*fmt == 'E')
3052 for (j = 0; j < XVECLEN (x, i); j++)
8c36698e 3053 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
fe9b4957 3054 }
8c36698e
NC
3055
3056 return result;
fe9b4957
MM
3057}
3058
3059/* Return a new hash table entry in HT. */
3060
3061static struct hash_entry *
3062insns_for_mem_newfunc (he, ht, k)
3063 struct hash_entry *he;
3064 struct hash_table *ht;
3065 hash_table_key k ATTRIBUTE_UNUSED;
3066{
3067 struct insns_for_mem_entry *ifmhe;
3068 if (he)
3069 return he;
3070
3071 ifmhe = ((struct insns_for_mem_entry *)
3072 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3073 ifmhe->insns = NULL_RTX;
3074
3075 return &ifmhe->he;
3076}
3077
3078/* Return a hash value for K, a REG. */
3079
3080static unsigned long
3081insns_for_mem_hash (k)
3082 hash_table_key k;
3083{
3084 /* K is really a RTX. Just use the address as the hash value. */
3085 return (unsigned long) k;
3086}
3087
3088/* Return non-zero if K1 and K2 (two REGs) are the same. */
3089
3090static boolean
3091insns_for_mem_comp (k1, k2)
3092 hash_table_key k1;
3093 hash_table_key k2;
3094{
3095 return k1 == k2;
3096}
3097
3098struct insns_for_mem_walk_info {
3099 /* The hash table that we are using to record which INSNs use which
3100 MEMs. */
3101 struct hash_table *ht;
3102
 3103 /* The INSN we are currently processing. */
3104 rtx insn;
3105
3106 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3107 to find the insns that use the REGs in the ADDRESSOFs. */
3108 int pass;
3109};
3110
3111/* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3112 that might be used in an ADDRESSOF expression, record this INSN in
3113 the hash table given by DATA (which is really a pointer to an
3114 insns_for_mem_walk_info structure). */
3115
3116static int
3117insns_for_mem_walk (r, data)
3118 rtx *r;
3119 void *data;
3120{
3121 struct insns_for_mem_walk_info *ifmwi
3122 = (struct insns_for_mem_walk_info *) data;
3123
3124 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3125 && GET_CODE (XEXP (*r, 0)) == REG)
3126 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3127 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3128 {
 3129 /* Look up this REG in the hash table; it is present only if it appeared inside an ADDRESSOF on the first pass. */
3130 struct insns_for_mem_entry *ifme
3131 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3132 *r,
3133 /*create=*/0,
3134 /*copy=*/0);
3135
3136 /* If we have not already recorded this INSN, do so now. Since
3137 we process the INSNs in order, we know that if we have
 3138 recorded it, it must be at the front of the list. */
3139 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3140 {
3141 /* We do the allocation on the same obstack as is used for
3142 the hash table since this memory will not be used once
3143 the hash table is deallocated. */
3144 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3145 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3146 ifme->insns);
3147 pop_obstacks ();
3148 }
e9a25f70 3149 }
fe9b4957
MM
3150
3151 return 0;
3152}
3153
3154/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3155 which REGs in HT. */
3156
3157static void
3158compute_insns_for_mem (insns, last_insn, ht)
3159 rtx insns;
3160 rtx last_insn;
3161 struct hash_table *ht;
3162{
3163 rtx insn;
3164 struct insns_for_mem_walk_info ifmwi;
3165 ifmwi.ht = ht;
3166
3167 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3168 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3169 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3170 {
3171 ifmwi.insn = insn;
3172 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3173 }
e9a25f70
JL
3174}
3175
8c36698e
NC
3176/* Helper function for purge_addressof called through for_each_rtx.
3177 Returns true iff the rtl is an ADDRESSOF. */
3178static int
3179is_addressof (rtl, data)
3180 rtx * rtl;
3181 void * data ATTRIBUTE_UNUSED;
3182{
3183 return GET_CODE (* rtl) == ADDRESSOF;
3184}
3185
e9a25f70
JL
3186/* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3187 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3188 stack. */
3189
3190void
3191purge_addressof (insns)
3192 rtx insns;
3193{
3194 rtx insn;
fe9b4957
MM
3195 struct hash_table ht;
3196
3197 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3198 requires a fixup pass over the instruction stream to correct
3199 INSNs that depended on the REG being a REG, and not a MEM. But,
 3200 these fixup passes are slow. Furthermore, most MEMs are not
 3201 mentioned in very many instructions. So, we speed up the process
3202 by pre-calculating which REGs occur in which INSNs; that allows
3203 us to perform the fixup passes much more quickly. */
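/* In outline: the first pass of compute_insns_for_mem enters into the
   table every REG that appears inside an ADDRESSOF; the second pass then
   records, for each such REG, the list of insns mentioning it.  The
   fixup code can afterwards visit exactly those insns instead of
   rescanning the whole chain.  */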
3204 hash_table_init (&ht,
3205 insns_for_mem_newfunc,
3206 insns_for_mem_hash,
3207 insns_for_mem_comp);
3208 compute_insns_for_mem (insns, NULL_RTX, &ht);
3209
e9a25f70
JL
3210 for (insn = insns; insn; insn = NEXT_INSN (insn))
3211 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3212 || GET_CODE (insn) == CALL_INSN)
3213 {
8c36698e
NC
3214 if (! purge_addressof_1 (&PATTERN (insn), insn,
3215 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3216 /* If we could not replace the ADDRESSOFs in the insn,
3217 something is wrong. */
3218 abort ();
3219
3220 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3221 {
3222 /* If we could not replace the ADDRESSOFs in the insn's notes,
3223 we can just remove the offending notes instead. */
3224 rtx note;
3225
3226 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3227 {
3228 /* If we find a REG_RETVAL note then the insn is a libcall.
3229 Such insns must have REG_EQUAL notes as well, in order
3230 for later passes of the compiler to work. So it is not
3231 safe to delete the notes here, and instead we abort. */
3232 if (REG_NOTE_KIND (note) == REG_RETVAL)
3233 abort ();
3234 if (for_each_rtx (& note, is_addressof, NULL))
3235 remove_note (insn, note);
3236 }
3237 }
e9a25f70 3238 }
3239
3240 /* Clean up. */
3241 hash_table_free (&ht);
8b04083b 3242 purge_bitfield_addressof_replacements = 0;
da9b1f9c 3243 purge_addressof_replacements = 0;
3244}
3245\f
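/* A sketch of the transformation above; the register number is
   hypothetical.  A use such as

	(mem:SI (addressof:SI (reg:SI 100)))

   collapses to (reg:SI 100) when the address was never really needed,
   while a register whose ADDRESSOF cannot be elided is forced into a
   stack slot, the "force any needed registers into the stack" case
   described before purge_addressof.  */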
3246/* Pass through the INSNS of function FNDECL and convert virtual register
3247 references to hard register references. */
3248
3249void
3250instantiate_virtual_regs (fndecl, insns)
3251 tree fndecl;
3252 rtx insns;
3253{
3254 rtx insn;
e9a25f70 3255 int i;
3256
3257 /* Compute the offsets to use for this function. */
3258 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3259 var_offset = STARTING_FRAME_OFFSET;
3260 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3261 out_arg_offset = STACK_POINTER_OFFSET;
71038426 3262 cfa_offset = ARG_POINTER_CFA_OFFSET;
3263
3264 /* Scan all variables and parameters of this function. For each that is
3265 in memory, instantiate all virtual registers if the result is a valid
3266 address. If not, we do it later. That will handle most uses of virtual
3267 regs on many machines. */
3268 instantiate_decls (fndecl, 1);
3269
3270 /* Initialize recognition, indicating that volatile is OK. */
3271 init_recog ();
3272
3273 /* Scan through all the insns, instantiating every virtual register still
3274 present. */
3275 for (insn = insns; insn; insn = NEXT_INSN (insn))
3276 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3277 || GET_CODE (insn) == CALL_INSN)
3278 {
3279 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
5f4f0e22 3280 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3281 }
3282
3283 /* Instantiate the stack slots for the parm registers, for later use in
3284 addressof elimination. */
3285 for (i = 0; i < max_parm_reg; ++i)
3286 if (parm_reg_stack_loc[i])
3287 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3288
3289 /* Now instantiate the remaining register equivalences for debugging info.
3290 These will not be valid addresses. */
3291 instantiate_decls (fndecl, 0);
3292
3293 /* Indicate that, from now on, assign_stack_local should use
3294 frame_pointer_rtx. */
3295 virtuals_instantiated = 1;
3296}
3297
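/* For illustration only; the offset is hypothetical.  On a target
   where var_offset (STARTING_FRAME_OFFSET) is -8, the pass above
   rewrites

	(mem:SI (plus:SI (reg virtual-stack-vars) (const_int 4)))
   into
	(mem:SI (plus:SI (reg frame-pointer) (const_int -4)))

   i.e. each virtual register becomes its hard base register plus the
   offset computed at the top of instantiate_virtual_regs.  */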
3298/* Scan all decls in FNDECL (both variables and parameters) and instantiate
3299 all virtual registers in their DECL_RTL's.
3300
3301 If VALID_ONLY, do this only if the resulting address is still valid.
3302 Otherwise, always do it. */
3303
3304static void
3305instantiate_decls (fndecl, valid_only)
3306 tree fndecl;
3307 int valid_only;
3308{
3309 tree decl;
3310
e1686233 3311 if (DECL_SAVED_INSNS (fndecl))
3312 /* When compiling an inline function, the obstack used for
3313 rtl allocation is the maybepermanent_obstack. Calling
3314 `resume_temporary_allocation' switches us back to that
3315 obstack while we process this function's parameters. */
3316 resume_temporary_allocation ();
3317
3318 /* Process all parameters of the function. */
3319 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3320 {
3321 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3322
3323 instantiate_decl (DECL_RTL (decl), size, valid_only);
3324
3325 /* If the parameter was promoted, then the incoming RTL mode may be
3326 larger than the declared type size. We must use the larger of
3327 the two sizes. */
3328 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3329 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3330 }
3331
0f41302f 3332 /* Now process all variables defined in the function or its subblocks. */
3333 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3334
79c0672e 3335 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3336 {
3337 /* Save all rtl allocated for this function by raising the
3338 high-water mark on the maybepermanent_obstack. */
3339 preserve_data ();
3340 /* All further rtl allocation is now done in the current_obstack. */
3341 rtl_in_current_obstack ();
3342 }
3343}
3344
3345/* Subroutine of instantiate_decls: Process all decls in the given
3346 BLOCK node and all its subblocks. */
3347
3348static void
3349instantiate_decls_1 (let, valid_only)
3350 tree let;
3351 int valid_only;
3352{
3353 tree t;
3354
3355 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3356 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3357 valid_only);
3358
3359 /* Process all subblocks. */
3360 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3361 instantiate_decls_1 (t, valid_only);
3362}
5a73491b 3363
8008b228 3364/* Subroutine of the preceding procedures: Given RTL representing a
3365 decl and the size of the object, do any instantiation required.
3366
3367 If VALID_ONLY is non-zero, it means that the RTL should only be
3368 changed if the new address is valid. */
3369
3370static void
3371instantiate_decl (x, size, valid_only)
3372 rtx x;
3373 int size;
3374 int valid_only;
3375{
3376 enum machine_mode mode;
3377 rtx addr;
3378
3379 /* If this is not a MEM, no need to do anything. Similarly if the
3380 address is a constant or a register that is not a virtual register. */
3381
3382 if (x == 0 || GET_CODE (x) != MEM)
3383 return;
3384
3385 addr = XEXP (x, 0);
3386 if (CONSTANT_P (addr)
956d6950 3387 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3388 || (GET_CODE (addr) == REG
3389 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3390 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3391 return;
3392
3393 /* If we should only do this if the address is valid, copy the address.
3394 We need to do this so we can undo any changes that might make the
3395 address invalid. This copy is unfortunate, but probably can't be
3396 avoided. */
3397
3398 if (valid_only)
3399 addr = copy_rtx (addr);
3400
3401 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3402
3403 if (valid_only)
3404 {
3405 /* Now verify that the resulting address is valid for every integer or
3406 floating-point mode up to and including SIZE bytes long. We do this
3407 since the object might be accessed in any mode and frame addresses
3408 are shared. */
3409
3410 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3411 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3412 mode = GET_MODE_WIDER_MODE (mode))
3413 if (! memory_address_p (mode, addr))
3414 return;
5a73491b 3415
3416 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3417 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3418 mode = GET_MODE_WIDER_MODE (mode))
3419 if (! memory_address_p (mode, addr))
3420 return;
3421 }
5a73491b 3422
3423 /* Put back the address now that we have updated it and we either know
3424 it is valid or we don't care whether it is valid. */
3425
3426 XEXP (x, 0) = addr;
3427}
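/* For example (sizes hypothetical): for an 8-byte decl the two loops
   above re-validate the instantiated address in every integer and
   floating-point mode of at most 8 bytes, since a shared frame
   address might later be used to access the object in any of those
   modes.  */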
3428\f
3429/* Given a pointer to a piece of rtx and an optional pointer to the
3430 containing object, instantiate any virtual registers present in it.
3431
3432 If EXTRA_INSNS, we always do the replacement and generate
3433 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3434 is not valid.
3435
3436 Return 1 if we either had nothing to do or if we were able to do the
3437 needed replacement. Return 0 otherwise; we only return zero if
3438 EXTRA_INSNS is zero.
3439
3440 We first try some simple transformations to avoid the creation of extra
3441 pseudos. */
3442
3443static int
3444instantiate_virtual_regs_1 (loc, object, extra_insns)
3445 rtx *loc;
3446 rtx object;
3447 int extra_insns;
3448{
3449 rtx x;
3450 RTX_CODE code;
3451 rtx new = 0;
07444f1d 3452 HOST_WIDE_INT offset = 0;
3453 rtx temp;
3454 rtx seq;
3455 int i, j;
6f7d635c 3456 const char *fmt;
3457
3458 /* Re-start here to avoid recursion in common cases. */
3459 restart:
3460
3461 x = *loc;
3462 if (x == 0)
3463 return 1;
3464
3465 code = GET_CODE (x);
3466
3467 /* Check for some special cases. */
3468 switch (code)
3469 {
3470 case CONST_INT:
3471 case CONST_DOUBLE:
3472 case CONST:
3473 case SYMBOL_REF:
3474 case CODE_LABEL:
3475 case PC:
3476 case CC0:
3477 case ASM_INPUT:
3478 case ADDR_VEC:
3479 case ADDR_DIFF_VEC:
3480 case RETURN:
3481 return 1;
3482
3483 case SET:
3484 /* We are allowed to set the virtual registers. This means that
38e01259 3485 the actual register should receive the source minus the
3486 appropriate offset. This is used, for example, in the handling
3487 of non-local gotos. */
3488 if (SET_DEST (x) == virtual_incoming_args_rtx)
3489 new = arg_pointer_rtx, offset = - in_arg_offset;
3490 else if (SET_DEST (x) == virtual_stack_vars_rtx)
dfd3dae6 3491 new = frame_pointer_rtx, offset = - var_offset;
3492 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3493 new = stack_pointer_rtx, offset = - dynamic_offset;
3494 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3495 new = stack_pointer_rtx, offset = - out_arg_offset;
3496 else if (SET_DEST (x) == virtual_cfa_rtx)
3497 new = arg_pointer_rtx, offset = - cfa_offset;
3498
3499 if (new)
3500 {
3501 rtx src = SET_SRC (x);
3502
3503 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3504
3505 /* The only valid sources here are PLUS or REG. Just do
3506 the simplest possible thing to handle them. */
14a774a9 3507 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3508 abort ();
3509
3510 start_sequence ();
3511 if (GET_CODE (src) != REG)
3512 temp = force_operand (src, NULL_RTX);
6f086dfc 3513 else
14a774a9 3514 temp = src;
5f4f0e22 3515 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3516 seq = get_insns ();
3517 end_sequence ();
3518
3519 emit_insns_before (seq, object);
3520 SET_DEST (x) = new;
3521
e9a25f70 3522 if (! validate_change (object, &SET_SRC (x), temp, 0)
3523 || ! extra_insns)
3524 abort ();
3525
3526 return 1;
3527 }
3528
3529 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3530 loc = &SET_SRC (x);
3531 goto restart;
3532
3533 case PLUS:
3534 /* Handle special case of virtual register plus constant. */
3535 if (CONSTANT_P (XEXP (x, 1)))
3536 {
b1f82ccf 3537 rtx old, new_offset;
3538
3539 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3540 if (GET_CODE (XEXP (x, 0)) == PLUS)
3541 {
3542 rtx inner = XEXP (XEXP (x, 0), 0);
3543
3544 if (inner == virtual_incoming_args_rtx)
3545 new = arg_pointer_rtx, offset = in_arg_offset;
3546 else if (inner == virtual_stack_vars_rtx)
3547 new = frame_pointer_rtx, offset = var_offset;
3548 else if (inner == virtual_stack_dynamic_rtx)
3549 new = stack_pointer_rtx, offset = dynamic_offset;
3550 else if (inner == virtual_outgoing_args_rtx)
3551 new = stack_pointer_rtx, offset = out_arg_offset;
3552 else if (inner == virtual_cfa_rtx)
3553 new = arg_pointer_rtx, offset = cfa_offset;
3554 else
3555 {
3556 loc = &XEXP (x, 0);
3557 goto restart;
3558 }
3559
3560 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3561 extra_insns);
38a448ca 3562 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3563 }
3564
3565 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3566 new = arg_pointer_rtx, offset = in_arg_offset;
3567 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3568 new = frame_pointer_rtx, offset = var_offset;
3569 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3570 new = stack_pointer_rtx, offset = dynamic_offset;
3571 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3572 new = stack_pointer_rtx, offset = out_arg_offset;
3573 else if (XEXP (x, 0) == virtual_cfa_rtx)
3574 new = arg_pointer_rtx, offset = cfa_offset;
3575 else
3576 {
3577 /* We know the second operand is a constant. Unless the
3578 first operand is a REG (which has already been checked),
3579 it needs to be checked. */
3580 if (GET_CODE (XEXP (x, 0)) != REG)
3581 {
3582 loc = &XEXP (x, 0);
3583 goto restart;
3584 }
3585 return 1;
3586 }
3587
b1f82ccf 3588 new_offset = plus_constant (XEXP (x, 1), offset);
6f086dfc 3589
3590 /* If the new constant is zero, try to replace the sum with just
3591 the register. */
3592 if (new_offset == const0_rtx
3593 && validate_change (object, loc, new, 0))
3594 return 1;
3595
3596 /* Next try to replace the register and new offset.
3597 There are two changes to validate here, and we can't assume that
3598 when the old offset equals the new one, just changing the register
3599 will yield a valid insn. In the interests of a little efficiency,
3600 however, we only call validate_change once (we don't queue up the
0f41302f 3601 changes and then call apply_change_group). */
3602
3603 old = XEXP (x, 0);
3604 if (offset == 0
3605 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3606 : (XEXP (x, 0) = new,
3607 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3608 {
3609 if (! extra_insns)
3610 {
3611 XEXP (x, 0) = old;
3612 return 0;
3613 }
3614
3615 /* Otherwise copy the new constant into a register and replace
3616 constant with that register. */
3617 temp = gen_reg_rtx (Pmode);
b1f82ccf 3618 XEXP (x, 0) = new;
6f086dfc 3619 if (validate_change (object, &XEXP (x, 1), temp, 0))
b1f82ccf 3620 emit_insn_before (gen_move_insn (temp, new_offset), object);
3621 else
3622 {
3623 /* If that didn't work, replace this expression with a
3624 register containing the sum. */
3625
6f086dfc 3626 XEXP (x, 0) = old;
38a448ca 3627 new = gen_rtx_PLUS (Pmode, new, new_offset);
3628
3629 start_sequence ();
5f4f0e22 3630 temp = force_operand (new, NULL_RTX);
3631 seq = get_insns ();
3632 end_sequence ();
3633
3634 emit_insns_before (seq, object);
3635 if (! validate_change (object, loc, temp, 0)
3636 && ! validate_replace_rtx (x, temp, object))
3637 abort ();
3638 }
3639 }
3640
3641 return 1;
3642 }
3643
3644 /* Fall through to generic two-operand expression case. */
3645 case EXPR_LIST:
3646 case CALL:
3647 case COMPARE:
3648 case MINUS:
3649 case MULT:
3650 case DIV: case UDIV:
3651 case MOD: case UMOD:
3652 case AND: case IOR: case XOR:
3653 case ROTATERT: case ROTATE:
3654 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3655 case NE: case EQ:
3656 case GE: case GT: case GEU: case GTU:
3657 case LE: case LT: case LEU: case LTU:
3658 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3659 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3660 loc = &XEXP (x, 0);
3661 goto restart;
3662
3663 case MEM:
3664 /* Most cases of MEM that convert to valid addresses have already been
4fd796bb 3665 handled by our scan of decls. The only special handling we
6f086dfc 3666 need here is to make a copy of the rtx to ensure it isn't being
b335c2cc 3667 shared if we have to change it to a pseudo.
3668
3669 If the rtx is a simple reference to an address via a virtual register,
3670 it can potentially be shared. In such cases, first try to make it
3671 a valid address, which can also be shared. Otherwise, copy it and
3672 proceed normally.
3673
3674 First check for common cases that need no processing. These are
3675 usually due to instantiation already being done on a previous instance
3676 of a shared rtx. */
3677
3678 temp = XEXP (x, 0);
3679 if (CONSTANT_ADDRESS_P (temp)
3680#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3681 || temp == arg_pointer_rtx
3682#endif
3683#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3684 || temp == hard_frame_pointer_rtx
3685#endif
3686 || temp == frame_pointer_rtx)
3687 return 1;
3688
3689 if (GET_CODE (temp) == PLUS
3690 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3691 && (XEXP (temp, 0) == frame_pointer_rtx
3692#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3693 || XEXP (temp, 0) == hard_frame_pointer_rtx
3694#endif
3695#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3696 || XEXP (temp, 0) == arg_pointer_rtx
3697#endif
3698 ))
3699 return 1;
3700
3701 if (temp == virtual_stack_vars_rtx
3702 || temp == virtual_incoming_args_rtx
3703 || (GET_CODE (temp) == PLUS
3704 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3705 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3706 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3707 {
3708 /* This MEM may be shared. If the substitution can be done without
3709 the need to generate new pseudos, we want to do it in place
3710 so all copies of the shared rtx benefit. The call below will
3711 only make substitutions if the resulting address is still
3712 valid.
3713
3714 Note that we cannot pass X as the object in the recursive call
3715 since the insn being processed may not allow all valid
3716 addresses. However, if we were not passed an object, we can
3717 only modify X without copying it if X will have a valid
3718 address.
6f086dfc 3719
3720 ??? Also note that this can still lose if OBJECT is an insn that
3721 has fewer restrictions on an address than some other insn.
3722 In that case, we will modify the shared address. This case
3723 doesn't seem very likely, though. One case where this could
3724 happen is in the case of a USE or CLOBBER reference, but we
3725 take care of that below. */
3726
3727 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3728 object ? object : x, 0))
3729 return 1;
3730
3731 /* Otherwise make a copy and process that copy. We copy the entire
3732 RTL expression since it might be a PLUS which could also be
3733 shared. */
3734 *loc = x = copy_rtx (x);
3735 }
3736
3737 /* Fall through to generic unary operation case. */
3738 case SUBREG:
3739 case STRICT_LOW_PART:
3740 case NEG: case NOT:
3741 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3742 case SIGN_EXTEND: case ZERO_EXTEND:
3743 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3744 case FLOAT: case FIX:
3745 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3746 case ABS:
3747 case SQRT:
3748 case FFS:
3749 /* These cases either have just one operand or we know that we need not
3750 check the rest of the operands. */
3751 loc = &XEXP (x, 0);
3752 goto restart;
3753
3754 case USE:
3755 case CLOBBER:
3756 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3757 go ahead and make the invalid one, but do it to a copy. For a REG,
3758 just make the recursive call, since there's no chance of a problem. */
3759
3760 if ((GET_CODE (XEXP (x, 0)) == MEM
3761 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3762 0))
3763 || (GET_CODE (XEXP (x, 0)) == REG
7694ce35 3764 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3765 return 1;
3766
3767 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3768 loc = &XEXP (x, 0);
3769 goto restart;
3770
3771 case REG:
3772 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3773 in front of this insn and substitute the temporary. */
3774 if (x == virtual_incoming_args_rtx)
3775 new = arg_pointer_rtx, offset = in_arg_offset;
3776 else if (x == virtual_stack_vars_rtx)
3777 new = frame_pointer_rtx, offset = var_offset;
3778 else if (x == virtual_stack_dynamic_rtx)
3779 new = stack_pointer_rtx, offset = dynamic_offset;
3780 else if (x == virtual_outgoing_args_rtx)
3781 new = stack_pointer_rtx, offset = out_arg_offset;
3782 else if (x == virtual_cfa_rtx)
3783 new = arg_pointer_rtx, offset = cfa_offset;
3784
3785 if (new)
3786 {
3787 temp = plus_constant (new, offset);
3788 if (!validate_change (object, loc, temp, 0))
3789 {
3790 if (! extra_insns)
3791 return 0;
3792
3793 start_sequence ();
5f4f0e22 3794 temp = force_operand (temp, NULL_RTX);
3795 seq = get_insns ();
3796 end_sequence ();
3797
3798 emit_insns_before (seq, object);
3799 if (! validate_change (object, loc, temp, 0)
3800 && ! validate_replace_rtx (x, temp, object))
3801 abort ();
3802 }
3803 }
3804
3805 return 1;
3806
3807 case ADDRESSOF:
3808 if (GET_CODE (XEXP (x, 0)) == REG)
3809 return 1;
3810
3811 else if (GET_CODE (XEXP (x, 0)) == MEM)
3812 {
3813 /* If we have a (addressof (mem ..)), do any instantiation inside
3814 since we know we'll be making the inside valid when we finally
3815 remove the ADDRESSOF. */
3816 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3817 return 1;
3818 }
3819 break;
3820
3821 default:
3822 break;
3823 }
3824
3825 /* Scan all subexpressions. */
3826 fmt = GET_RTX_FORMAT (code);
3827 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3828 if (*fmt == 'e')
3829 {
3830 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3831 return 0;
3832 }
3833 else if (*fmt == 'E')
3834 for (j = 0; j < XVECLEN (x, i); j++)
3835 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3836 extra_insns))
3837 return 0;
3838
3839 return 1;
3840}
3841\f
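/* A worked example of the PLUS case above, with a hypothetical
   in_arg_offset of 16:

	(plus:SI (reg virtual-incoming-args) (const_int 4))

   first becomes (plus:SI (reg arg-pointer) (const_int 20)).  If
   validate_change rejects that, the new constant is moved into a
   fresh pseudo; as a last resort the whole sum is computed by
   force_operand in a sequence emitted before OBJECT.  */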
3842/* Optimization: assuming this function does not receive nonlocal gotos,
3843 delete the handlers for such, as well as the insns to establish
3844 and disestablish them. */
3845
3846static void
3847delete_handlers ()
3848{
3849 rtx insn;
3850 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3851 {
3852 /* Delete the handler by turning off the flag that would
3853 prevent jump_optimize from deleting it.
3854 Also permit deletion of the nonlocal labels themselves
3855 if nothing local refers to them. */
3856 if (GET_CODE (insn) == CODE_LABEL)
3857 {
3858 tree t, last_t;
3859
3860 LABEL_PRESERVE_P (insn) = 0;
3861
3862 /* Remove it from the nonlocal_label list, to avoid confusing
3863 flow. */
3864 for (t = nonlocal_labels, last_t = 0; t;
3865 last_t = t, t = TREE_CHAIN (t))
3866 if (DECL_RTL (TREE_VALUE (t)) == insn)
3867 break;
3868 if (t)
3869 {
3870 if (! last_t)
3871 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3872 else
3873 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3874 }
3875 }
3876 if (GET_CODE (insn) == INSN)
3877 {
3878 int can_delete = 0;
3879 rtx t;
3880 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3881 if (reg_mentioned_p (t, PATTERN (insn)))
3882 {
3883 can_delete = 1;
3884 break;
3885 }
3886 if (can_delete
3887 || (nonlocal_goto_stack_level != 0
3888 && reg_mentioned_p (nonlocal_goto_stack_level,
3889 PATTERN (insn))))
3890 delete_insn (insn);
3891 }
3892 }
3893}
3894\f
3895/* Output a USE for any register use in RTL.
3896 This is used with -noreg to mark the extent of lifespan
3897 of any registers used in a user-visible variable's DECL_RTL. */
3898
3899void
3900use_variable (rtl)
3901 rtx rtl;
3902{
3903 if (GET_CODE (rtl) == REG)
3904 /* This is a register variable. */
38a448ca 3905 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3906 else if (GET_CODE (rtl) == MEM
3907 && GET_CODE (XEXP (rtl, 0)) == REG
3908 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3909 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3910 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3911 /* This is a variable-sized structure. */
38a448ca 3912 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3913}
3914
3915/* Like use_variable except that it outputs the USEs after INSN
3916 instead of at the end of the insn-chain. */
3917
3918void
3919use_variable_after (rtl, insn)
3920 rtx rtl, insn;
3921{
3922 if (GET_CODE (rtl) == REG)
3923 /* This is a register variable. */
38a448ca 3924 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3925 else if (GET_CODE (rtl) == MEM
3926 && GET_CODE (XEXP (rtl, 0)) == REG
3927 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3928 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3929 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3930 /* This is a variable-sized structure. */
38a448ca 3931 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3932}
3933\f
3934int
3935max_parm_reg_num ()
3936{
3937 return max_parm_reg;
3938}
3939
3940/* Return the first insn following those generated by `assign_parms'. */
3941
3942rtx
3943get_first_nonparm_insn ()
3944{
3945 if (last_parm_insn)
3946 return NEXT_INSN (last_parm_insn);
3947 return get_insns ();
3948}
3949
3950/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3951 Crash if there is none. */
3952
3953rtx
3954get_first_block_beg ()
3955{
3956 register rtx searcher;
3957 register rtx insn = get_first_nonparm_insn ();
3958
3959 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3960 if (GET_CODE (searcher) == NOTE
3961 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3962 return searcher;
3963
3964 abort (); /* Invalid call to this function. (See comments above.) */
3965 return NULL_RTX;
3966}
3967
3968/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3969 This means a type for which function calls must pass an address to the
3970 function or get an address back from the function.
3971 EXP may be a type node or an expression (whose type is tested). */
3972
3973int
3974aggregate_value_p (exp)
3975 tree exp;
3976{
3977 int i, regno, nregs;
3978 rtx reg;
3979 tree type;
3980 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3981 type = exp;
3982 else
3983 type = TREE_TYPE (exp);
3984
3985 if (RETURN_IN_MEMORY (type))
6f086dfc 3986 return 1;
956d6950 3987 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3988 and thus can't be returned in registers. */
3989 if (TREE_ADDRESSABLE (type))
3990 return 1;
05e3bdb9 3991 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 3992 return 1;
3993 /* Make sure we have suitable call-clobbered regs to return
3994 the value in; if not, we must return it in memory. */
4dc07bd7 3995 reg = hard_function_value (type, 0, 0);
3996
3997 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3998 it is OK. */
3999 if (GET_CODE (reg) != REG)
4000 return 0;
4001
9d790a4f 4002 regno = REGNO (reg);
d181c154 4003 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4004 for (i = 0; i < nregs; i++)
4005 if (! call_used_regs[regno + i])
4006 return 1;
4007 return 0;
4008}
4009\f
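/* Illustrative consequences (target-dependent): with
   -fpcc-struct-return every aggregate type returns 1 here, and even
   without it a type whose hard_function_value REG spans a call-saved
   register must be returned in memory, per the call_used_regs loop
   above.  */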
4010/* Assign RTL expressions to the function's parameters.
4011 This may involve copying them into registers and using
0d1416c6 4012 those registers as the RTL for them. */
4013
4014void
0d1416c6 4015assign_parms (fndecl)
6f086dfc 4016 tree fndecl;
4017{
4018 register tree parm;
4019 register rtx entry_parm = 0;
4020 register rtx stack_parm = 0;
4021 CUMULATIVE_ARGS args_so_far;
4022 enum machine_mode promoted_mode, passed_mode;
4023 enum machine_mode nominal_mode, promoted_nominal_mode;
00d8a4c1 4024 int unsignedp;
4025 /* Total space needed so far for args on the stack,
4026 given as a constant and a tree-expression. */
4027 struct args_size stack_args_size;
4028 tree fntype = TREE_TYPE (fndecl);
4029 tree fnargs = DECL_ARGUMENTS (fndecl);
4030 /* This is used for the arg pointer when referring to stack args. */
4031 rtx internal_arg_pointer;
4032 /* This is a dummy PARM_DECL that we used for the function result if
4033 the function returns a structure. */
4034 tree function_result_decl = 0;
54ea1de9 4035#ifdef SETUP_INCOMING_VARARGS
6f086dfc 4036 int varargs_setup = 0;
54ea1de9 4037#endif
3412b298 4038 rtx conversion_insns = 0;
4fc026cd 4039 struct args_size alignment_pad;
4040
4041 /* Nonzero if the last arg is named `__builtin_va_alist',
4042 which is used on some machines for old-fashioned non-ANSI varargs.h;
4043 this should be stuck onto the stack as if it had arrived there. */
4044 int hide_last_arg
4045 = (current_function_varargs
4046 && fnargs
4047 && (parm = tree_last (fnargs)) != 0
4048 && DECL_NAME (parm)
4049 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4050 "__builtin_va_alist")));
4051
4052 /* Nonzero if function takes extra anonymous args.
4053 This means the last named arg must be on the stack
0f41302f 4054 right before the anonymous ones. */
4055 int stdarg
4056 = (TYPE_ARG_TYPES (fntype) != 0
4057 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4058 != void_type_node));
4059
4060 current_function_stdarg = stdarg;
4061
4062 /* If the reg that the virtual arg pointer will be translated into is
4063 not a fixed reg or is the stack pointer, make a copy of the virtual
4064 arg pointer, and address parms via the copy. The frame pointer is
4065 considered fixed even though it is not marked as such.
4066
4067 The second time through, simply use ap to avoid generating rtx. */
4068
4069 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4070 || ! (fixed_regs[ARG_POINTER_REGNUM]
0d1416c6 4071 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4072 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4073 else
4074 internal_arg_pointer = virtual_incoming_args_rtx;
4075 current_function_internal_arg_pointer = internal_arg_pointer;
4076
4077 stack_args_size.constant = 0;
4078 stack_args_size.var = 0;
4079
4080 /* If struct value address is treated as the first argument, make it so. */
4081 if (aggregate_value_p (DECL_RESULT (fndecl))
4082 && ! current_function_returns_pcc_struct
4083 && struct_value_incoming_rtx == 0)
4084 {
f9f29478 4085 tree type = build_pointer_type (TREE_TYPE (fntype));
6f086dfc 4086
5f4f0e22 4087 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4088
4089 DECL_ARG_TYPE (function_result_decl) = type;
4090 TREE_CHAIN (function_result_decl) = fnargs;
4091 fnargs = function_result_decl;
4092 }
4093
e9a25f70 4094 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
e2ecd91c 4095 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4096
4097#ifdef INIT_CUMULATIVE_INCOMING_ARGS
ea0d4c4b 4098 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
6f086dfc 4099#else
2c7ee1a6 4100 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4101#endif
4102
4103 /* We haven't yet found an argument that we must push and pretend the
4104 caller did. */
4105 current_function_pretend_args_size = 0;
4106
4107 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4108 {
05e3bdb9 4109 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4110 struct args_size stack_offset;
4111 struct args_size arg_size;
4112 int passed_pointer = 0;
621061f4 4113 int did_conversion = 0;
6f086dfc 4114 tree passed_type = DECL_ARG_TYPE (parm);
621061f4 4115 tree nominal_type = TREE_TYPE (parm);
9ab70a9b 4116 int pretend_named;
4117
4118 /* Set LAST_NAMED if this is the last named arg before some
bf9c83fe 4119 anonymous args. */
4120 int last_named = ((TREE_CHAIN (parm) == 0
4121 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3b69d50e 4122 && (stdarg || current_function_varargs));
4123 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4124 most machines, if this is a varargs/stdarg function, then we treat
4125 the last named arg as if it were anonymous too. */
e5e809f4 4126 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4127
4128 if (TREE_TYPE (parm) == error_mark_node
4129 /* This can happen after weird syntax errors
4130 or if an enum type is defined among the parms. */
4131 || TREE_CODE (parm) != PARM_DECL
4132 || passed_type == NULL)
4133 {
4134 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4135 = gen_rtx_MEM (BLKmode, const0_rtx);
4136 TREE_USED (parm) = 1;
4137 continue;
4138 }
4139
4140 /* For a varargs.h function, save info about regs and stack space
4141 used by the individual args, not including the va_alist arg. */
3b69d50e 4142 if (hide_last_arg && last_named)
4143 current_function_args_info = args_so_far;
4144
4145 /* Find mode of arg as it is passed, and mode of arg
4146 as it should be during execution of this function. */
4147 passed_mode = TYPE_MODE (passed_type);
621061f4 4148 nominal_mode = TYPE_MODE (nominal_type);
6f086dfc 4149
4150 /* If the parm's mode is VOID, its value doesn't matter,
4151 and avoid the usual things like emit_move_insn that could crash. */
4152 if (nominal_mode == VOIDmode)
4153 {
4154 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4155 continue;
4156 }
4157
4158 /* If the parm is to be passed as a transparent union, use the
4159 type of the first field for the tests below. We have already
4160 verified that the modes are the same. */
4161 if (DECL_TRANSPARENT_UNION (parm)
4162 || TYPE_TRANSPARENT_UNION (passed_type))
4163 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4164
4165 /* See if this arg was passed by invisible reference. It is if
4166 it is an object whose size depends on the contents of the
4167 object itself or if the machine requires these objects be passed
4168 that way. */
4169
4170 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4171 && contains_placeholder_p (TYPE_SIZE (passed_type)))
657bb6dc 4172 || TREE_ADDRESSABLE (passed_type)
6f086dfc 4173#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
a14ae508 4174 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
bf9c83fe 4175 passed_type, named_arg)
4176#endif
4177 )
6f086dfc 4178 {
621061f4 4179 passed_type = nominal_type = build_pointer_type (passed_type);
4180 passed_pointer = 1;
4181 passed_mode = nominal_mode = Pmode;
4182 }
6f086dfc 4183
4184 promoted_mode = passed_mode;
4185
4186#ifdef PROMOTE_FUNCTION_ARGS
4187 /* Compute the mode to which the arg is actually extended. */
7940255d 4188 unsignedp = TREE_UNSIGNED (passed_type);
a5a52dbc 4189 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4190#endif
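      /* For example, on a hypothetical 32-bit target that defines
	 PROMOTE_FUNCTION_ARGS, a `short' parm has passed_mode HImode
	 but arrives extended to a full word, so promoted_mode becomes
	 SImode and unsignedp records which extension was used.  */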
4191
4192 /* Let machine desc say which reg (if any) the parm arrives in.
4193 0 means it arrives on the stack. */
4194#ifdef FUNCTION_INCOMING_ARG
a53e14c0 4195 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
bf9c83fe 4196 passed_type, named_arg);
6f086dfc 4197#else
a53e14c0 4198 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
bf9c83fe 4199 passed_type, named_arg);
4200#endif
4201
4202 if (entry_parm == 0)
4203 promoted_mode = passed_mode;
a53e14c0 4204
4205#ifdef SETUP_INCOMING_VARARGS
4206 /* If this is the last named parameter, do any required setup for
4207 varargs or stdargs. We need to know about the case of this being an
4208 addressable type, in which case we skip the registers it
4209 would have arrived in.
4210
4211 For stdargs, LAST_NAMED will be set for two parameters, the one that
4212 is actually the last named, and the dummy parameter. We only
4213 want to do this action once.
4214
4215 Also, indicate when RTL generation is to be suppressed. */
4216 if (last_named && !varargs_setup)
4217 {
621061f4 4218 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
0d1416c6 4219 current_function_pretend_args_size, 0);
4220 varargs_setup = 1;
4221 }
4222#endif
4223
4224 /* Determine parm's home in the stack,
4225 in case it arrives in the stack or we should pretend it did.
4226
4227 Compute the stack position and rtx where the argument arrives
4228 and its size.
4229
4230 There is one complexity here: If this was a parameter that would
4231 have been passed in registers, but wasn't only because it is
4232 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4233 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4234 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4235 0 as it was the previous time. */
4236
9ab70a9b 4237 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
0f11903b 4238 locate_and_pad_parm (promoted_mode, passed_type,
4239#ifdef STACK_PARMS_IN_REG_PARM_AREA
4240 1,
4241#else
4242#ifdef FUNCTION_INCOMING_ARG
621061f4 4243 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
6f086dfc 4244 passed_type,
9ab70a9b 4245 pretend_named) != 0,
6f086dfc 4246#else
621061f4 4247 FUNCTION_ARG (args_so_far, promoted_mode,
6f086dfc 4248 passed_type,
9ab70a9b 4249 pretend_named) != 0,
4250#endif
4251#endif
4252 fndecl, &stack_args_size, &stack_offset, &arg_size,
4253 &alignment_pad);
6f086dfc 4254
4255 {
4256 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4257
4258 if (offset_rtx == const0_rtx)
4259 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4260 else
4261 stack_parm = gen_rtx_MEM (promoted_mode,
4262 gen_rtx_PLUS (Pmode,
4263 internal_arg_pointer,
4264 offset_rtx));
4265
4266 /* If this is a memory ref that contains aggregate components,
4267 mark it as such for cse and loop optimize. Likewise if it
4268 is readonly. */
4269 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4270 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4271 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4272 }
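      /* For illustration only; the offset is hypothetical.  For an int
	 argument 4 bytes into the incoming argument block, the braces
	 above produce

	    (mem:SI (plus:SI (reg internal-arg-pointer) (const_int 4)))

	 whereas an argument at offset zero addresses the arg pointer
	 directly.  */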
4273
4274 /* If this parameter was passed both in registers and in the stack,
4275 use the copy on the stack. */
621061f4 4276 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4277 entry_parm = 0;
4278
461beb10 4279#ifdef FUNCTION_ARG_PARTIAL_NREGS
4280 /* If this parm was passed part in regs and part in memory,
4281 pretend it arrived entirely in memory
4282 by pushing the register-part onto the stack.
4283
4284 In the special case of a DImode or DFmode that is split,
4285 we could put it together in a pseudoreg directly,
4286 but for now that's not worth bothering with. */
4287
4288 if (entry_parm)
4289 {
621061f4 4290 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
bf9c83fe 4291 passed_type, named_arg);
4292
4293 if (nregs > 0)
4294 {
4295 current_function_pretend_args_size
4296 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4297 / (PARM_BOUNDARY / BITS_PER_UNIT)
4298 * (PARM_BOUNDARY / BITS_PER_UNIT));
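	    /* Worked example (values hypothetical): with nregs == 3,
	       UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, the 12
	       register bytes round up to 16, the next multiple of the
	       8-byte parm boundary.  */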
4299
4300 /* Handle calls that pass values in multiple non-contiguous
4301 locations. The Irix 6 ABI has examples of this. */
4302 if (GET_CODE (entry_parm) == PARALLEL)
4303 emit_group_store (validize_mem (stack_parm), entry_parm,
4304 int_size_in_bytes (TREE_TYPE (parm)),
4305 (TYPE_ALIGN (TREE_TYPE (parm))
4306 / BITS_PER_UNIT));
4307 else
4308 move_block_from_reg (REGNO (entry_parm),
4309 validize_mem (stack_parm), nregs,
4310 int_size_in_bytes (TREE_TYPE (parm)));
4311
4312 entry_parm = stack_parm;
4313 }
4314 }
461beb10 4315#endif
4316
4317 /* If we didn't decide this parm came in a register,
4318 by default it came on the stack. */
4319 if (entry_parm == 0)
4320 entry_parm = stack_parm;
4321
4322 /* Record permanently how this parm was passed. */
0d1416c6 4323 DECL_INCOMING_RTL (parm) = entry_parm;
4324
4325 /* If there is actually space on the stack for this parm,
4326 count it in stack_args_size; otherwise set stack_parm to 0
4327 to indicate there is no preallocated stack slot for the parm. */
4328
4329 if (entry_parm == stack_parm
4330 || (GET_CODE (entry_parm) == PARALLEL
4331 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
d9ca49d5 4332#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 4333 /* On some machines, even if a parm value arrives in a register
4334 there is still an (uninitialized) stack slot allocated for it.
4335
4336 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4337 whether this parameter already has a stack slot allocated,
4338 because an arg block exists only if current_function_args_size
abc95ed3 4339 is larger than some threshold, and we haven't calculated that
4340 yet. So, for now, we just assume that stack slots never exist
4341 in this case. */
4342 || REG_PARM_STACK_SPACE (fndecl) > 0
4343#endif
4344 )
4345 {
4346 stack_args_size.constant += arg_size.constant;
4347 if (arg_size.var)
4348 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4349 }
4350 else
4351 /* No stack slot was pushed for this parm. */
4352 stack_parm = 0;
4353
4354 /* Update info on where next arg arrives in registers. */
4355
621061f4 4356 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
bf9c83fe 4357 passed_type, named_arg);
6f086dfc 4358
4359 /* If we can't trust the parm stack slot to be aligned enough
4360 for its ultimate type, don't use that slot after entry.
4361 We'll make another stack slot, if we need one. */
4362 {
e16c591a 4363 int thisparm_boundary
621061f4 4364 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4365
4366 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4367 stack_parm = 0;
4368 }
4369
4370 /* If parm was passed in memory, and we need to convert it on entry,
4371 don't store it back in that same slot. */
4372 if (entry_parm != 0
4373 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4374 stack_parm = 0;
4375
4376#if 0
4377 /* Now adjust STACK_PARM to the mode and precise location
4378 where this parameter should live during execution,
4379 if we discover that it must live in the stack during execution.
4380 To make debuggers happier on big-endian machines, we store
4381 the value in the last bytes of the space available. */
4382
4383 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4384 && stack_parm != 0)
4385 {
4386 rtx offset_rtx;
4387
4388 if (BYTES_BIG_ENDIAN
4389 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4390 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4391 - GET_MODE_SIZE (nominal_mode));
4392
4393 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4394 if (offset_rtx == const0_rtx)
38a448ca 4395 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
6f086dfc 4396 else
4397 stack_parm = gen_rtx_MEM (nominal_mode,
4398 gen_rtx_PLUS (Pmode,
4399 internal_arg_pointer,
4400 offset_rtx));
4401
4402 /* If this is a memory ref that contains aggregate components,
4403 mark it as such for cse and loop optimize. */
c6df88cb 4404 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
6f086dfc 4405 }
cb61f66f 4406#endif /* 0 */
4407
4408 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4409 in the mode in which it arrives.
4410 STACK_PARM is an RTX for a stack slot where the parameter can live
4411 during the function (in case we want to put it there).
4412 STACK_PARM is 0 if no stack slot was pushed for it.
4413
4414 Now output code if necessary to convert ENTRY_PARM to
4415 the type in which this function declares it,
4416 and store that result in an appropriate place,
4417 which may be a pseudo reg, may be STACK_PARM,
4418 or may be a local stack slot if STACK_PARM is 0.
4419
4420 Set DECL_RTL to that place. */
4421
5c4cdc9f 4422 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4423 {
4424 /* If a BLKmode arrives in registers, copy it to a stack slot.
4425 Handle calls that pass values in multiple non-contiguous
4426 locations. The Irix 6 ABI has examples of this. */
4427 if (GET_CODE (entry_parm) == REG
4428 || GET_CODE (entry_parm) == PARALLEL)
6f086dfc 4429 {
4430 int size_stored
4431 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4432 UNITS_PER_WORD);
4433
4434 /* Note that we will be storing an integral number of words.
4435 So we have to be careful to ensure that we allocate an
4436 integral number of words. We do this below in the
4437 assign_stack_local if space was not allocated in the argument
4438 list. If it was, this will not work if PARM_BOUNDARY is not
4439 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4440 if it becomes a problem. */
4441
4442 if (stack_parm == 0)
4443 {
4444 stack_parm
4445 = assign_stack_local (GET_MODE (entry_parm),
4446 size_stored, 0);
4447
4448 /* If this is a memory ref that contains aggregate
4449 components, mark it as such for cse and loop optimize. */
c6df88cb 4450 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4451 }
4452
4453 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4454 abort ();
4455
4456 if (TREE_READONLY (parm))
4457 RTX_UNCHANGING_P (stack_parm) = 1;
4458
4459 /* Handle calls that pass values in multiple non-contiguous
4460 locations. The Irix 6 ABI has examples of this. */
4461 if (GET_CODE (entry_parm) == PARALLEL)
4462 emit_group_store (validize_mem (stack_parm), entry_parm,
4463 int_size_in_bytes (TREE_TYPE (parm)),
4464 (TYPE_ALIGN (TREE_TYPE (parm))
4465 / BITS_PER_UNIT));
4466 else
4467 move_block_from_reg (REGNO (entry_parm),
4468 validize_mem (stack_parm),
4469 size_stored / UNITS_PER_WORD,
4470 int_size_in_bytes (TREE_TYPE (parm)));
4471 }
4472 DECL_RTL (parm) = stack_parm;
4473 }
74bd77a8 4474 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 4475 && ! DECL_INLINE (fndecl))
4476 /* layout_decl may set this. */
4477 || TREE_ADDRESSABLE (parm)
4478 || TREE_SIDE_EFFECTS (parm)
4479 /* If -ffloat-store specified, don't put explicit
4480 float variables into registers. */
4481 || (flag_float_store
4482 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4483 /* Always assign pseudo to structure return or item passed
4484 by invisible reference. */
4485 || passed_pointer || parm == function_result_decl)
4486 {
4487 /* Store the parm in a pseudoregister during the function, but we
4488 may need to do it in a wider mode. */
4489
4490 register rtx parmreg;
4e86caed 4491 int regno, regnoi = 0, regnor = 0;
4492
4493 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
cd5b3469 4494
4495 promoted_nominal_mode
4496 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
6f086dfc 4497
621061f4 4498 parmreg = gen_reg_rtx (promoted_nominal_mode);
ddb7361a 4499 mark_user_reg (parmreg);
4500
4501 /* If this was an item that we received a pointer to, set DECL_RTL
4502 appropriately. */
4503 if (passed_pointer)
4504 {
621061f4 4505 DECL_RTL (parm)
38a448ca 4506 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
c6df88cb 4507 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4508 }
4509 else
4510 DECL_RTL (parm) = parmreg;
4511
4512 /* Copy the value into the register. */
4513 if (nominal_mode != passed_mode
4514 || promoted_nominal_mode != promoted_mode)
86f8eff3 4515 {
efd8cba0 4516 int save_tree_used;
4517 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4518 mode, by the caller. We now have to convert it to
4519 NOMINAL_MODE, if different. However, PARMREG may be in
956d6950 4520 a different mode than NOMINAL_MODE if it is being stored
4521 promoted.
4522
4523 If ENTRY_PARM is a hard register, it might be in a register
4524 not valid for operating in its mode (e.g., an odd-numbered
4525 register for a DFmode). In that case, moves are the only
4526 thing valid, so we can't do a convert from there. This
4527 occurs when the calling sequence allows such misaligned
4528 usages.
4529
4530 In addition, the conversion may involve a call, which could
4531 clobber parameters which haven't been copied to pseudo
4532 registers yet. Therefore, we must first copy the parm to
4533 a pseudo reg here, and save the conversion until after all
4534 parameters have been moved. */
4535
4536 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4537
4538 emit_move_insn (tempreg, validize_mem (entry_parm));
4539
4540 push_to_sequence (conversion_insns);
4541 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4542
4543 /* TREE_USED gets set erroneously during expand_assignment. */
4544 save_tree_used = TREE_USED (parm);
4545 expand_assignment (parm,
4546 make_tree (nominal_type, tempreg), 0, 0);
efd8cba0 4547 TREE_USED (parm) = save_tree_used;
3412b298 4548 conversion_insns = get_insns ();
621061f4 4549 did_conversion = 1;
3412b298 4550 end_sequence ();
86f8eff3 4551 }
4552 else
4553 emit_move_insn (parmreg, validize_mem (entry_parm));
4554
4555 /* If we were passed a pointer but the actual value
4556 can safely live in a register, put it in one. */
16bae307 4557 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4558 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4559 && ! DECL_INLINE (fndecl))
4560 /* layout_decl may set this. */
4561 || TREE_ADDRESSABLE (parm)
4562 || TREE_SIDE_EFFECTS (parm)
4563 /* If -ffloat-store specified, don't put explicit
4564 float variables into registers. */
4565 || (flag_float_store
4566 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4567 {
4568 /* We can't use nominal_mode, because it will have been set to
4569 Pmode above. We must use the actual mode of the parm. */
4570 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
ddb7361a 4571 mark_user_reg (parmreg);
4572 emit_move_insn (parmreg, DECL_RTL (parm));
4573 DECL_RTL (parm) = parmreg;
4574 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4575 now the parm. */
4576 stack_parm = 0;
74bd77a8 4577 }
4578#ifdef FUNCTION_ARG_CALLEE_COPIES
4579 /* If we are passed an arg by reference and it is our responsibility
4580 to make a copy, do it now.
4581 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4582 original argument, so we must recreate them in the call to
4583 FUNCTION_ARG_CALLEE_COPIES. */
4584 /* ??? Later add code to handle the case that if the argument isn't
4585 modified, don't do the copy. */
4586
4587 else if (passed_pointer
4588 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4589 TYPE_MODE (DECL_ARG_TYPE (parm)),
4590 DECL_ARG_TYPE (parm),
bf9c83fe 4591 named_arg)
926b1b99 4592 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4593 {
4594 rtx copy;
4595 tree type = DECL_ARG_TYPE (parm);
4596
4597 /* This sequence may involve a library call perhaps clobbering
4598 registers that haven't been copied to pseudos yet. */
4599
4600 push_to_sequence (conversion_insns);
4601
4602 if (TYPE_SIZE (type) == 0
4603 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1fd3ef7f 4604 /* This is a variable sized object. */
4605 copy = gen_rtx_MEM (BLKmode,
4606 allocate_dynamic_stack_space
4607 (expr_size (parm), NULL_RTX,
4608 TYPE_ALIGN (type)));
137a2a7b 4609 else
4610 copy = assign_stack_temp (TYPE_MODE (type),
4611 int_size_in_bytes (type), 1);
c6df88cb 4612 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
e9a25f70 4613 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4614
4615 store_expr (parm, copy, 0);
4616 emit_move_insn (parmreg, XEXP (copy, 0));
7d384cc0 4617 if (current_function_check_memory_usage)
86fa911a 4618 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 4619 XEXP (copy, 0), Pmode,
4620 GEN_INT (int_size_in_bytes (type)),
4621 TYPE_MODE (sizetype),
4622 GEN_INT (MEMORY_USE_RW),
4623 TYPE_MODE (integer_type_node));
137a2a7b 4624 conversion_insns = get_insns ();
621061f4 4625 did_conversion = 1;
4626 end_sequence ();
4627 }
4628#endif /* FUNCTION_ARG_CALLEE_COPIES */
74bd77a8 4629
6f086dfc 4630 /* In any case, record the parm's desired stack location
4631 in case we later discover it must live in the stack.
4632
4633 If it is a COMPLEX value, store the stack location for both
4634 halves. */
4635
4636 if (GET_CODE (parmreg) == CONCAT)
4637 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4638 else
4639 regno = REGNO (parmreg);
4640
e9a25f70 4641 if (regno >= max_parm_reg)
4642 {
4643 rtx *new;
e9a25f70 4644 int old_max_parm_reg = max_parm_reg;
14aceb29 4645
4646 /* It's slow to expand this one register at a time,
4647 but it's also rare and we need max_parm_reg to be
4648 precisely correct. */
4649 max_parm_reg = regno + 1;
4650 new = (rtx *) xrealloc (parm_reg_stack_loc,
4651 max_parm_reg * sizeof (rtx));
4652 bzero ((char *) (new + old_max_parm_reg),
4653 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4654 parm_reg_stack_loc = new;
4655 }
4656
4657 if (GET_CODE (parmreg) == CONCAT)
4658 {
4659 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4660
4661 regnor = REGNO (gen_realpart (submode, parmreg));
4662 regnoi = REGNO (gen_imagpart (submode, parmreg));
4663
4664 if (stack_parm != 0)
4665 {
a03caf76 4666 parm_reg_stack_loc[regnor]
3d329b07 4667 = gen_realpart (submode, stack_parm);
a03caf76 4668 parm_reg_stack_loc[regnoi]
3d329b07 4669 = gen_imagpart (submode, stack_parm);
4670 }
4671 else
4672 {
4673 parm_reg_stack_loc[regnor] = 0;
4674 parm_reg_stack_loc[regnoi] = 0;
7b1a0c14 4675 }
4676 }
4677 else
4678 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4679
4680 /* Mark the register as eliminable if we did no conversion
4681 and it was copied from memory at a fixed offset,
4682 and the arg pointer was not copied to a pseudo-reg.
4683 If the arg pointer is a pseudo reg or the offset formed
4684 an invalid address, such memory-equivalences
4685 as we make here would screw up life analysis for it. */
4686 if (nominal_mode == passed_mode
621061f4 4687 && ! did_conversion
4688 && stack_parm != 0
4689 && GET_CODE (stack_parm) == MEM
4690 && stack_offset.var == 0
4691 && reg_mentioned_p (virtual_incoming_args_rtx,
38b610ed 4692 XEXP (stack_parm, 0)))
4693 {
4694 rtx linsn = get_last_insn ();
69685820 4695 rtx sinsn, set;
4696
4697 /* Mark complex types separately. */
4698 if (GET_CODE (parmreg) == CONCAT)
4699 /* Scan backwards for the set of the real and
4700 imaginary parts. */
4701 for (sinsn = linsn; sinsn != 0;
4702 sinsn = prev_nonnote_insn (sinsn))
4703 {
4704 set = single_set (sinsn);
4705 if (set != 0
4706 && SET_DEST (set) == regno_reg_rtx [regnoi])
4707 REG_NOTES (sinsn)
4708 = gen_rtx_EXPR_LIST (REG_EQUIV,
4709 parm_reg_stack_loc[regnoi],
4710 REG_NOTES (sinsn));
4711 else if (set != 0
4712 && SET_DEST (set) == regno_reg_rtx [regnor])
4713 REG_NOTES (sinsn)
4714 = gen_rtx_EXPR_LIST (REG_EQUIV,
4715 parm_reg_stack_loc[regnor],
4716 REG_NOTES (sinsn));
4717 }
4718 else if ((set = single_set (linsn)) != 0
4719 && SET_DEST (set) == parmreg)
a03caf76 4720 REG_NOTES (linsn)
4721 = gen_rtx_EXPR_LIST (REG_EQUIV,
4722 stack_parm, REG_NOTES (linsn));
a03caf76 4723 }
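	  /* For illustration only; the slot shown is hypothetical.  A
	     REG_EQUIV note attached above records, e.g., that parmreg
	     is always equivalent to

		(mem:SI (plus:SI (reg arg-pointer) (const_int 4)))

	     so later passes may eliminate the pseudo in favor of that
	     fixed stack slot.  */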
4724
4725 /* For pointer data type, suggest pointer register. */
e5e809f4 4726 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4727 mark_reg_pointer (parmreg,
4728 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4729 / BITS_PER_UNIT));
4730 }
4731 else
4732 {
4733 /* Value must be stored in the stack slot STACK_PARM
4734 during function execution. */
4735
621061f4 4736 if (promoted_mode != nominal_mode)
4737 {
4738 /* Conversion is required. */
4739 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4740
4741 emit_move_insn (tempreg, validize_mem (entry_parm));
86f8eff3 4742
4743 push_to_sequence (conversion_insns);
4744 entry_parm = convert_to_mode (nominal_mode, tempreg,
a53e14c0 4745 TREE_UNSIGNED (TREE_TYPE (parm)));
4746 if (stack_parm)
4747 {
4748 /* ??? This may need a big-endian conversion on sparc64. */
4749 stack_parm = change_address (stack_parm, nominal_mode,
4750 NULL_RTX);
4751 }
3412b298 4752 conversion_insns = get_insns ();
621061f4 4753 did_conversion = 1;
3412b298 4754 end_sequence ();
86f8eff3 4755 }
4756
4757 if (entry_parm != stack_parm)
4758 {
4759 if (stack_parm == 0)
4760 {
4761 stack_parm
4762 = assign_stack_local (GET_MODE (entry_parm),
4763 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4764 /* If this is a memory ref that contains aggregate components,
4765 mark it as such for cse and loop optimize. */
c6df88cb 4766 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4767 }
4768
621061f4 4769 if (promoted_mode != nominal_mode)
4770 {
4771 push_to_sequence (conversion_insns);
4772 emit_move_insn (validize_mem (stack_parm),
4773 validize_mem (entry_parm));
4774 conversion_insns = get_insns ();
4775 end_sequence ();
4776 }
4777 else
4778 emit_move_insn (validize_mem (stack_parm),
4779 validize_mem (entry_parm));
6f086dfc 4780 }
7d384cc0 4781 if (current_function_check_memory_usage)
4782 {
4783 push_to_sequence (conversion_insns);
4784 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 4785 XEXP (stack_parm, 0), Pmode,
4786 GEN_INT (GET_MODE_SIZE (GET_MODE
4787 (entry_parm))),
4788 TYPE_MODE (sizetype),
4789 GEN_INT (MEMORY_USE_RW),
4790 TYPE_MODE (integer_type_node));
6f086dfc 4791
4792 conversion_insns = get_insns ();
4793 end_sequence ();
4794 }
4795 DECL_RTL (parm) = stack_parm;
4796 }
4797
4798 /* If this "parameter" was the place where we are receiving the
4799 function's incoming structure pointer, set up the result. */
4800 if (parm == function_result_decl)
4801 {
4802 tree result = DECL_RESULT (fndecl);
4803 tree restype = TREE_TYPE (result);
4804
4805 DECL_RTL (result)
38a448ca 4806 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
ccdecf58 4807
4808 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4809 AGGREGATE_TYPE_P (restype));
ccdecf58 4810 }
4811
4812 if (TREE_THIS_VOLATILE (parm))
4813 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4814 if (TREE_READONLY (parm))
4815 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4816 }
4817
4818 /* Output all parameter conversion instructions (possibly including calls)
4819 now that all parameters have been copied out of hard registers. */
4820 emit_insns (conversion_insns);
4821
4822 last_parm_insn = get_last_insn ();
4823
4824 current_function_args_size = stack_args_size.constant;
4825
4826 /* Adjust function incoming argument size for alignment and
4827 minimum length. */
4828
4829#ifdef REG_PARM_STACK_SPACE
6f90e075 4830#ifndef MAYBE_REG_PARM_STACK_SPACE
4831 current_function_args_size = MAX (current_function_args_size,
4832 REG_PARM_STACK_SPACE (fndecl));
4833#endif
6f90e075 4834#endif
6f086dfc 4835
4836#ifdef STACK_BOUNDARY
4837#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4838
4839 current_function_args_size
4840 = ((current_function_args_size + STACK_BYTES - 1)
4841 / STACK_BYTES) * STACK_BYTES;
4842#endif
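/* For example, a STACK_BOUNDARY of 64 gives STACK_BYTES == 8, so an
   args size of 20 bytes is rounded up to 24 here.  */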
4843
4844#ifdef ARGS_GROW_DOWNWARD
4845 current_function_arg_offset_rtx
5f4f0e22 4846 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4847 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4848 size_int (-stack_args_size.constant)),
86fa911a 4849 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4850#else
4851 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4852#endif
4853
4854 /* See how many bytes, if any, of its args a function should try to pop
4855 on return. */
4856
64e6d9cc 4857 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4858 current_function_args_size);
4859
4860 /* For a stdarg.h function, save info about
4861 regs and stack space used by the named args. */
6f086dfc 4862
3b69d50e 4863 if (!hide_last_arg)
4864 current_function_args_info = args_so_far;
4865
4866 /* Set the rtx used for the function return value. Put this in its
4867 own variable so any optimizers that need this information don't have
4868 to include tree.h. Do this here so it gets done when an inlined
4869 function gets output. */
4870
4871 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4872}
4873\f
4874/* Indicate whether REGNO is an incoming argument to the current function
4875 that was promoted to a wider mode. If so, return the RTX for the
4876 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4877 that REGNO is promoted from and whether the promotion was signed or
4878 unsigned. */
4879
4880#ifdef PROMOTE_FUNCTION_ARGS
4881
4882rtx
4883promoted_input_arg (regno, pmode, punsignedp)
4884 int regno;
4885 enum machine_mode *pmode;
4886 int *punsignedp;
4887{
4888 tree arg;
4889
4890 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4891 arg = TREE_CHAIN (arg))
4892 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4893 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4894 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4895 {
4896 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4897 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4898
a5a52dbc 4899 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4900 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4901 && mode != DECL_MODE (arg))
4902 {
4903 *pmode = DECL_MODE (arg);
4904 *punsignedp = unsignedp;
4905 return DECL_INCOMING_RTL (arg);
4906 }
4907 }
4908
4909 return 0;
4910}
4911
4912#endif
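/* A sketch of a call, assuming a target that promotes small integer
   modes to SImode and a `short' parm arriving in hard reg 5:

     enum machine_mode mode;
     int unsignedp;
     rtx reg = promoted_input_arg (5, &mode, &unsignedp);

   REG is then (reg:SI 5) with *PMODE == HImode and *PUNSIGNEDP set from
   the parm's signedness, telling the caller the register really holds a
   widened HImode value.  */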
4913\f
4914/* Compute the size and offset from the start of the stacked arguments for a
4915 parm passed in mode PASSED_MODE and with type TYPE.
4916
4917 INITIAL_OFFSET_PTR points to the current offset into the stacked
4918 arguments.
4919
4920 The starting offset and size for this parm are returned in *OFFSET_PTR
4921 and *ARG_SIZE_PTR, respectively.
4922
4923 IN_REGS is non-zero if the argument will be passed in registers. It will
4924 never be set if REG_PARM_STACK_SPACE is not defined.
4925
4926 FNDECL is the function in which the argument was defined.
4927
4928 There are two types of rounding that are done. The first, controlled by
4929 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4930 list to be aligned to the specific boundary (in bits). This rounding
4931 affects the initial and starting offsets, but not the argument size.
4932
4933 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4934 optionally rounds the size of the parm to PARM_BOUNDARY. The
4935 initial offset is not affected by this rounding, while the size always
4936 is and the starting offset may be. */
4937
4938/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4939 initial_offset_ptr is positive because locate_and_pad_parm's
4940 callers pass in the total size of args so far as
4941 initial_offset_ptr. arg_size_ptr is always positive. */
4942
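/* A worked example, assuming PARM_BOUNDARY == 32 and a
   FUNCTION_ARG_BOUNDARY of 64 bits, with args growing upward: starting
   from *INITIAL_OFFSET_PTR == 20, the offset is first rounded up to 24
   (the 8-byte boundary), and a 5-byte BLKmode arg is padded out to 8
   bytes, so *OFFSET_PTR ends up 24 and ARG_SIZE_PTR->constant 8.  */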
4943void
4944locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4945 initial_offset_ptr, offset_ptr, arg_size_ptr,
4946 alignment_pad)
4947 enum machine_mode passed_mode;
4948 tree type;
4949 int in_regs;
91813b28 4950 tree fndecl ATTRIBUTE_UNUSED;
4951 struct args_size *initial_offset_ptr;
4952 struct args_size *offset_ptr;
4953 struct args_size *arg_size_ptr;
4954 struct args_size *alignment_pad;
4955
4956{
4957 tree sizetree
4958 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4959 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4960 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4961
4962#ifdef REG_PARM_STACK_SPACE
4963 /* If we have found a stack parm before we reach the end of the
4964 area reserved for registers, skip that area. */
4965 if (! in_regs)
4966 {
4967 int reg_parm_stack_space = 0;
4968
4969#ifdef MAYBE_REG_PARM_STACK_SPACE
4970 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4971#else
6f086dfc 4972 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
29008b51 4973#endif
4974 if (reg_parm_stack_space > 0)
4975 {
4976 if (initial_offset_ptr->var)
4977 {
4978 initial_offset_ptr->var
4979 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4980 size_int (reg_parm_stack_space));
4981 initial_offset_ptr->constant = 0;
4982 }
4983 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4984 initial_offset_ptr->constant = reg_parm_stack_space;
4985 }
4986 }
4987#endif /* REG_PARM_STACK_SPACE */
4988
4989 arg_size_ptr->var = 0;
4990 arg_size_ptr->constant = 0;
4991
4992#ifdef ARGS_GROW_DOWNWARD
4993 if (initial_offset_ptr->var)
4994 {
4995 offset_ptr->constant = 0;
4996 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4997 initial_offset_ptr->var);
4998 }
4999 else
5000 {
5001 offset_ptr->constant = - initial_offset_ptr->constant;
5002 offset_ptr->var = 0;
5003 }
0b21dcf5 5004 if (where_pad != none
5005 && (TREE_CODE (sizetree) != INTEGER_CST
5006 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5007 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5008 SUB_PARM_SIZE (*offset_ptr, sizetree);
66bcbe19 5009 if (where_pad != downward)
4fc026cd 5010 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5011 if (initial_offset_ptr->var)
5012 {
5013 arg_size_ptr->var = size_binop (MINUS_EXPR,
5014 size_binop (MINUS_EXPR,
5015 integer_zero_node,
5016 initial_offset_ptr->var),
5017 offset_ptr->var);
5018 }
5019 else
5020 {
5021 arg_size_ptr->constant = (- initial_offset_ptr->constant
5022 - offset_ptr->constant);
6f086dfc 5023 }
6f086dfc 5024#else /* !ARGS_GROW_DOWNWARD */
4fc026cd 5025 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
6f086dfc 5026 *offset_ptr = *initial_offset_ptr;
5027
5028#ifdef PUSH_ROUNDING
5029 if (passed_mode != BLKmode)
5030 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5031#endif
5032
5033 /* Pad_below needs the pre-rounded size to know how much to pad below
5034 so this must be done before rounding up. */
5035 if (where_pad == downward
5036 /* However, BLKmode args passed in regs have their padding done elsewhere.
5037 The stack slot must be able to hold the entire register. */
5038 && !(in_regs && passed_mode == BLKmode))
5039 pad_below (offset_ptr, passed_mode, sizetree);
5040
5041 if (where_pad != none
5042 && (TREE_CODE (sizetree) != INTEGER_CST
5043 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5044 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5045
5046 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5047#endif /* ARGS_GROW_DOWNWARD */
5048}
5049
5050/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5051 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5052
6f086dfc 5053static void
4fc026cd 5054pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5055 struct args_size *offset_ptr;
5056 int boundary;
4fc026cd 5057 struct args_size *alignment_pad;
6f086dfc 5058{
5059 tree save_var;
5060 HOST_WIDE_INT save_constant;
5061
5062 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5063
9399d5c6 5064 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5065 {
5066 save_var = offset_ptr->var;
5067 save_constant = offset_ptr->constant;
5068 }
5069
5070 alignment_pad->var = NULL_TREE;
5071 alignment_pad->constant = 0;
4fc026cd 5072
5073 if (boundary > BITS_PER_UNIT)
5074 {
5075 if (offset_ptr->var)
5076 {
5077 offset_ptr->var =
5078#ifdef ARGS_GROW_DOWNWARD
5079 round_down
5080#else
5081 round_up
5082#endif
5083 (ARGS_SIZE_TREE (*offset_ptr),
5084 boundary / BITS_PER_UNIT);
5085 offset_ptr->constant = 0; /*?*/
9399d5c6 5086 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
4fc026cd 5087 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5088 }
5089 else
5090 {
5091 offset_ptr->constant =
6f086dfc 5092#ifdef ARGS_GROW_DOWNWARD
fbb57b2a 5093 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
6f086dfc 5094#else
fbb57b2a 5095 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
6f086dfc 5096#endif
9399d5c6 5097 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5098 alignment_pad->constant = offset_ptr->constant - save_constant;
5099 }
5100 }
5101}
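/* For instance, a BOUNDARY of 64 gives boundary_in_bytes == 8: a
   constant offset of 20 becomes CEIL_ROUND (20, 8) == 24 when args grow
   upward (or FLOOR_ROUND (-20, 8) == -24 when they grow downward), and
   in the upward case the 4 bytes of slack are what *ALIGNMENT_PAD
   records.  */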
5102
51723711 5103#ifndef ARGS_GROW_DOWNWARD
5104static void
5105pad_below (offset_ptr, passed_mode, sizetree)
5106 struct args_size *offset_ptr;
5107 enum machine_mode passed_mode;
5108 tree sizetree;
5109{
5110 if (passed_mode != BLKmode)
5111 {
5112 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5113 offset_ptr->constant
5114 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5115 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5116 - GET_MODE_SIZE (passed_mode));
5117 }
5118 else
5119 {
5120 if (TREE_CODE (sizetree) != INTEGER_CST
5121 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5122 {
5123 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5124 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5125 /* Add it in. */
5126 ADD_PARM_SIZE (*offset_ptr, s2);
5127 SUB_PARM_SIZE (*offset_ptr, sizetree);
5128 }
5129 }
5130}
51723711 5131#endif
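/* Example, assuming PARM_BOUNDARY == 32: an HImode scalar gains 2 bytes
   of below-padding (its 16 bits round up to a 4-byte slot), while a
   5-byte BLKmode arg rounds up to 8 bytes, advancing the offset by 3.  */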
6f086dfc 5132
487a6e06 5133#ifdef ARGS_GROW_DOWNWARD
5134static tree
5135round_down (value, divisor)
5136 tree value;
5137 int divisor;
5138{
5139 return size_binop (MULT_EXPR,
5140 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5141 size_int (divisor));
5142}
487a6e06 5143#endif
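/* E.g. round_down applied to 21 with divisor 8 yields 16; on the
   negative offsets used when args grow downward, FLOOR_DIV_EXPR rounds
   toward minus infinity, away from zero.  */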
5144\f
5145/* Walk the tree of blocks describing the binding levels within a function
5146 and warn about uninitialized variables.
5147 This is done after calling flow_analysis and before global_alloc
5148 clobbers the pseudo-regs to hard regs. */
5149
5150void
5151uninitialized_vars_warning (block)
5152 tree block;
5153{
5154 register tree decl, sub;
5155 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5156 {
5157 if (warn_uninitialized
5158 && TREE_CODE (decl) == VAR_DECL
5159 /* These warnings are unreliable for aggregates
5160 because assigning the fields one by one can fail to convince
5161 flow.c that the entire aggregate was initialized.
5162 Unions are troublesome because members may be shorter. */
05e3bdb9 5163 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5164 && DECL_RTL (decl) != 0
5165 && GET_CODE (DECL_RTL (decl)) == REG
5166 /* Global optimizations can make it difficult to determine if a
5167 particular variable has been initialized. However, a VAR_DECL
5168 with a nonzero DECL_INITIAL had an initializer, so do not
5169 claim it is potentially uninitialized.
5170
5171 We do not care about the actual value in DECL_INITIAL, so we do
5172 not worry that it may be a dangling pointer. */
5173 && DECL_INITIAL (decl) == NULL_TREE
5174 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5175 warning_with_decl (decl,
3c8cd8bd 5176 "`%s' might be used uninitialized in this function");
5177 if (extra_warnings
5178 && TREE_CODE (decl) == VAR_DECL
5179 && DECL_RTL (decl) != 0
5180 && GET_CODE (DECL_RTL (decl)) == REG
5181 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5182 warning_with_decl (decl,
3c8cd8bd 5183 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5184 }
5185 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5186 uninitialized_vars_warning (sub);
5187}
5188
5189/* Do the appropriate part of uninitialized_vars_warning
5190 but for arguments instead of local variables. */
5191
5192void
0cd6ef35 5193setjmp_args_warning ()
5194{
5195 register tree decl;
5196 for (decl = DECL_ARGUMENTS (current_function_decl);
5197 decl; decl = TREE_CHAIN (decl))
5198 if (DECL_RTL (decl) != 0
5199 && GET_CODE (DECL_RTL (decl)) == REG
5200 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3c8cd8bd 5201 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5202}
5203
5204/* If this function calls setjmp, put all vars into the stack
5205 unless they were declared `register'. */
5206
5207void
5208setjmp_protect (block)
5209 tree block;
5210{
5211 register tree decl, sub;
5212 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5213 if ((TREE_CODE (decl) == VAR_DECL
5214 || TREE_CODE (decl) == PARM_DECL)
5215 && DECL_RTL (decl) != 0
5216 && (GET_CODE (DECL_RTL (decl)) == REG
5217 || (GET_CODE (DECL_RTL (decl)) == MEM
5218 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
b335c2cc 5219 /* If this variable came from an inline function, it must be
9ec36da5 5220 that its life doesn't overlap the setjmp. If there was a
5221 setjmp in the function, it would already be in memory. We
5222 must exclude such variables because their DECL_RTL might be
5223 set to strange things such as virtual_stack_vars_rtx. */
5224 && ! DECL_FROM_INLINE (decl)
5225 && (
5226#ifdef NON_SAVING_SETJMP
5227 /* If longjmp doesn't restore the registers,
5228 don't put anything in them. */
5229 NON_SAVING_SETJMP
5230 ||
5231#endif
a82ad570 5232 ! DECL_REGISTER (decl)))
5233 put_var_into_stack (decl);
5234 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5235 setjmp_protect (sub);
5236}
5237\f
5238/* Like the previous function, but for args instead of local variables. */
5239
5240void
5241setjmp_protect_args ()
5242{
29a82058 5243 register tree decl;
5244 for (decl = DECL_ARGUMENTS (current_function_decl);
5245 decl; decl = TREE_CHAIN (decl))
5246 if ((TREE_CODE (decl) == VAR_DECL
5247 || TREE_CODE (decl) == PARM_DECL)
5248 && DECL_RTL (decl) != 0
5249 && (GET_CODE (DECL_RTL (decl)) == REG
5250 || (GET_CODE (DECL_RTL (decl)) == MEM
5251 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5252 && (
5253 /* If longjmp doesn't restore the registers,
5254 don't put anything in them. */
5255#ifdef NON_SAVING_SETJMP
5256 NON_SAVING_SETJMP
5257 ||
5258#endif
a82ad570 5259 ! DECL_REGISTER (decl)))
5260 put_var_into_stack (decl);
5261}
5262\f
5263/* Return the context-pointer register corresponding to DECL,
5264 or 0 if it does not need one. */
5265
5266rtx
5267lookup_static_chain (decl)
5268 tree decl;
5269{
5270 tree context = decl_function_context (decl);
5271 tree link;
7ad8c4bf 5272
5273 if (context == 0
5274 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
7ad8c4bf 5275 return 0;
38ee6ed9 5276
5277 /* We treat inline_function_decl as an alias for the current function
5278 because that is the inline function whose vars, types, etc.
5279 are being merged into the current function.
5280 See expand_inline_function. */
5281 if (context == current_function_decl || context == inline_function_decl)
5282 return virtual_stack_vars_rtx;
5283
5284 for (link = context_display; link; link = TREE_CHAIN (link))
5285 if (TREE_PURPOSE (link) == context)
5286 return RTL_EXPR_RTL (TREE_VALUE (link));
5287
5288 abort ();
5289}
5290\f
5291/* Convert a stack slot address ADDR for variable VAR
5292 (from a containing function)
5293 into an address valid in this function (using a static chain). */
5294
5295rtx
5296fix_lexical_addr (addr, var)
5297 rtx addr;
5298 tree var;
5299{
5300 rtx basereg;
e5e809f4 5301 HOST_WIDE_INT displacement;
5302 tree context = decl_function_context (var);
5303 struct function *fp;
5304 rtx base = 0;
5305
5306 /* If this is the present function, we need not do anything. */
5307 if (context == current_function_decl || context == inline_function_decl)
5308 return addr;
5309
5310 for (fp = outer_function_chain; fp; fp = fp->next)
5311 if (fp->decl == context)
5312 break;
5313
5314 if (fp == 0)
5315 abort ();
5316
5317 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5318 addr = XEXP (XEXP (addr, 0), 0);
5319
5320 /* Decode given address as base reg plus displacement. */
5321 if (GET_CODE (addr) == REG)
5322 basereg = addr, displacement = 0;
5323 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5324 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5325 else
5326 abort ();
5327
5328 /* We accept vars reached via the containing function's
5329 incoming arg pointer and via its stack variables pointer. */
5330 if (basereg == fp->internal_arg_pointer)
5331 {
5332 /* If reached via arg pointer, get the arg pointer value
5333 out of that function's stack frame.
5334
5335 There are two cases: If a separate ap is needed, allocate a
5336 slot in the outer function for it and dereference it that way.
5337 This is correct even if the real ap is actually a pseudo.
5338 Otherwise, just adjust the offset from the frame pointer to
5339 compensate. */
5340
5341#ifdef NEED_SEPARATE_AP
5342 rtx addr;
5343
5344 if (fp->x_arg_pointer_save_area == 0)
5345 fp->x_arg_pointer_save_area
e2ecd91c 5346 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
6f086dfc 5347
49ad7cfa 5348 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5349 addr = memory_address (Pmode, addr);
5350
38a448ca 5351 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5352#else
5353 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
86f8eff3 5354 base = lookup_static_chain (var);
5355#endif
5356 }
5357
5358 else if (basereg == virtual_stack_vars_rtx)
5359 {
5360 /* This is the same code as lookup_static_chain, duplicated here to
5361 avoid an extra call to decl_function_context. */
5362 tree link;
5363
5364 for (link = context_display; link; link = TREE_CHAIN (link))
5365 if (TREE_PURPOSE (link) == context)
5366 {
5367 base = RTL_EXPR_RTL (TREE_VALUE (link));
5368 break;
5369 }
5370 }
5371
5372 if (base == 0)
5373 abort ();
5374
5375 /* Use same offset, relative to appropriate static chain or argument
5376 pointer. */
5377 return plus_constant (base, displacement);
5378}
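/* E.g. for a variable living at (plus (reg virtual-stack-vars)
   (const_int -12)) in the enclosing function, the result is that
   function's frame base (found through the static chain) plus -12.  */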
5379\f
5380/* Return the address of the trampoline for entering nested fn FUNCTION.
5381 If necessary, allocate a trampoline (in the stack frame)
5382 and emit rtl to initialize its contents (at entry to this function). */
5383
5384rtx
5385trampoline_address (function)
5386 tree function;
5387{
5388 tree link;
5389 tree rtlexp;
5390 rtx tramp;
5391 struct function *fp;
5392 tree fn_context;
5393
5394 /* Find an existing trampoline and return it. */
5395 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5396 if (TREE_PURPOSE (link) == function)
5397 return
5398 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5399
6f086dfc 5400 for (fp = outer_function_chain; fp; fp = fp->next)
49ad7cfa 5401 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5402 if (TREE_PURPOSE (link) == function)
5403 {
5404 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5405 function);
5406 return round_trampoline_addr (tramp);
5407 }
5408
5409 /* None exists; we must make one. */
5410
5411 /* Find the `struct function' for the function containing FUNCTION. */
5412 fp = 0;
5413 fn_context = decl_function_context (function);
5414 if (fn_context != current_function_decl
5415 && fn_context != inline_function_decl)
5416 for (fp = outer_function_chain; fp; fp = fp->next)
5417 if (fp->decl == fn_context)
5418 break;
5419
5420 /* Allocate run-time space for this trampoline
5421 (usually in the defining function's stack frame). */
5422#ifdef ALLOCATE_TRAMPOLINE
5423 tramp = ALLOCATE_TRAMPOLINE (fp);
5424#else
5425 /* If rounding needed, allocate extra space
5426 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5427#ifdef TRAMPOLINE_ALIGNMENT
5428#define TRAMPOLINE_REAL_SIZE \
5429 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5430#else
5431#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5432#endif
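  /* E.g. with TRAMPOLINE_SIZE == 26 and TRAMPOLINE_ALIGNMENT == 128
     (16 bytes), this reserves 26 + 16 - 1 == 41 bytes, so a full
     TRAMPOLINE_SIZE remains past whatever 16-byte boundary falls inside
     the slot.  */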
5433 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5434 fp ? fp : current_function);
5435#endif
5436
5437 /* Record the trampoline for reuse and note it for later initialization
5438 by expand_function_end. */
5439 if (fp != 0)
5440 {
5441 push_obstacks (fp->function_maybepermanent_obstack,
5442 fp->function_maybepermanent_obstack);
5443 rtlexp = make_node (RTL_EXPR);
5444 RTL_EXPR_RTL (rtlexp) = tramp;
5445 fp->x_trampoline_list = tree_cons (function, rtlexp,
5446 fp->x_trampoline_list);
5447 pop_obstacks ();
5448 }
5449 else
5450 {
5451 /* Make the RTL_EXPR node temporary, not momentary, so that the
5452 trampoline_list doesn't become garbage. */
5453 int momentary = suspend_momentary ();
5454 rtlexp = make_node (RTL_EXPR);
5455 resume_momentary (momentary);
5456
5457 RTL_EXPR_RTL (rtlexp) = tramp;
5458 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5459 }
5460
5461 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5462 return round_trampoline_addr (tramp);
5463}
5464
5465/* Given a trampoline address,
5466 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5467
5468static rtx
5469round_trampoline_addr (tramp)
5470 rtx tramp;
5471{
5472#ifdef TRAMPOLINE_ALIGNMENT
5473 /* Round address up to desired boundary. */
5474 rtx temp = gen_reg_rtx (Pmode);
5475 temp = expand_binop (Pmode, add_optab, tramp,
b02ab63a 5476 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5477 temp, 0, OPTAB_LIB_WIDEN);
5478 tramp = expand_binop (Pmode, and_optab, temp,
b02ab63a 5479 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5480 temp, 0, OPTAB_LIB_WIDEN);
5481#endif
5482 return tramp;
5483}
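/* The add/and pair computes (tramp + ALIGN - 1) & -ALIGN with ALIGN ==
   TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; for ALIGN == 16, an address of
   0x1003 rounds up to 0x1010.  */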
5484\f
5485/* Insert the BLOCK in the block-tree before LAST_INSN. */
5486
5487void
5488retrofit_block (block, last_insn)
5489 tree block;
5490 rtx last_insn;
5491{
5492 rtx insn;
5493
5494 /* Now insert the new BLOCK at the right place in the block trees
5495 for the function which called the inline function. We just look
5496 backwards for a NOTE_INSN_BLOCK_{BEG,END}. If we find the
5497 beginning of a block, then this new block becomes the first
5498 subblock of that block. If we find the end of a block, then this
5499 new block follows that block in the list of blocks. */
5500 for (insn = last_insn; insn; insn = PREV_INSN (insn))
5501 if (GET_CODE (insn) == NOTE
5502 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
5503 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
5504 break;
5505 if (!insn || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5506 {
5507 tree superblock;
5508
5509 if (insn)
5510 superblock = NOTE_BLOCK (insn);
5511 else
5512 superblock = DECL_INITIAL (current_function_decl);
5513
5514 BLOCK_SUPERCONTEXT (block) = superblock;
5515 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (superblock);
5516 BLOCK_SUBBLOCKS (superblock) = block;
5517 }
5518 else
5519 {
5520 tree prevblock = NOTE_BLOCK (insn);
5521
5522 BLOCK_SUPERCONTEXT (block) = BLOCK_SUPERCONTEXT (prevblock);
5523 BLOCK_CHAIN (block) = BLOCK_CHAIN (prevblock);
5524 BLOCK_CHAIN (prevblock) = block;
5525 }
5526}
5527
5528/* The functions identify_blocks and reorder_blocks provide a way to
5529 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5530 duplicate portions of the RTL code. Call identify_blocks before
5531 changing the RTL, and call reorder_blocks after. */
5532
5533/* Put all this function's BLOCK nodes, including those that are chained
5534 onto the first block, into a vector, and return it.
5535 Also store in each NOTE for the beginning or end of a block
5536 the index of that block in the vector.
b2a59b15 5537 The arguments are BLOCK, the chain of top-level blocks of the function,
5538 and INSNS, the insn chain of the function. */
5539
1a4450c7 5540void
5541identify_blocks (block, insns)
5542 tree block;
5543 rtx insns;
5544{
5545 int n_blocks;
5546 tree *block_vector;
1a4450c7 5547 tree *block_stack;
467456d0 5548 int depth = 0;
b2a59b15 5549 int current_block_number = 1;
5550 rtx insn;
5551
b2a59b15 5552 if (block == 0)
1a4450c7 5553 return;
fc289cd1 5554
5555 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5556 depth-first order. */
b2a59b15 5557 n_blocks = all_blocks (block, 0);
fc289cd1 5558 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
b2a59b15 5559 all_blocks (block, block_vector);
467456d0 5560
4da896b2 5561 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
1a4450c7 5562
5563 for (insn = insns; insn; insn = NEXT_INSN (insn))
5564 if (GET_CODE (insn) == NOTE)
5565 {
5566 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5567 {
21204d34 5568 tree b;
1a4450c7 5569
5570 /* If there are more block notes than BLOCKs, something
5571 is badly wrong. */
5572 if (current_block_number == n_blocks)
5573 abort ();
5574
5575 b = block_vector[current_block_number++];
5576 NOTE_BLOCK (insn) = b;
5577 block_stack[depth++] = b;
467456d0 5578 }
5579 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5580 {
5581 if (depth == 0)
5582 /* There are more NOTE_INSN_BLOCK_ENDs than
5583 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5584 abort ();
5585
5586 NOTE_BLOCK (insn) = block_stack[--depth];
5587 }
5588 }
5589
5590 /* In whole-function mode, we might not have seen the whole function
5591 yet, so we might not use up all the blocks. */
5592 if (n_blocks != current_block_number
5593 && !current_function->x_whole_function_mode_p)
5594 abort ();
5595
1a4450c7 5596 free (block_vector);
4da896b2 5597 free (block_stack);
5598}
5599
5600/* Given a revised instruction chain, rebuild the tree structure of
5601 BLOCK nodes to correspond to the new order of RTL. The new block
5602 tree is inserted below BLOCK. Returns the current top-level
5603 block. */
5604
5605tree
1a4450c7 5606reorder_blocks (block, insns)
b2a59b15 5607 tree block;
5608 rtx insns;
5609{
b2a59b15 5610 tree current_block = block;
5611 rtx insn;
5612
5613 if (block == NULL_TREE)
5614 return NULL_TREE;
fc289cd1 5615
b2a59b15 5616 /* Prune the old trees away, so that they don't get in the way. */
fc289cd1 5617 BLOCK_SUBBLOCKS (current_block) = 0;
b2a59b15 5618 BLOCK_CHAIN (current_block) = 0;
fc289cd1 5619
5620 for (insn = insns; insn; insn = NEXT_INSN (insn))
5621 if (GET_CODE (insn) == NOTE)
5622 {
5623 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5624 {
1a4450c7 5625 tree block = NOTE_BLOCK (insn);
5626 /* If we have seen this block before, copy it. */
5627 if (TREE_ASM_WRITTEN (block))
5628 block = copy_node (block);
fc289cd1 5629 BLOCK_SUBBLOCKS (block) = 0;
5630 TREE_ASM_WRITTEN (block) = 1;
5631 BLOCK_SUPERCONTEXT (block) = current_block;
5632 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5633 BLOCK_SUBBLOCKS (current_block) = block;
5634 current_block = block;
1b2ac438 5635 NOTE_SOURCE_FILE (insn) = 0;
5636 }
5637 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5638 {
5639 BLOCK_SUBBLOCKS (current_block)
5640 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5641 current_block = BLOCK_SUPERCONTEXT (current_block);
1b2ac438 5642 NOTE_SOURCE_FILE (insn) = 0;
5643 }
5644 }
5645
5646 BLOCK_SUBBLOCKS (current_block)
5647 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5648 return current_block;
5649}
5650
5651/* Reverse the order of elements in the chain T of blocks,
5652 and return the new head of the chain (old last element). */
5653
5654static tree
5655blocks_nreverse (t)
5656 tree t;
5657{
5658 register tree prev = 0, decl, next;
5659 for (decl = t; decl; decl = next)
5660 {
5661 next = BLOCK_CHAIN (decl);
5662 BLOCK_CHAIN (decl) = prev;
5663 prev = decl;
5664 }
5665 return prev;
5666}
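/* E.g. a chain B1 -> B2 -> B3 comes back as B3 -> B2 -> B1; the links
   are flipped in place, so no new nodes are allocated.  */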
5667
5668/* Count the subblocks of the list starting with BLOCK, and list them
5669 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5670 blocks. */
5671
5672static int
5673all_blocks (block, vector)
5674 tree block;
5675 tree *vector;
5676{
5677 int n_blocks = 0;
5678
5679 while (block)
5680 {
5681 TREE_ASM_WRITTEN (block) = 0;
5682
5683 /* Record this block. */
5684 if (vector)
5685 vector[n_blocks] = block;
5686
5687 ++n_blocks;
5688
5689 /* Record the subblocks, and their subblocks... */
5690 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5691 vector ? vector + n_blocks : 0);
5692 block = BLOCK_CHAIN (block);
5693 }
5694
5695 return n_blocks;
5696}
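/* For a block tree B0 { B1 { B2 } B3 }, the vector is filled in
   preorder: B0, B1, B2, B3, the same order in which their
   NOTE_INSN_BLOCK_BEG notes appear in the insn chain, which is what
   identify_blocks relies on when it pairs notes with blocks.  */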
5697\f
5698/* Allocate a function structure and reset its contents to the defaults. */
5699static void
5700prepare_function_start ()
6f086dfc 5701{
b384405b 5702 current_function = (struct function *) xcalloc (1, sizeof (struct function));
e2ecd91c 5703
6f086dfc 5704 init_stmt_for_function ();
fa51b01b 5705 init_eh_for_function ();
5706
5707 cse_not_expected = ! optimize;
5708
5709 /* Caller save not needed yet. */
5710 caller_save_needed = 0;
5711
5712 /* No stack slots have been made yet. */
5713 stack_slot_list = 0;
5714
5715 current_function_has_nonlocal_label = 0;
5716 current_function_has_nonlocal_goto = 0;
5717
6f086dfc 5718 /* There is no stack slot for handling nonlocal gotos. */
ba716ac9 5719 nonlocal_goto_handler_slots = 0;
5720 nonlocal_goto_stack_level = 0;
5721
5722 /* No labels have been declared for nonlocal use. */
5723 nonlocal_labels = 0;
e881bb1b 5724 nonlocal_goto_handler_labels = 0;
5725
5726 /* No function calls so far in this function. */
5727 function_call_count = 0;
5728
5729 /* No parm regs have been allocated.
5730 (This is important for output_inline_function.) */
5731 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5732
5733 /* Initialize the RTL mechanism. */
5734 init_emit ();
5735
5736 /* Initialize the queue of pending postincrement and postdecrements,
5737 and some other info in expr.c. */
5738 init_expr ();
b384405b 5739
5740 /* We haven't done register allocation yet. */
5741 reg_renumber = 0;
5742
36edd3cc 5743 init_varasm_status (current_function);
6f086dfc 5744
5745 /* Clear out data used for inlining. */
5746 current_function->inlinable = 0;
5747 current_function->original_decl_initial = 0;
5748 current_function->original_arg_vector = 0;
5749
5750 /* Set if a call to setjmp is seen. */
5751 current_function_calls_setjmp = 0;
5752
5753 /* Set if a call to longjmp is seen. */
5754 current_function_calls_longjmp = 0;
5755
5756 current_function_calls_alloca = 0;
6f086dfc 5757 current_function_contains_functions = 0;
54ff41b7 5758 current_function_is_leaf = 0;
fdb8a883 5759 current_function_sp_is_unchanging = 0;
54ff41b7 5760 current_function_uses_only_leaf_regs = 0;
acd693d1 5761 current_function_has_computed_jump = 0;
173cd503 5762 current_function_is_thunk = 0;
5763
5764 current_function_returns_pcc_struct = 0;
5765 current_function_returns_struct = 0;
5766 current_function_epilogue_delay_list = 0;
5767 current_function_uses_const_pool = 0;
5768 current_function_uses_pic_offset_table = 0;
aeb302bb 5769 current_function_cannot_inline = 0;
5770
5771 /* We have not yet needed to make a label to jump to for tail-recursion. */
5772 tail_recursion_label = 0;
5773
5774 /* We haven't had a need to make a save area for ap yet. */
5775 arg_pointer_save_area = 0;
5776
5777 /* No stack slots allocated yet. */
5778 frame_offset = 0;
5779
5780 /* No SAVE_EXPRs in this function yet. */
5781 save_expr_regs = 0;
5782
5783 /* No RTL_EXPRs in this function yet. */
5784 rtl_expr_chain = 0;
5785
5786 /* Set up to allocate temporaries. */
5787 init_temp_slots ();
6f086dfc 5788
5789 /* Indicate that we need to distinguish between the return value of the
5790 present function and the return value of a function being called. */
5791 rtx_equal_function_value_matters = 1;
5792
5793 /* Indicate that we have not instantiated virtual registers yet. */
5794 virtuals_instantiated = 0;
5795
5796 /* Indicate we have no need of a frame pointer yet. */
5797 frame_pointer_needed = 0;
5798
5799 /* By default assume not varargs or stdarg. */
5800 current_function_varargs = 0;
5801 current_function_stdarg = 0;
6f086dfc 5802
5803 /* We haven't made any trampolines for this function yet. */
5804 trampoline_list = 0;
5805
5806 init_pending_stack_adjust ();
5807 inhibit_defer_pop = 0;
5808
5809 current_function_outgoing_args_size = 0;
36edd3cc 5810
5811 if (init_lang_status)
5812 (*init_lang_status) (current_function);
5813 if (init_machine_status)
5814 (*init_machine_status) (current_function);
5815}
5816
5817/* Initialize the rtl expansion mechanism so that we can do simple things
5818 like generate sequences. This is used to provide a context during global
5819 initialization of some passes. */
5820void
5821init_dummy_function_start ()
5822{
5823 prepare_function_start ();
5824}
5825
5826/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5827 and initialize static variables for generating RTL for the statements
5828 of the function. */
5829
5830void
5831init_function_start (subr, filename, line)
5832 tree subr;
5833 char *filename;
5834 int line;
5835{
5836 prepare_function_start ();
5837
5838 /* Remember this function for later. */
5839 current_function->next_global = all_functions;
5840 all_functions = current_function;
87ff9c8e 5841
b384405b 5842 current_function_name = (*decl_printable_name) (subr, 2);
0a8a198c 5843 current_function->decl = subr;
5844
5845 /* Nonzero if this is a nested function that uses a static chain. */
5846
5847 current_function_needs_context
5848 = (decl_function_context (current_function_decl) != 0
5849 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5850
5851 /* Within function body, compute a type's size as soon it is laid out. */
5852 immediate_size_expand++;
6f086dfc 5853
6f086dfc 5854 /* Prevent ever trying to delete the first instruction of a function.
5855 Also tell final how to output a linenum before the function prologue.
5856 Note linenums could be missing, e.g. when compiling a Java .class file. */
5857 if (line > 0)
5858 emit_line_note (filename, line);
5859
5860 /* Make sure first insn is a note even if we don't want linenums.
5861 This makes sure the first insn will never be deleted.
5862 Also, final expects a note to appear there. */
5f4f0e22 5863 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5864
5865 /* Set flags used by final.c. */
5866 if (aggregate_value_p (DECL_RESULT (subr)))
5867 {
5868#ifdef PCC_STATIC_STRUCT_RETURN
1b8297c1 5869 current_function_returns_pcc_struct = 1;
6f086dfc 5870#endif
1b8297c1 5871 current_function_returns_struct = 1;
5872 }
5873
5874 /* Warn if this value is an aggregate type,
5875 regardless of which calling convention we are using for it. */
5876 if (warn_aggregate_return
05e3bdb9 5877 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5878 warning ("function returns an aggregate");
5879
5880 current_function_returns_pointer
8eda074c 5881 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
49ad7cfa 5882}
5c7675e9 5883
5884/* Make sure all values used by the optimization passes have sane
5885 defaults. */
5886void
5887init_function_for_compilation ()
5888{
5889 reg_renumber = 0;
5890 /* No prologue/epilogue insns yet. */
5891 prologue = epilogue = 0;
5892}
5893
5894/* Indicate that the current function uses extra args
5895 not explicitly mentioned in the argument list in any fashion. */
5896
5897void
5898mark_varargs ()
5899{
5900 current_function_varargs = 1;
5901}
5902
5903/* Expand a call to __main at the beginning of a possible main function. */
5904
5905#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5906#undef HAS_INIT_SECTION
5907#define HAS_INIT_SECTION
5908#endif
5909
5910void
5911expand_main_function ()
5912{
e2fd1d94 5913#if !defined (HAS_INIT_SECTION)
5914 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5915 VOIDmode, 0);
e2fd1d94 5916#endif /* not HAS_INIT_SECTION */
5917}
5918\f
5919extern struct obstack permanent_obstack;
5920
5921/* Start the RTL for a new function, and set variables used for
5922 emitting RTL.
5923 SUBR is the FUNCTION_DECL node.
5924 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5925 the function's parameters, which must be run at any return statement. */
5926
5927void
5928expand_function_start (subr, parms_have_cleanups)
5929 tree subr;
5930 int parms_have_cleanups;
5931{
5932 register int i;
5933 tree tem;
4e86caed 5934 rtx last_ptr = NULL_RTX;
5935
5936 /* Make sure volatile mem refs aren't considered
5937 valid operands of arithmetic insns. */
5938 init_recog_no_volatile ();
5939
5940 /* Set this before generating any memory accesses. */
5941 current_function_check_memory_usage
5942 = (flag_check_memory_usage
5943 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5944
5945 current_function_instrument_entry_exit
5946 = (flag_instrument_function_entry_exit
5947 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5948
5949 current_function_limit_stack
5950 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5951
5952 /* If function gets a static chain arg, store it in the stack frame.
5953 Do this first, so it gets the first stack slot offset. */
5954 if (current_function_needs_context)
5955 {
5956 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
f0c51a1e 5957
5958 /* Delay copying static chain if it is not a register to avoid
5959 conflicts with regs used for parameters. */
5960 if (! SMALL_REGISTER_CLASSES
5961 || GET_CODE (static_chain_incoming_rtx) == REG)
f0c51a1e 5962 emit_move_insn (last_ptr, static_chain_incoming_rtx);
3e2481e9 5963 }
5964
5965 /* If the parameters of this function need cleaning up, get a label
5966 for the beginning of the code which executes those cleanups. This must
5967 be done before doing anything with return_label. */
5968 if (parms_have_cleanups)
5969 cleanup_label = gen_label_rtx ();
5970 else
5971 cleanup_label = 0;
5972
5973 /* Make the label for return statements to jump to, if this machine
5974 does not have a one-instruction return and uses an epilogue,
5975 or if it returns a structure, or if it has parm cleanups. */
5976#ifdef HAVE_return
5977 if (cleanup_label == 0 && HAVE_return
07417085 5978 && ! current_function_instrument_entry_exit
5979 && ! current_function_returns_pcc_struct
5980 && ! (current_function_returns_struct && ! optimize))
5981 return_label = 0;
5982 else
5983 return_label = gen_label_rtx ();
5984#else
5985 return_label = gen_label_rtx ();
5986#endif
5987
5988 /* Initialize rtx used to return the value. */
5989 /* Do this before assign_parms so that we copy the struct value address
5990 before any library calls that assign parms might generate. */
5991
5992 /* Decide whether to return the value in memory or in a register. */
5993 if (aggregate_value_p (DECL_RESULT (subr)))
5994 {
5995 /* Returning something that won't go in a register. */
4acc00bf 5996 register rtx value_address = 0;
5997
5998#ifdef PCC_STATIC_STRUCT_RETURN
5999 if (current_function_returns_pcc_struct)
6000 {
6001 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6002 value_address = assemble_static_space (size);
6003 }
6004 else
6005#endif
6006 {
6007 /* Expect to be passed the address of a place to store the value.
6008 If it is passed as an argument, assign_parms will take care of
6009 it. */
6010 if (struct_value_incoming_rtx)
6011 {
6012 value_address = gen_reg_rtx (Pmode);
6013 emit_move_insn (value_address, struct_value_incoming_rtx);
6014 }
6015 }
6016 if (value_address)
6017 {
6018 DECL_RTL (DECL_RESULT (subr))
38a448ca 6019 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6020 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6021 AGGREGATE_TYPE_P (TREE_TYPE
6022 (DECL_RESULT
6023 (subr))));
ccdecf58 6024 }
6025 }
6026 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6027 /* If return mode is void, this decl rtl should not be used. */
6028 DECL_RTL (DECL_RESULT (subr)) = 0;
07417085 6029 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6030 {
6031 /* If function will end with cleanup code for parms,
6032 compute the return values into a pseudo reg,
6033 which we will copy into the true return register
6034 after the cleanups are done. */
6035
6036 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
a5a52dbc 6037
6038#ifdef PROMOTE_FUNCTION_RETURN
6039 tree type = TREE_TYPE (DECL_RESULT (subr));
6040 int unsignedp = TREE_UNSIGNED (type);
6041
a5a52dbc 6042 mode = promote_mode (type, mode, &unsignedp, 1);
6043#endif
6044
6045 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6046 }
6047 else
6048 /* Scalar, returned in a register. */
6049 {
6050#ifdef FUNCTION_OUTGOING_VALUE
6051 DECL_RTL (DECL_RESULT (subr))
6052 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6053#else
6054 DECL_RTL (DECL_RESULT (subr))
6055 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6056#endif
6057
6058 /* Mark this reg as the function's return value. */
6059 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6060 {
6061 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6062 /* Needed because we may need to move this to memory
6063 in case it's a named return value whose address is taken. */
a82ad570 6064 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6065 }
6066 }
6067
6068 /* Initialize rtx for parameters and local variables.
6069 In some cases this requires emitting insns. */
6070
0d1416c6 6071 assign_parms (subr);
6f086dfc 6072
6073 /* Copy the static chain now if it wasn't a register. The delay is to
6074 avoid conflicts with the parameter passing registers. */
6075
f95182a4 6076 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6077 if (GET_CODE (static_chain_incoming_rtx) != REG)
6078 emit_move_insn (last_ptr, static_chain_incoming_rtx);
f0c51a1e 6079
6080 /* The following was moved from init_function_start.
6081 The move is supposed to make sdb output more accurate. */
6082 /* Indicate the beginning of the function body,
6083 as opposed to parm setup. */
5f4f0e22 6084 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6085
6086 /* If doing stupid allocation, mark parms as born here. */
6087
6088 if (GET_CODE (get_last_insn ()) != NOTE)
5f4f0e22 6089 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6090 parm_birth_insn = get_last_insn ();
6091
6092 if (obey_regdecls)
6093 {
6094 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6095 use_variable (regno_reg_rtx[i]);
6096
6097 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6098 use_variable (current_function_internal_arg_pointer);
6099 }
6100
6101 context_display = 0;
6102 if (current_function_needs_context)
ac9e20f0 6103 {
6104 /* Fetch static chain values for containing functions. */
6105 tem = decl_function_context (current_function_decl);
6106 /* If not doing stupid register allocation, copy the static chain
6107 pointer into a pseudo. If we have small register classes, copy
6108 the value from memory if static_chain_incoming_rtx is a REG. If
6109 we do stupid register allocation, we use the stack address
6110 generated above. */
6111 if (tem && ! obey_regdecls)
6112 {
6113 /* If the static chain originally came in a register, put it back
6114 there, then move it out in the next insn. The reason for
6115 this peculiar code is to satisfy function integration. */
6116 if (SMALL_REGISTER_CLASSES
6117 && GET_CODE (static_chain_incoming_rtx) == REG)
6d7306f7 6118 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6119 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6120 }
ac9e20f0 6121
6122 while (tem)
6123 {
6124 tree rtlexp = make_node (RTL_EXPR);
6f086dfc 6125
6126 RTL_EXPR_RTL (rtlexp) = last_ptr;
6127 context_display = tree_cons (tem, rtlexp, context_display);
6128 tem = decl_function_context (tem);
6129 if (tem == 0)
6130 break;
6131 /* Chain thru stack frames, assuming pointer to next lexical frame
6132 is found at the place we always store it. */
6f086dfc 6133#ifdef FRAME_GROWS_DOWNWARD
6d7306f7 6134 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6f086dfc 6135#endif
38a448ca 6136 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6137 memory_address (Pmode,
6138 last_ptr)));
6139
6140 /* If we are not optimizing, ensure that we know that this
6141 piece of context is live over the entire function. */
6142 if (! optimize)
6143 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6144 save_expr_regs);
6d7306f7 6145 }
6146 }
6147
6148 if (current_function_instrument_entry_exit)
6149 {
6150 rtx fun = DECL_RTL (current_function_decl);
6151 if (GET_CODE (fun) == MEM)
6152 fun = XEXP (fun, 0);
6153 else
6154 abort ();
6155 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6156 fun, Pmode,
6157 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6158 0,
6159 hard_frame_pointer_rtx),
6160 Pmode);
6161 }
6162
6163 /* After the display initializations is where the tail-recursion label
6164 should go, if we end up needing one. Ensure we have a NOTE here
6165 since some things (like trampolines) get placed before this. */
5f4f0e22 6166 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6167
6168 /* Evaluate now the sizes of any types declared among the arguments. */
6169 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
7b05e286 6170 {
6171 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6172 EXPAND_MEMORY_USE_BAD);
6173 /* Flush the queue in case this parameter declaration has
6174 side-effects. */
6175 emit_queue ();
6176 }
6177
6178 /* Make sure there is a line number after the function entry setup code. */
6179 force_next_line_note ();
6180}
6181\f
6182/* Undo the effects of init_dummy_function_start. */
6183void
6184expand_dummy_function_end ()
6185{
6186 /* End any sequences that failed to be closed due to syntax errors. */
6187 while (in_sequence_p ())
6188 end_sequence ();
6189
6190 /* Outside function body, can't compute type's actual size
6191 until next function's body starts. */
6192
6193 free_after_parsing (current_function);
6194 free_after_compilation (current_function);
6195 free (current_function);
6196 current_function = 0;
6197}
6198
6199/* Emit CODE for each register of the return value. Useful values for
6200 code are USE and CLOBBER. */
6201
6202void
6203diddle_return_value (code)
6204 enum rtx_code code;
6205{
d3f2edae 6206 tree decl_result = DECL_RESULT (current_function_decl);
cfb0346d 6207 rtx return_reg = DECL_RTL (decl_result);
bd695e1e 6208
cfb0346d 6209 if (return_reg)
6210 {
6211 if (GET_CODE (return_reg) == REG
6212 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
6213 {
6214 /* Use hard_function_value to avoid creating a reference to a BLKmode
6215 register in the USE/CLOBBER insn. */
6216 return_reg = hard_function_value (TREE_TYPE (decl_result),
4dc07bd7 6217 current_function_decl, 1);
2cfcc23e 6218 REG_FUNCTION_VALUE_P (return_reg) = 1;
6219 emit_insn (gen_rtx_fmt_e (code, VOIDmode, return_reg));
6220 }
6221 else if (GET_CODE (return_reg) == PARALLEL)
6222 {
6223 int i;
6224
6225 for (i = 0; i < XVECLEN (return_reg, 0); i++)
6226 {
6227 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
6228
6229 if (GET_CODE (x) == REG
6230 && REGNO (x) < FIRST_PSEUDO_REGISTER)
6231 emit_insn (gen_rtx_fmt_e (code, VOIDmode, x));
6232 }
6233 }
6234 }
6235}
6236
6f086dfc 6237/* Generate RTL for the end of the current function.
980697fd 6238 FILENAME and LINE are the current position in the source file.
6f086dfc 6239
980697fd 6240 It is up to language-specific callers to do cleanups for parameters--
1be07046 6241 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6242
6243void
1be07046 6244expand_function_end (filename, line, end_bindings)
6245 char *filename;
6246 int line;
1be07046 6247 int end_bindings;
6248{
6249 register int i;
6250 tree link;
6251
1e2414db 6252#ifdef TRAMPOLINE_TEMPLATE
6f086dfc 6253 static rtx initial_trampoline;
1e2414db 6254#endif
6f086dfc 6255
6256 finish_expr_for_function ();
6257
6258#ifdef NON_SAVING_SETJMP
6259 /* Don't put any variables in registers if we call setjmp
6260 on a machine that fails to restore the registers. */
6261 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6262 {
6263 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6264 setjmp_protect (DECL_INITIAL (current_function_decl));
6265
6266 setjmp_protect_args ();
6267 }
6268#endif
6269
6270 /* Save the argument pointer if a save area was made for it. */
6271 if (arg_pointer_save_area)
6272 {
6273 /* arg_pointer_save_area may not be a valid memory address, so we
6274 have to check it and fix it if necessary. */
6275 rtx seq;
6276 start_sequence ();
6277 emit_move_insn (validize_mem (arg_pointer_save_area),
6278 virtual_incoming_args_rtx);
6279 seq = gen_sequence ();
6280 end_sequence ();
6281 emit_insn_before (seq, tail_recursion_reentry);
6282 }
6283
6284 /* Initialize any trampolines required by this function. */
6285 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6286 {
6287 tree function = TREE_PURPOSE (link);
6288 rtx context = lookup_static_chain (function);
6289 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
7a87758d 6290#ifdef TRAMPOLINE_TEMPLATE
1e2414db 6291 rtx blktramp;
7a87758d 6292#endif
6293 rtx seq;
6294
1e2414db 6295#ifdef TRAMPOLINE_TEMPLATE
6296 /* First make sure this compilation has a template for
6297 initializing trampolines. */
6298 if (initial_trampoline == 0)
6299 {
6300 end_temporary_allocation ();
6301 initial_trampoline
38a448ca 6302 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
86f8eff3 6303 resume_temporary_allocation ();
6304
6305 ggc_add_rtx_root (&initial_trampoline, 1);
86f8eff3 6306 }
1e2414db 6307#endif
6308
6309 /* Generate insns to initialize the trampoline. */
6310 start_sequence ();
6311 tramp = round_trampoline_addr (XEXP (tramp, 0));
6312#ifdef TRAMPOLINE_TEMPLATE
6313 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6314 emit_block_move (blktramp, initial_trampoline,
6315 GEN_INT (TRAMPOLINE_SIZE),
189cc377 6316 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6317#endif
6318 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6319 seq = get_insns ();
6320 end_sequence ();
6321
6322 /* Put those insns at entry to the containing function (this one). */
6323 emit_insns_before (seq, tail_recursion_reentry);
6324 }
6f086dfc 6325
6326 /* If we are doing stack checking and this function makes calls,
6327 do a stack probe at the start of the function to ensure we have enough
6328 space for another stack frame. */
6329 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6330 {
6331 rtx insn, seq;
6332
6333 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6334 if (GET_CODE (insn) == CALL_INSN)
6335 {
6336 start_sequence ();
6337 probe_stack_range (STACK_CHECK_PROTECT,
6338 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6339 seq = get_insns ();
6340 end_sequence ();
6341 emit_insns_before (seq, tail_recursion_reentry);
6342 break;
6343 }
6344 }
6345
6346 /* Warn about unused parms if extra warnings were specified. */
6347 if (warn_unused && extra_warnings)
6f086dfc 6348 {
db8717d9 6349 tree decl;
6350
6351 for (decl = DECL_ARGUMENTS (current_function_decl);
6352 decl; decl = TREE_CHAIN (decl))
6353 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6354 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6355 warning_with_decl (decl, "unused parameter `%s'");
6356 }
6357
6358 /* Delete handlers for nonlocal gotos if nothing uses them. */
6359 if (nonlocal_goto_handler_slots != 0
6360 && ! current_function_has_nonlocal_label)
6361 delete_handlers ();
6362
6363 /* End any sequences that failed to be closed due to syntax errors. */
6364 while (in_sequence_p ())
5f4f0e22 6365 end_sequence ();
6366
6367 /* Outside function body, can't compute type's actual size
6368 until next function's body starts. */
6369 immediate_size_expand--;
6370
6371 /* If doing stupid register allocation,
6372 mark register parms as dying here. */
6373
6374 if (obey_regdecls)
6375 {
6376 rtx tem;
6377 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6378 use_variable (regno_reg_rtx[i]);
6379
6380 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6381
6382 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6383 {
6384 use_variable (XEXP (tem, 0));
6385 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6386 }
6387
6388 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6389 use_variable (current_function_internal_arg_pointer);
6390 }
6391
6392 clear_pending_stack_adjust ();
6393 do_pending_stack_adjust ();
6394
6395 /* Mark the end of the function body.
6396 If control reaches this insn, the function can drop through
6397 without returning a value. */
5f4f0e22 6398 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6f086dfc 6399
82e415a3
DE
6400 /* Must mark the last line number note in the function, so that the test
6401 coverage code can avoid counting the last line twice. This just tells
6402 the code to ignore the immediately following line note, since there
6403 already exists a copy of this note somewhere above. This line number
6404 note is still needed for debugging though, so we can't delete it. */
6405 if (flag_test_coverage)
6406 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6407
6f086dfc
RS
6408   /* Output a line number for the end of the function.
6409 SDB depends on this. */
6410 emit_line_note_force (filename, line);
6411
6412 /* Output the label for the actual return from the function,
6413 if one is expected. This happens either because a function epilogue
6414 is used instead of a return instruction, or because a return was done
6415 with a goto in order to run local cleanups, or because of pcc-style
6416 structure returning. */
6417
6418 if (return_label)
bd695e1e
RH
6419 {
6420 /* Before the return label, clobber the return registers so that
6421 	 they are not propagated live to the rest of the function.  This
6422 can only happen with functions that drop through; if there had
6423 been a return statement, there would have either been a return
6424 rtx, or a jump to the return label. */
6425 diddle_return_value (CLOBBER);
6426
6427 emit_label (return_label);
6428 }
6f086dfc 6429
1be07046
RS
6430 /* C++ uses this. */
6431 if (end_bindings)
6432 expand_end_bindings (0, 0, 0);
6433
e5a1e0e8
MS
6434 /* Now handle any leftover exception regions that may have been
6435 created for the parameters. */
6436 {
6437 rtx last = get_last_insn ();
6438 rtx label;
6439
6440 expand_leftover_cleanups ();
6441
1e4ceb6f
MM
6442 /* If there are any catch_clauses remaining, output them now. */
6443 emit_insns (catch_clauses);
6444 catch_clauses = NULL_RTX;
e5a1e0e8
MS
6445     /* If the above emitted any code, make sure we jump around it. */
6446 if (last != get_last_insn ())
6447 {
6448 label = gen_label_rtx ();
6449 last = emit_jump_insn_after (gen_jump (label), last);
6450 last = emit_barrier_after (last);
6451 emit_label (label);
6452 }
6453 }
6454
07417085
KR
6455 if (current_function_instrument_entry_exit)
6456 {
6457 rtx fun = DECL_RTL (current_function_decl);
6458 if (GET_CODE (fun) == MEM)
6459 fun = XEXP (fun, 0);
6460 else
6461 abort ();
6462 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6463 fun, Pmode,
6464 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6465 0,
6466 hard_frame_pointer_rtx),
6467 Pmode);
6468 }
6469
6f086dfc
RS
6470 /* If we had calls to alloca, and this machine needs
6471 an accurate stack pointer to exit the function,
6472 insert some code to save and restore the stack pointer. */
6473#ifdef EXIT_IGNORE_STACK
6474 if (! EXIT_IGNORE_STACK)
6475#endif
6476 if (current_function_calls_alloca)
6477 {
59257ff7
RK
6478 rtx tem = 0;
6479
6480 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5f4f0e22 6481 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6f086dfc
RS
6482 }
6483
6484 /* If scalar return value was computed in a pseudo-reg,
6485 copy that to the hard return register. */
6486 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6487 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6488 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6489 >= FIRST_PSEUDO_REGISTER))
6490 {
6491 rtx real_decl_result;
6492
6493#ifdef FUNCTION_OUTGOING_VALUE
6494 real_decl_result
6495 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6496 current_function_decl);
6497#else
6498 real_decl_result
6499 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6500 current_function_decl);
6501#endif
6502 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
ecec4441
JW
6503 /* If this is a BLKmode structure being returned in registers, then use
6504 the mode computed in expand_return. */
6505 if (GET_MODE (real_decl_result) == BLKmode)
6506 PUT_MODE (real_decl_result,
6507 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6f086dfc
RS
6508 emit_move_insn (real_decl_result,
6509 DECL_RTL (DECL_RESULT (current_function_decl)));
38a448ca 6510 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
f345de42
JL
6511
6512 /* The delay slot scheduler assumes that current_function_return_rtx
6513 holds the hard register containing the return value, not a temporary
6514 pseudo. */
6515 current_function_return_rtx = real_decl_result;
6f086dfc
RS
6516 }
6517
6518 /* If returning a structure, arrange to return the address of the value
6519 in a place where debuggers expect to find it.
6520
6521 If returning a structure PCC style,
6522 the caller also depends on this value.
6523 And current_function_returns_pcc_struct is not necessarily set. */
6524 if (current_function_returns_struct
6525 || current_function_returns_pcc_struct)
6526 {
6527 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6528 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6529#ifdef FUNCTION_OUTGOING_VALUE
6530 rtx outgoing
6531 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6532 current_function_decl);
6533#else
6534 rtx outgoing
6535 = FUNCTION_VALUE (build_pointer_type (type),
6536 current_function_decl);
6537#endif
6538
6539 /* Mark this as a function return value so integrate will delete the
6540 assignment and USE below when inlining this function. */
6541 REG_FUNCTION_VALUE_P (outgoing) = 1;
6542
6543 emit_move_insn (outgoing, value_address);
6544 use_variable (outgoing);
6545 }
6546
71038426
RH
6547 /* If this is an implementation of __throw, do what's necessary to
6548 communicate between __builtin_eh_return and the epilogue. */
6549 expand_eh_return ();
6550
6f086dfc
RS
6551 /* Output a return insn if we are using one.
6552 Otherwise, let the rtl chain end here, to drop through
6553 into the epilogue. */
6554
6555#ifdef HAVE_return
6556 if (HAVE_return)
6557 {
6558 emit_jump_insn (gen_return ());
6559 emit_barrier ();
6560 }
6561#endif
6562
6563 /* Fix up any gotos that jumped out to the outermost
6564 binding level of the function.
6565 Must follow emitting RETURN_LABEL. */
6566
6567 /* If you have any cleanups to do at this point,
6568 and they need to create temporary variables,
6569 then you will lose. */
e15679f8 6570 expand_fixups (get_insns ());
6f086dfc 6571}
bdac5f58 6572\f
bdac5f58
TW
6573/* Create an array that records the INSN_UIDs of INSNS (either a sequence
6574 or a single insn). */
6575
6576static int *
6577record_insns (insns)
6578 rtx insns;
6579{
6580 int *vec;
6581
6582 if (GET_CODE (insns) == SEQUENCE)
6583 {
6584 int len = XVECLEN (insns, 0);
6585 vec = (int *) oballoc ((len + 1) * sizeof (int));
6586 vec[len] = 0;
6587 while (--len >= 0)
6588 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6589 }
6590 else
6591 {
6592 vec = (int *) oballoc (2 * sizeof (int));
6593 vec[0] = INSN_UID (insns);
6594 vec[1] = 0;
6595 }
6596 return vec;
6597}
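/* Editor's note -- a sketch, not part of the original source: the
   vector returned above is zero-terminated rather than length-prefixed,
   so every consumer shares one scanning idiom, e.g.:

     for (len = 0; vec[len]; len++)
       ;

   as contains and reposition_prologue_and_epilogue_notes below do.  */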
6598
10914065 6599/* Determine how many INSN_UIDs in VEC are part of INSN. */
bdac5f58 6600
10914065 6601static int
bdac5f58
TW
6602contains (insn, vec)
6603 rtx insn;
6604 int *vec;
6605{
6606 register int i, j;
6607
6608 if (GET_CODE (insn) == INSN
6609 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6610 {
10914065 6611 int count = 0;
bdac5f58
TW
6612 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6613 for (j = 0; vec[j]; j++)
6614 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
10914065
TW
6615 count++;
6616 return count;
bdac5f58
TW
6617 }
6618 else
6619 {
6620 for (j = 0; vec[j]; j++)
6621 if (INSN_UID (insn) == vec[j])
10914065 6622 return 1;
bdac5f58
TW
6623 }
6624 return 0;
6625}
5c7675e9
RH
6626
6627int
6628prologue_epilogue_contains (insn)
6629 rtx insn;
6630{
6631 if (prologue && contains (insn, prologue))
6632 return 1;
6633 if (epilogue && contains (insn, epilogue))
6634 return 1;
6635 return 0;
6636}
bdac5f58 6637
9faa82d8 6638/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58
TW
6639 this into place with notes indicating where the prologue ends and where
6640 the epilogue begins. Update the basic block information when possible. */
6641
6642void
6643thread_prologue_and_epilogue_insns (f)
54ea1de9 6644 rtx f ATTRIBUTE_UNUSED;
bdac5f58 6645{
e881bb1b
RH
6646   int inserted = 0;
6647
bdac5f58
TW
6648#ifdef HAVE_prologue
6649 if (HAVE_prologue)
6650 {
e881bb1b 6651 rtx seq;
bdac5f58 6652
e881bb1b
RH
6653 start_sequence ();
6654       seq = gen_prologue ();
6655 emit_insn (seq);
bdac5f58
TW
6656
6657 /* Retain a map of the prologue insns. */
e881bb1b
RH
6658 if (GET_CODE (seq) != SEQUENCE)
6659 seq = get_insns ();
6660 prologue = record_insns (seq);
6661
6662 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6663 seq = gen_sequence ();
6664 end_sequence ();
6665
6666 /* If optimization is off, and perhaps in an empty function,
6667 the entry block will have no successors. */
6668 if (ENTRY_BLOCK_PTR->succ)
6669 {
6670 	  /* Can't deal with multiple successors of the entry block.  */
6671 if (ENTRY_BLOCK_PTR->succ->succ_next)
6672 abort ();
6673
6674 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6675 	  inserted = 1;
6676 }
6677 else
6678 emit_insn_after (seq, f);
bdac5f58 6679 }
bdac5f58 6680#endif
bdac5f58
TW
6681
6682#ifdef HAVE_epilogue
6683 if (HAVE_epilogue)
6684 {
e881bb1b
RH
6685 edge e;
6686 basic_block bb = 0;
6687 rtx tail = get_last_insn ();
6688
6689       /* ??? This is ghastly.  If function returns were not done via uses,
6690 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6691 	 and all of this ugliness would go away.  */
bdac5f58 6692
e881bb1b 6693 switch (optimize)
bdac5f58 6694 {
e881bb1b
RH
6695 default:
6696 /* If the exit block has no non-fake predecessors, we don't
6697 need an epilogue. Furthermore, only pay attention to the
6698 fallthru predecessors; if (conditional) return insns were
6699 generated, by definition we do not need to emit epilogue
6700 insns. */
6701
6702 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6703 if ((e->flags & EDGE_FAKE) == 0
6704 && (e->flags & EDGE_FALLTHRU) != 0)
6705 break;
6706 if (e == NULL)
6707 break;
6708
6709 /* We can't handle multiple epilogues -- if one is needed,
6710 we won't be able to place it multiple times.
6711
6712 ??? Fix epilogue expanders to not assume they are the
6713 last thing done compiling the function. Either that
6714 or copy_rtx each insn.
6715
6716 ??? Blah, it's not a simple expression to assert that
6717 	     we have exactly one fallthru exit edge.  */
6718
6719 bb = e->src;
6720 tail = bb->end;
6721
6722 /* ??? If the last insn of the basic block is a jump, then we
6723 are creating a new basic block. Wimp out and leave these
6724 insns outside any block. */
6725 if (GET_CODE (tail) == JUMP_INSN)
6726 bb = 0;
6727
6728 /* FALLTHRU */
6729 case 0:
6730 {
6731 rtx prev, seq, first_use;
6732
6733 /* Move the USE insns at the end of a function onto a list. */
6734 prev = tail;
6735 if (GET_CODE (prev) == BARRIER
6736 || GET_CODE (prev) == NOTE)
bdac5f58 6737 prev = prev_nonnote_insn (prev);
a78bdb38 6738
e881bb1b
RH
6739 first_use = 0;
6740 if (prev
6741 && GET_CODE (prev) == INSN
6742 && GET_CODE (PATTERN (prev)) == USE)
6743 {
6744 /* If the end of the block is the use, grab hold of something
6745 else so that we emit barriers etc in the right place. */
6746 if (prev == tail)
6747 {
6748 do
6749 tail = PREV_INSN (tail);
6750 while (GET_CODE (tail) == INSN
6751 && GET_CODE (PATTERN (tail)) == USE);
6752 }
bdac5f58 6753
e881bb1b
RH
6754 do
6755 {
6756 rtx use = prev;
6757 prev = prev_nonnote_insn (prev);
6758
6759 remove_insn (use);
6760 if (first_use)
6761 {
6762 NEXT_INSN (use) = first_use;
6763 PREV_INSN (first_use) = use;
6764 }
6765 else
6766 NEXT_INSN (use) = NULL_RTX;
6767 first_use = use;
6768 }
6769 while (prev
6770 && GET_CODE (prev) == INSN
6771 && GET_CODE (PATTERN (prev)) == USE);
6772 }
a78bdb38 6773
e881bb1b
RH
6774 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6775 epilogue insns, the USE insns at the end of a function,
6776 the jump insn that returns, and then a BARRIER. */
a78bdb38 6777
e881bb1b
RH
6778 if (GET_CODE (tail) != BARRIER)
6779 {
6780 prev = next_nonnote_insn (tail);
6781 if (!prev || GET_CODE (prev) != BARRIER)
6782 emit_barrier_after (tail);
6783 }
a78bdb38 6784
e881bb1b
RH
6785 seq = gen_epilogue ();
6786 prev = tail;
6787 tail = emit_jump_insn_after (seq, tail);
bdac5f58 6788
e881bb1b
RH
6789 /* Insert the USE insns immediately before the return insn, which
6790 must be the last instruction emitted in the sequence. */
6791 if (first_use)
6792 emit_insns_before (first_use, tail);
6793 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
bdac5f58 6794
e881bb1b
RH
6795 /* Update the tail of the basic block. */
6796 if (bb)
6797 bb->end = tail;
6798
6799 /* Retain a map of the epilogue insns. */
6800 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6801 }
bdac5f58
TW
6802 }
6803 }
6804#endif
e881bb1b
RH
6805
6806   if (inserted)
6807 commit_edge_insertions ();
bdac5f58
TW
6808}
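/* Editor's note -- a sketch, not part of the original source: insns
   queued with insert_insn_on_edge are held on the edge rather than
   placed in the insn chain; commit_edge_insertions later splits each
   such edge and emits the queued sequence in a new basic block.  That
   is why the prologue path above sets `inserted' and defers the commit
   until the epilogue has also been threaded:

     insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
     ...
     if (inserted)
       commit_edge_insertions ();
*/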
6809
6810/* Reposition the prologue-end and epilogue-begin notes after instruction
6811 scheduling and delayed branch scheduling. */
6812
6813void
6814reposition_prologue_and_epilogue_notes (f)
79c9824e 6815 rtx f ATTRIBUTE_UNUSED;
bdac5f58
TW
6816{
6817#if defined (HAVE_prologue) || defined (HAVE_epilogue)
6818 /* Reposition the prologue and epilogue notes. */
6819 if (n_basic_blocks)
6820 {
bf526252 6821 int len;
bdac5f58
TW
6822
6823 if (prologue)
6824 {
bf526252
RK
6825 register rtx insn, note = 0;
6826
6827 /* Scan from the beginning until we reach the last prologue insn.
6828 We apparently can't depend on basic_block_{head,end} after
6829 reorg has run. */
6830 for (len = 0; prologue[len]; len++)
6831 ;
9392c110
JH
6832 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6833 {
6834 if (GET_CODE (insn) == NOTE)
6835 {
6836 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6837 note = insn;
6838 }
6839 else if ((len -= contains (insn, prologue)) == 0)
6840 {
89e99eea 6841 rtx next;
9392c110
JH
6842 /* Find the prologue-end note if we haven't already, and
6843 move it to just after the last prologue insn. */
6844 if (note == 0)
6845 {
51723711 6846 for (note = insn; (note = NEXT_INSN (note));)
9392c110
JH
6847 if (GET_CODE (note) == NOTE
6848 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6849 break;
6850 }
c93b03c2 6851
9392c110 6852 next = NEXT_INSN (note);
c93b03c2 6853
3b413743 6854 /* Whether or not we can depend on BLOCK_HEAD,
c93b03c2 6855 attempt to keep it up-to-date. */
3b413743
RH
6856 if (BLOCK_HEAD (0) == note)
6857 BLOCK_HEAD (0) = next;
c93b03c2 6858
89e99eea 6859 remove_insn (note);
9392c110
JH
6860 add_insn_after (note, insn);
6861 }
6862 }
bdac5f58
TW
6863 }
6864
6865 if (epilogue)
6866 {
bf526252
RK
6867 register rtx insn, note = 0;
6868
6869 /* Scan from the end until we reach the first epilogue insn.
6870 We apparently can't depend on basic_block_{head,end} after
6871 reorg has run. */
6872 for (len = 0; epilogue[len]; len++)
6873 ;
9392c110
JH
6874 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6875 {
6876 if (GET_CODE (insn) == NOTE)
6877 {
6878 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6879 note = insn;
6880 }
6881 else if ((len -= contains (insn, epilogue)) == 0)
6882 {
6883 /* Find the epilogue-begin note if we haven't already, and
6884 move it to just before the first epilogue insn. */
6885 if (note == 0)
6886 {
51723711 6887 for (note = insn; (note = PREV_INSN (note));)
9392c110
JH
6888 if (GET_CODE (note) == NOTE
6889 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6890 break;
6891 }
c93b03c2 6892
3b413743 6893 /* Whether or not we can depend on BLOCK_HEAD,
c93b03c2
RH
6894 attempt to keep it up-to-date. */
6895 if (n_basic_blocks
3b413743
RH
6896 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6897 BLOCK_HEAD (n_basic_blocks-1) = note;
c93b03c2 6898
89e99eea 6899 remove_insn (note);
c93b03c2 6900 add_insn_before (note, insn);
9392c110
JH
6901 }
6902 }
bdac5f58
TW
6903 }
6904 }
6905#endif /* HAVE_prologue or HAVE_epilogue */
6906}
87ff9c8e
RH
6907
6908/* Mark T for GC. */
6909
6910static void
6911mark_temp_slot (t)
6912 struct temp_slot *t;
6913{
6914 while (t)
6915 {
6916 ggc_mark_rtx (t->slot);
6917 ggc_mark_rtx (t->address);
6918 ggc_mark_tree (t->rtl_expr);
6919
6920 t = t->next;
6921 }
6922}
6923
6924/* Mark P for GC. */
6925
6926static void
fa51b01b 6927mark_function_status (p)
87ff9c8e
RH
6928 struct function *p;
6929{
6930 int i;
6931 rtx *r;
6932
6933 if (p == 0)
6934 return;
6935
6936 ggc_mark_rtx (p->arg_offset_rtx);
6937
21cd906e
MM
6938 if (p->x_parm_reg_stack_loc)
6939 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6940 i > 0; --i, ++r)
6941 ggc_mark_rtx (*r);
87ff9c8e
RH
6942
6943 ggc_mark_rtx (p->return_rtx);
6944 ggc_mark_rtx (p->x_cleanup_label);
6945 ggc_mark_rtx (p->x_return_label);
6946 ggc_mark_rtx (p->x_save_expr_regs);
6947 ggc_mark_rtx (p->x_stack_slot_list);
6948 ggc_mark_rtx (p->x_parm_birth_insn);
6949 ggc_mark_rtx (p->x_tail_recursion_label);
6950 ggc_mark_rtx (p->x_tail_recursion_reentry);
6951 ggc_mark_rtx (p->internal_arg_pointer);
6952 ggc_mark_rtx (p->x_arg_pointer_save_area);
6953 ggc_mark_tree (p->x_rtl_expr_chain);
6954 ggc_mark_rtx (p->x_last_parm_insn);
6955 ggc_mark_tree (p->x_context_display);
6956 ggc_mark_tree (p->x_trampoline_list);
6957 ggc_mark_rtx (p->epilogue_delay_list);
6958
6959 mark_temp_slot (p->x_temp_slots);
6960
6961 {
6962 struct var_refs_queue *q = p->fixup_var_refs_queue;
6963 while (q)
6964 {
6965 ggc_mark_rtx (q->modified);
6966 q = q->next;
6967 }
6968 }
6969
6970 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
afe3d090 6971 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
87ff9c8e
RH
6972 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6973 ggc_mark_tree (p->x_nonlocal_labels);
6974}
6975
6976/* Mark the function chain ARG (which is really a struct function **)
6977 for GC. */
6978
6979static void
6980mark_function_chain (arg)
6981 void *arg;
6982{
6983 struct function *f = *(struct function **) arg;
6984
6985 for (; f; f = f->next_global)
6986 {
87ff9c8e
RH
6987 ggc_mark_tree (f->decl);
6988
fa51b01b
RH
6989 mark_function_status (f);
6990 mark_eh_status (f->eh);
6991 mark_stmt_status (f->stmt);
6992 mark_expr_status (f->expr);
6993 mark_emit_status (f->emit);
6994 mark_varasm_status (f->varasm);
87ff9c8e
RH
6995
6996 if (mark_machine_status)
6997 (*mark_machine_status) (f);
6998 if (mark_lang_status)
6999 (*mark_lang_status) (f);
7000
7001 if (f->original_arg_vector)
7002 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7003 if (f->original_decl_initial)
7004 ggc_mark_tree (f->original_decl_initial);
7005 }
7006}
7007
7008/* Called once, at initialization, to initialize function.c. */
7009
7010void
7011init_function_once ()
7012{
7013 ggc_add_root (&all_functions, 1, sizeof all_functions,
7014 mark_function_chain);
7015}
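/* Editor's note -- a sketch, not part of the original source:
   ggc_add_root (BASE, NELT, SIZE, MARKER) registers NELT objects of
   SIZE bytes at BASE as garbage-collection roots, and MARKER is called
   with the address of each registered object at collection time.  Here
   mark_function_chain receives &all_functions and walks every saved
   function via the next_global links.  A hypothetical front end
   keeping its own chain would register it the same way:

     ggc_add_root (&my_function_chain, 1, sizeof my_function_chain,
                   mark_my_function_chain);

   where `my_function_chain' and `mark_my_function_chain' are
   illustrative names only.  */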