1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "except.h"
48 #include "function.h"
49 #include "insn-flags.h"
50 #include "expr.h"
51 #include "insn-codes.h"
52 #include "regs.h"
53 #include "hard-reg-set.h"
54 #include "insn-config.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "basic-block.h"
58 #include "obstack.h"
59 #include "toplev.h"
60 #include "hash.h"
61 #include "ggc.h"
62
63 #ifndef TRAMPOLINE_ALIGNMENT
64 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
65 #endif
66
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
70
71 /* Some systems use __main in a way incompatible with its use in gcc. In these
72 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
73 give the same symbol without quotes for an alternative entry point. You
74 must define both, or neither. */
75 #ifndef NAME__MAIN
76 #define NAME__MAIN "__main"
77 #define SYMBOL__MAIN __main
78 #endif
79
80 /* Round a value down to the largest multiple of the required alignment
81 that does not exceed it. Avoid using division in case the value is
82 negative. Assume the alignment is a power of two. */
83 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
84
85 /* Similar, but round up to the nearest multiple of the required
86 alignment. */
87 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
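/* Illustrative only, not part of the original source: with a power-of-two
   ALIGN, the mask ~(ALIGN - 1) clears the low-order bits, so for ALIGN == 8:

	FLOOR_ROUND (-5, 8) == -8	(rounds toward more negative offsets)
	FLOOR_ROUND (8, 8)  ==  8	(aligned values are unchanged)
	CEIL_ROUND (5, 8)   ==  8
	CEIL_ROUND (16, 8)  == 16

   Masking is well defined for negative VALUEs on a two's-complement
   machine, where the rounding of signed division is not.  */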
88
89 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
90 during rtl generation. If they are different register numbers, this is
91 always true. It may also be true if
92 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
93 generation. See fix_lexical_addr for details. */
94
95 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
96 #define NEED_SEPARATE_AP
97 #endif
98
99 /* Nonzero if function being compiled doesn't contain any calls
100 (ignoring the prologue and epilogue). This is set prior to
101 local register allocation and is valid for the remaining
102 compiler passes. */
103 int current_function_is_leaf;
104
105 /* Nonzero if function being compiled doesn't modify the stack pointer
106 (ignoring the prologue and epilogue). This is only valid after
107 life_analysis has run. */
108 int current_function_sp_is_unchanging;
109
110 /* Nonzero if the function being compiled is a leaf function which only
111 uses leaf registers. This is valid after reload (specifically after
112 sched2) and is useful only if the port defines LEAF_REGISTERS. */
113 int current_function_uses_only_leaf_regs;
114
115 /* Nonzero once virtual register instantiation has been done.
116 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
117 static int virtuals_instantiated;
118
119 /* These variables hold pointers to functions to
120 save and restore machine-specific data,
121 in push_function_context and pop_function_context. */
122 void (*init_machine_status) PROTO((struct function *));
123 void (*save_machine_status) PROTO((struct function *));
124 void (*restore_machine_status) PROTO((struct function *));
125 void (*mark_machine_status) PROTO((struct function *));
126 void (*free_machine_status) PROTO((struct function *));
127
128 /* Likewise, but for language-specific data. */
129 void (*init_lang_status) PROTO((struct function *));
130 void (*save_lang_status) PROTO((struct function *));
131 void (*restore_lang_status) PROTO((struct function *));
132 void (*mark_lang_status) PROTO((struct function *));
133 void (*free_lang_status) PROTO((struct function *));
134
135 /* The FUNCTION_DECL for an inline function currently being expanded. */
136 tree inline_function_decl;
137
138 /* The currently compiled function. */
139 struct function *current_function = 0;
140
141 /* Global list of all compiled functions. */
142 struct function *all_functions = 0;
143
144 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
145 static int *prologue;
146 static int *epilogue;
147 \f
148 /* In order to evaluate some expressions, such as function calls returning
149 structures in memory, we need to temporarily allocate stack locations.
150 We record each allocated temporary in the following structure.
151
152 Associated with each temporary slot is a nesting level. When we pop up
153 one level, all temporaries associated with the previous level are freed.
154 Normally, all temporaries are freed after the execution of the statement
155 in which they were created. However, if we are inside a ({...}) grouping,
156 the result may be in a temporary and hence must be preserved. If the
157 result could be in a temporary, we preserve it if we can determine which
158 one it is in. If we cannot determine which temporary may contain the
159 result, all temporaries are preserved. A temporary is preserved by
160 pretending it was allocated at the previous nesting level.
161
162 Automatic variables are also assigned temporary slots, at the nesting
163 level where they are defined. They are marked as "kept" so that
164 free_temp_slots will not free them. */
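/* Illustrative sketch, not part of the original source: the usual shape of
   the nesting discipline described above.  The entry points are the real
   ones defined later in this file; SLOT, RESULT and the surrounding code
   are hypothetical.  */
#if 0
  push_temp_slots ();			/* enter a new nesting level */
  slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
  /* ... emit insns that use SLOT ... */
  preserve_temp_slots (result);		/* keep the slot holding RESULT */
  pop_temp_slots ();			/* free the other slots at this level */
#endif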
165
166 struct temp_slot
167 {
168 /* Points to next temporary slot. */
169 struct temp_slot *next;
170 /* The rtx used to reference the slot. */
171 rtx slot;
172 /* The rtx used to represent the address if not the address of the
173 slot above. May be an EXPR_LIST if multiple addresses exist. */
174 rtx address;
175 /* The alignment (in bits) of the slot. */
176 int align;
177 /* The size, in units, of the slot. */
178 HOST_WIDE_INT size;
179 /* The alias set for the slot. If the alias set is zero, we don't
180 know anything about the alias set of the slot. We must only
181 reuse a slot if it is assigned an object of the same alias set.
182 Otherwise, the rest of the compiler may assume that the new use
183 of the slot cannot alias the old use of the slot, which is
184 false. If the slot has alias set zero, then we can't reuse the
185 slot at all, since we have no idea what alias set may have been
186 imposed on the memory. For example, if the stack slot is the
187 call frame for an inlined function, we have no idea what alias
188 sets will be assigned to various pieces of the call frame. */
189 int alias_set;
190 /* The value of `seq_rtl_expr' when this temporary is allocated. */
191 tree rtl_expr;
192 /* Non-zero if this temporary is currently in use. */
193 char in_use;
194 /* Non-zero if this temporary has its address taken. */
195 char addr_taken;
196 /* Nesting level at which this slot is being used. */
197 int level;
198 /* Non-zero if this should survive a call to free_temp_slots. */
199 int keep;
200 /* The offset of the slot from the frame_pointer, including extra space
201 for alignment. This info is for combine_temp_slots. */
202 HOST_WIDE_INT base_offset;
203 /* The size of the slot, including extra space for alignment. This
204 info is for combine_temp_slots. */
205 HOST_WIDE_INT full_size;
206 };
207 \f
208 /* This structure is used to record MEMs or pseudos used to replace VAR, any
209 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
210 maintain this list in case two operands of an insn were required to match;
211 in that case we must ensure we use the same replacement. */
212
213 struct fixup_replacement
214 {
215 rtx old;
216 rtx new;
217 struct fixup_replacement *next;
218 };
219
220 struct insns_for_mem_entry {
221 /* The KEY in HE will be a MEM. */
222 struct hash_entry he;
223 /* These are the INSNS which reference the MEM. */
224 rtx insns;
225 };
226
227 /* Forward declarations. */
228
229 static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
230 int, struct function *));
231 static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
232 int, tree));
233 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
234 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
235 enum machine_mode, enum machine_mode,
236 int, int, int,
237 struct hash_table *));
238 static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
239 struct hash_table *));
240 static struct fixup_replacement
241 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
242 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
243 rtx, int, struct hash_table *));
244 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
245 struct fixup_replacement **));
246 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
247 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
248 static rtx fixup_stack_1 PROTO((rtx, rtx));
249 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
250 static void instantiate_decls PROTO((tree, int));
251 static void instantiate_decls_1 PROTO((tree, int));
252 static void instantiate_decl PROTO((rtx, int, int));
253 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
254 static void delete_handlers PROTO((void));
255 static void pad_to_arg_alignment PROTO((struct args_size *, int));
256 #ifndef ARGS_GROW_DOWNWARD
257 static void pad_below PROTO((struct args_size *, enum machine_mode,
258 tree));
259 #endif
260 #ifdef ARGS_GROW_DOWNWARD
261 static tree round_down PROTO((tree, int));
262 #endif
263 static rtx round_trampoline_addr PROTO((rtx));
264 static tree blocks_nreverse PROTO((tree));
265 static int all_blocks PROTO((tree, tree *));
266 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
267 static int *record_insns PROTO((rtx));
268 static int contains PROTO((rtx, int *));
269 #endif /* HAVE_prologue || HAVE_epilogue */
270 static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
271 static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
272 struct hash_table *));
273 static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
274 struct hash_table *,
275 hash_table_key));
276 static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
277 static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
278 static int insns_for_mem_walk PROTO ((rtx *, void *));
279 static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
280 static void mark_temp_slot PROTO ((struct temp_slot *));
281 static void mark_function_status PROTO ((struct function *));
282 static void mark_function_chain PROTO ((void *));
283
284 \f
285 /* Pointer to chain of `struct function' for containing functions. */
286 struct function *outer_function_chain;
287
288 /* Given a function decl for a containing function,
289 return the `struct function' for it. */
290
291 struct function *
292 find_function_data (decl)
293 tree decl;
294 {
295 struct function *p;
296
297 for (p = outer_function_chain; p; p = p->next)
298 if (p->decl == decl)
299 return p;
300
301 abort ();
302 }
303
304 /* Save the current context for compilation of a nested function.
305 This is called from language-specific code. The caller should use
306 the save_lang_status callback to save any language-specific state,
307 since this function knows only about language-independent
308 variables. */
309
310 void
311 push_function_context_to (context)
312 tree context;
313 {
314 struct function *p, *context_data;
315
316 if (context)
317 {
318 context_data = (context == current_function_decl
319 ? current_function
320 : find_function_data (context));
321 context_data->contains_functions = 1;
322 }
323
324 if (current_function == 0)
325 init_dummy_function_start ();
326 p = current_function;
327
328 p->next = outer_function_chain;
329 outer_function_chain = p;
330 p->fixup_var_refs_queue = 0;
331
332 save_tree_status (p);
333 if (save_lang_status)
334 (*save_lang_status) (p);
335 if (save_machine_status)
336 (*save_machine_status) (p);
337
338 current_function = 0;
339 }
340
341 void
342 push_function_context ()
343 {
344 push_function_context_to (current_function_decl);
345 }
346
347 /* Restore the last saved context, at the end of a nested function.
348 This function is called from language-specific code. */
349
350 void
351 pop_function_context_from (context)
352 tree context;
353 {
354 struct function *p = outer_function_chain;
355 struct var_refs_queue *queue;
356 struct var_refs_queue *next;
357
358 current_function = p;
359 outer_function_chain = p->next;
360
361 current_function_decl = p->decl;
362 reg_renumber = 0;
363
364 restore_tree_status (p);
365 restore_emit_status (p);
366
367 if (restore_machine_status)
368 (*restore_machine_status) (p);
369 if (restore_lang_status)
370 (*restore_lang_status) (p);
371
372 /* Finish doing put_var_into_stack for any of our variables
373 which became addressable during the nested function. */
374 for (queue = p->fixup_var_refs_queue; queue; queue = next)
375 {
376 next = queue->next;
377 fixup_var_refs (queue->modified, queue->promoted_mode,
378 queue->unsignedp, 0);
379 free (queue);
380 }
381 p->fixup_var_refs_queue = 0;
382
383 /* Reset variables that have known state during rtx generation. */
384 rtx_equal_function_value_matters = 1;
385 virtuals_instantiated = 0;
386 }
387
388 void
389 pop_function_context ()
390 {
391 pop_function_context_from (current_function_decl);
392 }
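/* Illustrative sketch, not part of the original source: how language
   front ends bracket the compilation of a nested function with the two
   entry points above.  */
#if 0
  push_function_context ();	/* save the enclosing function's state */
  /* ... expand the nested function to RTL ... */
  pop_function_context ();	/* resume compiling the enclosing function */
#endif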
393
394 /* Clear out all parts of the state in F that can safely be discarded
395 after the function has been parsed, but not compiled, to let
396 garbage collection reclaim the memory. */
397
398 void
399 free_after_parsing (f)
400 struct function *f;
401 {
402 /* f->expr->forced_labels is used by code generation. */
403 /* f->emit->regno_reg_rtx is used by code generation. */
404 /* f->varasm is used by code generation. */
405 /* f->eh->eh_return_stub_label is used by code generation. */
406
407 if (free_lang_status)
408 (*free_lang_status) (f);
409 free_stmt_status (f);
410 }
411
412 /* Clear out all parts of the state in F that can safely be discarded
413 after the function has been compiled, to let garbage collection
414 reclaim the memory. */
415
416 void
417 free_after_compilation (f)
418 struct function *f;
419 {
420 free_eh_status (f);
421 free_expr_status (f);
422 free_emit_status (f);
423 free_varasm_status (f);
424
425 if (free_machine_status)
426 (*free_machine_status) (f);
427
428 free (f->x_parm_reg_stack_loc);
429
430 f->arg_offset_rtx = NULL;
431 f->return_rtx = NULL;
432 f->internal_arg_pointer = NULL;
433 f->x_nonlocal_labels = NULL;
434 f->x_nonlocal_goto_handler_slots = NULL;
435 f->x_nonlocal_goto_handler_labels = NULL;
436 f->x_nonlocal_goto_stack_level = NULL;
437 f->x_cleanup_label = NULL;
438 f->x_return_label = NULL;
439 f->x_save_expr_regs = NULL;
440 f->x_stack_slot_list = NULL;
441 f->x_rtl_expr_chain = NULL;
442 f->x_tail_recursion_label = NULL;
443 f->x_tail_recursion_reentry = NULL;
444 f->x_arg_pointer_save_area = NULL;
445 f->x_context_display = NULL;
446 f->x_trampoline_list = NULL;
447 f->x_parm_birth_insn = NULL;
448 f->x_last_parm_insn = NULL;
449 f->x_parm_reg_stack_loc = NULL;
450 f->x_temp_slots = NULL;
451 f->fixup_var_refs_queue = NULL;
452 f->original_arg_vector = NULL;
453 f->original_decl_initial = NULL;
454 f->inl_last_parm_insn = NULL;
455 f->epilogue_delay_list = NULL;
456 }
457
458 \f
459 /* Allocate fixed slots in the stack frame of the current function. */
460
461 /* Return size needed for stack frame based on slots so far allocated in
462 function F.
463 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
464 the caller may have to do that. */
465
466 HOST_WIDE_INT
467 get_func_frame_size (f)
468 struct function *f;
469 {
470 #ifdef FRAME_GROWS_DOWNWARD
471 return -f->x_frame_offset;
472 #else
473 return f->x_frame_offset;
474 #endif
475 }
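/* Illustrative only, not part of the original source: on a target that
   defines FRAME_GROWS_DOWNWARD, x_frame_offset starts at zero and becomes
   more negative as slots are allocated, so the frame size is its negation;
   otherwise x_frame_offset itself is the number of bytes allocated so far.  */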
476
477 /* Return size needed for stack frame based on slots so far allocated.
478 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
479 the caller may have to do that. */
480 HOST_WIDE_INT
481 get_frame_size ()
482 {
483 return get_func_frame_size (current_function);
484 }
485
486 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
487 with machine mode MODE.
488
489 ALIGN controls the amount of alignment for the address of the slot:
490 0 means according to MODE,
491 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
492 positive specifies alignment boundary in bits.
493
494 We do not round to stack_boundary here.
495
496 FUNCTION specifies the function to allocate in. */
497
498 static rtx
499 assign_stack_local_1 (mode, size, align, function)
500 enum machine_mode mode;
501 HOST_WIDE_INT size;
502 int align;
503 struct function *function;
504 {
505 register rtx x, addr;
506 int bigend_correction = 0;
507 int alignment;
508
509 /* Allocate in the memory associated with the function in whose frame
510 we are assigning. */
511 if (function != current_function)
512 push_obstacks (function->function_obstack,
513 function->function_maybepermanent_obstack);
514
515 if (align == 0)
516 {
517 tree type;
518
519 alignment = GET_MODE_ALIGNMENT (mode);
520 if (mode == BLKmode)
521 alignment = BIGGEST_ALIGNMENT;
522
523 /* Allow the target to (possibly) increase the alignment of this
524 stack slot. */
525 type = type_for_mode (mode, 0);
526 if (type)
527 alignment = LOCAL_ALIGNMENT (type, alignment);
528
529 alignment /= BITS_PER_UNIT;
530 }
531 else if (align == -1)
532 {
533 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
534 size = CEIL_ROUND (size, alignment);
535 }
536 else
537 alignment = align / BITS_PER_UNIT;
538
539 #ifdef FRAME_GROWS_DOWNWARD
540 function->x_frame_offset -= size;
541 #endif
542
543 /* Round frame offset to that alignment.
544 We must be careful here, since FRAME_OFFSET might be negative and
545 division with a negative dividend isn't as well defined as we might
546 like. So we instead assume that ALIGNMENT is a power of two and
547 use logical operations which are unambiguous. */
548 #ifdef FRAME_GROWS_DOWNWARD
549 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
550 #else
551 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
552 #endif
553
554 /* On a big-endian machine, if we are allocating more space than we will use,
555 use the least significant bytes of those that are allocated. */
556 if (BYTES_BIG_ENDIAN && mode != BLKmode)
557 bigend_correction = size - GET_MODE_SIZE (mode);
558
559 /* If we have already instantiated virtual registers, return the actual
560 address relative to the frame pointer. */
561 if (function == current_function && virtuals_instantiated)
562 addr = plus_constant (frame_pointer_rtx,
563 (frame_offset + bigend_correction
564 + STARTING_FRAME_OFFSET));
565 else
566 addr = plus_constant (virtual_stack_vars_rtx,
567 function->x_frame_offset + bigend_correction);
568
569 #ifndef FRAME_GROWS_DOWNWARD
570 function->x_frame_offset += size;
571 #endif
572
573 x = gen_rtx_MEM (mode, addr);
574
575 function->x_stack_slot_list
576 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
577
578 if (function != current_function)
579 pop_obstacks ();
580
581 return x;
582 }
583
584 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
585 current function. */
586 rtx
587 assign_stack_local (mode, size, align)
588 enum machine_mode mode;
589 HOST_WIDE_INT size;
590 int align;
591 {
592 return assign_stack_local_1 (mode, size, align, current_function);
593 }
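/* Illustrative sketch, not part of the original source: the three ALIGN
   conventions accepted above.  The variables A, B and C are hypothetical.  */
#if 0
  /* Align according to SImode's natural alignment.  */
  a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* Use BIGGEST_ALIGNMENT and round SIZE up to a multiple of it.  */
  b = assign_stack_local (BLKmode, 10, -1);
  /* Explicit alignment boundary, given in bits.  */
  c = assign_stack_local (BLKmode, 16, 64);
#endif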
594 \f
595 /* Allocate a temporary stack slot and record it for possible later
596 reuse.
597
598 MODE is the machine mode to be given to the returned rtx.
599
600 SIZE is the size in units of the space required. We do no rounding here
601 since assign_stack_local will do any required rounding.
602
603 KEEP is 1 if this slot is to be retained after a call to
604 free_temp_slots. Automatic variables for a block are allocated
605 with this flag. KEEP is 2 if we allocate a longer term temporary,
606 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
607 if we are to allocate something at an inner level to be treated as
608 a variable in the block (e.g., a SAVE_EXPR).
609
610 TYPE is the type that will be used for the stack slot. */
611
612 static rtx
613 assign_stack_temp_for_type (mode, size, keep, type)
614 enum machine_mode mode;
615 HOST_WIDE_INT size;
616 int keep;
617 tree type;
618 {
619 int align;
620 int alias_set;
621 struct temp_slot *p, *best_p = 0;
622
623 /* If SIZE is -1 it means that somebody tried to allocate a temporary
624 of a variable size. */
625 if (size == -1)
626 abort ();
627
628 /* If we know the alias set for the memory that will be used, use
629 it. If there's no TYPE, then we don't know anything about the
630 alias set for the memory. */
631 if (type)
632 alias_set = get_alias_set (type);
633 else
634 alias_set = 0;
635
636 align = GET_MODE_ALIGNMENT (mode);
637 if (mode == BLKmode)
638 align = BIGGEST_ALIGNMENT;
639
640 if (! type)
641 type = type_for_mode (mode, 0);
642 if (type)
643 align = LOCAL_ALIGNMENT (type, align);
644
645 /* Try to find an available, already-allocated temporary of the proper
646 mode which meets the size and alignment requirements. Choose the
647 smallest one with the closest alignment. */
648 for (p = temp_slots; p; p = p->next)
649 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
650 && ! p->in_use
651 && (!flag_strict_aliasing
652 || (alias_set && p->alias_set == alias_set))
653 && (best_p == 0 || best_p->size > p->size
654 || (best_p->size == p->size && best_p->align > p->align)))
655 {
656 if (p->align == align && p->size == size)
657 {
658 best_p = 0;
659 break;
660 }
661 best_p = p;
662 }
663
664 /* Make our best, if any, the one to use. */
665 if (best_p)
666 {
667 /* If there are enough aligned bytes left over, make them into a new
668 temp_slot so that the extra bytes don't get wasted. Do this only
669 for BLKmode slots, so that we can be sure of the alignment. */
670 if (GET_MODE (best_p->slot) == BLKmode
671 /* We can't split slots if -fstrict-aliasing because the
672 information about the alias set for the new slot will be
673 lost. */
674 && !flag_strict_aliasing)
675 {
676 int alignment = best_p->align / BITS_PER_UNIT;
677 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
678
679 if (best_p->size - rounded_size >= alignment)
680 {
681 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
682 p->in_use = p->addr_taken = 0;
683 p->size = best_p->size - rounded_size;
684 p->base_offset = best_p->base_offset + rounded_size;
685 p->full_size = best_p->full_size - rounded_size;
686 p->slot = gen_rtx_MEM (BLKmode,
687 plus_constant (XEXP (best_p->slot, 0),
688 rounded_size));
689 p->align = best_p->align;
690 p->address = 0;
691 p->rtl_expr = 0;
692 p->next = temp_slots;
693 temp_slots = p;
694
695 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
696 stack_slot_list);
697
698 best_p->size = rounded_size;
699 best_p->full_size = rounded_size;
700 }
701 }
702
703 p = best_p;
704 }
705
706 /* If we still didn't find one, make a new temporary. */
707 if (p == 0)
708 {
709 HOST_WIDE_INT frame_offset_old = frame_offset;
710
711 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
712
713 /* We are passing an explicit alignment request to assign_stack_local.
714 One side effect of that is that assign_stack_local will not round SIZE
715 to ensure the frame offset remains suitably aligned.
716
717 So for requests which depended on the rounding of SIZE, we go ahead
718 and round it now. We also make sure ALIGNMENT is at least
719 BIGGEST_ALIGNMENT. */
720 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
721 abort ();
722 p->slot = assign_stack_local (mode,
723 (mode == BLKmode
724 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
725 : size),
726 align);
727
728 p->align = align;
729 p->alias_set = alias_set;
730
731 /* The following slot size computation is necessary because we don't
732 know the actual size of the temporary slot until assign_stack_local
733 has performed all the frame alignment and size rounding for the
734 requested temporary. Note that extra space added for alignment
735 can be either above or below this stack slot depending on which
736 way the frame grows. We include the extra space if and only if it
737 is above this slot. */
738 #ifdef FRAME_GROWS_DOWNWARD
739 p->size = frame_offset_old - frame_offset;
740 #else
741 p->size = size;
742 #endif
743
744 /* Now define the fields used by combine_temp_slots. */
745 #ifdef FRAME_GROWS_DOWNWARD
746 p->base_offset = frame_offset;
747 p->full_size = frame_offset_old - frame_offset;
748 #else
749 p->base_offset = frame_offset_old;
750 p->full_size = frame_offset - frame_offset_old;
751 #endif
752 p->address = 0;
753 p->next = temp_slots;
754 temp_slots = p;
755 }
756
757 p->in_use = 1;
758 p->addr_taken = 0;
759 p->rtl_expr = seq_rtl_expr;
760
761 if (keep == 2)
762 {
763 p->level = target_temp_slot_level;
764 p->keep = 0;
765 }
766 else if (keep == 3)
767 {
768 p->level = var_temp_slot_level;
769 p->keep = 0;
770 }
771 else
772 {
773 p->level = temp_slot_level;
774 p->keep = keep;
775 }
776
777 /* We may be reusing an old slot, so clear any MEM flags that may have been
778 set from before. */
779 RTX_UNCHANGING_P (p->slot) = 0;
780 MEM_IN_STRUCT_P (p->slot) = 0;
781 MEM_SCALAR_P (p->slot) = 0;
782 MEM_ALIAS_SET (p->slot) = 0;
783 return p->slot;
784 }
785
786 /* Allocate a temporary stack slot and record it for possible later
787 reuse. The first three arguments are the same as in the preceding function. */
788
789 rtx
790 assign_stack_temp (mode, size, keep)
791 enum machine_mode mode;
792 HOST_WIDE_INT size;
793 int keep;
794 {
795 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
796 }
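/* Illustrative sketch, not part of the original source: the meaning of the
   KEEP argument, per the header comment on assign_stack_temp_for_type.
   MODE, SIZE and the T* variables are hypothetical.  */
#if 0
  t0 = assign_stack_temp (mode, size, 0);  /* freed by free_temp_slots */
  t1 = assign_stack_temp (mode, size, 1);  /* survives free_temp_slots */
  t2 = assign_stack_temp (mode, size, 2);  /* lives at target_temp_slot_level,
					      for CLEANUP_POINT_EXPRs */
  t3 = assign_stack_temp (mode, size, 3);  /* lives at var_temp_slot_level,
					      like a block variable */
#endif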
797 \f
798 /* Assign a temporary of given TYPE.
799 KEEP is as for assign_stack_temp.
800 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
801 it is 0 if a register is OK.
802 DONT_PROMOTE is 1 if we should not promote values in registers
803 to wider modes. */
804
805 rtx
806 assign_temp (type, keep, memory_required, dont_promote)
807 tree type;
808 int keep;
809 int memory_required;
810 int dont_promote;
811 {
812 enum machine_mode mode = TYPE_MODE (type);
813 int unsignedp = TREE_UNSIGNED (type);
814
815 if (mode == BLKmode || memory_required)
816 {
817 HOST_WIDE_INT size = int_size_in_bytes (type);
818 rtx tmp;
819
820 /* Unfortunately, we don't yet know how to allocate variable-sized
821 temporaries. However, sometimes we have a fixed upper limit on
822 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
823 instead. This is the case for Chill variable-sized strings. */
824 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
825 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
826 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
827 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
828
829 tmp = assign_stack_temp_for_type (mode, size, keep, type);
830 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
831 return tmp;
832 }
833
834 #ifndef PROMOTE_FOR_CALL_ONLY
835 if (! dont_promote)
836 mode = promote_mode (type, mode, &unsignedp, 0);
837 #endif
838
839 return gen_reg_rtx (mode);
840 }
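/* Illustrative sketch, not part of the original source: assign_temp yields
   a pseudo register when the type fits in one and memory is not demanded,
   and a stack slot otherwise.  STRUCT_TYPE here is a hypothetical
   aggregate type node.  */
#if 0
  r = assign_temp (integer_type_node, 0, 0, 0);	/* normally a (promoted) REG */
  m = assign_temp (struct_type, 0, 1, 0);	/* always a stack MEM */
#endif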
841 \f
842 /* Combine temporary stack slots which are adjacent on the stack.
843
844 This allows for better use of already allocated stack space. This is only
845 done for BLKmode slots because we can be sure that we won't have alignment
846 problems in this case. */
847
848 void
849 combine_temp_slots ()
850 {
851 struct temp_slot *p, *q;
852 struct temp_slot *prev_p, *prev_q;
853 int num_slots;
854
855 /* We can't combine slots, because the information about which slot
856 is in which alias set will be lost. */
857 if (flag_strict_aliasing)
858 return;
859
860 /* If there are a lot of temp slots, don't do anything unless high
861 levels of optimization are enabled. */
862 if (! flag_expensive_optimizations)
863 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
864 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
865 return;
866
867 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
868 {
869 int delete_p = 0;
870
871 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
872 for (q = p->next, prev_q = p; q; q = prev_q->next)
873 {
874 int delete_q = 0;
875 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
876 {
877 if (p->base_offset + p->full_size == q->base_offset)
878 {
879 /* Q comes after P; combine Q into P. */
880 p->size += q->size;
881 p->full_size += q->full_size;
882 delete_q = 1;
883 }
884 else if (q->base_offset + q->full_size == p->base_offset)
885 {
886 /* P comes after Q; combine P into Q. */
887 q->size += p->size;
888 q->full_size += p->full_size;
889 delete_p = 1;
890 break;
891 }
892 }
893 /* Either delete Q or advance past it. */
894 if (delete_q)
895 prev_q->next = q->next;
896 else
897 prev_q = q;
898 }
899 /* Either delete P or advance past it. */
900 if (delete_p)
901 {
902 if (prev_p)
903 prev_p->next = p->next;
904 else
905 temp_slots = p->next;
906 }
907 else
908 prev_p = p;
909 }
910 }
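/* Illustrative only, not part of the original source: two free BLKmode
   slots merge when they abut in the frame.  Given

	P: base_offset = 0,  full_size = 16
	Q: base_offset = 16, full_size = 8

   p->base_offset + p->full_size == q->base_offset holds, so Q is folded
   into P, leaving one free slot with base_offset 0 and full_size 24.  */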
911 \f
912 /* Find the temp slot corresponding to the object at address X. */
913
914 static struct temp_slot *
915 find_temp_slot_from_address (x)
916 rtx x;
917 {
918 struct temp_slot *p;
919 rtx next;
920
921 for (p = temp_slots; p; p = p->next)
922 {
923 if (! p->in_use)
924 continue;
925
926 else if (XEXP (p->slot, 0) == x
927 || p->address == x
928 || (GET_CODE (x) == PLUS
929 && XEXP (x, 0) == virtual_stack_vars_rtx
930 && GET_CODE (XEXP (x, 1)) == CONST_INT
931 && INTVAL (XEXP (x, 1)) >= p->base_offset
932 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
933 return p;
934
935 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
936 for (next = p->address; next; next = XEXP (next, 1))
937 if (XEXP (next, 0) == x)
938 return p;
939 }
940
941 return 0;
942 }
943
944 /* Indicate that NEW is an alternate way of referring to the temp slot
945 that previously was known by OLD. */
946
947 void
948 update_temp_slot_address (old, new)
949 rtx old, new;
950 {
951 struct temp_slot *p = find_temp_slot_from_address (old);
952
953 /* If none, return. Else add NEW as an alias. */
954 if (p == 0)
955 return;
956 else if (p->address == 0)
957 p->address = new;
958 else
959 {
960 if (GET_CODE (p->address) != EXPR_LIST)
961 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
962
963 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
964 }
965 }
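/* Illustrative only, not part of the original source: if a slot's address
   field starts out empty, update_temp_slot_address (old, a) records A
   directly; a second call update_temp_slot_address (old2, b) that resolves
   to the same slot leaves the chain

	(expr_list b (expr_list a (nil)))

   which find_temp_slot_from_address walks when matching addresses.  */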
966
967 /* If X could be a reference to a temporary slot, mark the fact that its
968 address was taken. */
969
970 void
971 mark_temp_addr_taken (x)
972 rtx x;
973 {
974 struct temp_slot *p;
975
976 if (x == 0)
977 return;
978
979 /* If X is not in memory or is at a constant address, it cannot be in
980 a temporary slot. */
981 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
982 return;
983
984 p = find_temp_slot_from_address (XEXP (x, 0));
985 if (p != 0)
986 p->addr_taken = 1;
987 }
988
989 /* If X could be a reference to a temporary slot, mark that slot as
990 belonging to the level one higher than the current level. If X
991 matched one of our slots, just mark that one. Otherwise, we can't
992 easily predict which it is, so upgrade all of them. Kept slots
993 need not be touched.
994
995 This is called when an ({...}) construct occurs and a statement
996 returns a value in memory. */
997
998 void
999 preserve_temp_slots (x)
1000 rtx x;
1001 {
1002 struct temp_slot *p = 0;
1003
1004 /* If there is no result, we still might have some objects whose addresses
1005 were taken, so we need to make sure they stay around. */
1006 if (x == 0)
1007 {
1008 for (p = temp_slots; p; p = p->next)
1009 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1010 p->level--;
1011
1012 return;
1013 }
1014
1015 /* If X is a register that is being used as a pointer, see if we have
1016 a temporary slot we know it points to. To be consistent with
1017 the code below, we really should preserve all non-kept slots
1018 if we can't find a match, but that seems to be much too costly. */
1019 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1020 p = find_temp_slot_from_address (x);
1021
1022 /* If X is not in memory or is at a constant address, it cannot be in
1023 a temporary slot, but it can contain something whose address was
1024 taken. */
1025 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1026 {
1027 for (p = temp_slots; p; p = p->next)
1028 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1029 p->level--;
1030
1031 return;
1032 }
1033
1034 /* First see if we can find a match. */
1035 if (p == 0)
1036 p = find_temp_slot_from_address (XEXP (x, 0));
1037
1038 if (p != 0)
1039 {
1040 /* Move everything at our level whose address was taken to our new
1041 level in case we used its address. */
1042 struct temp_slot *q;
1043
1044 if (p->level == temp_slot_level)
1045 {
1046 for (q = temp_slots; q; q = q->next)
1047 if (q != p && q->addr_taken && q->level == p->level)
1048 q->level--;
1049
1050 p->level--;
1051 p->addr_taken = 0;
1052 }
1053 return;
1054 }
1055
1056 /* Otherwise, preserve all non-kept slots at this level. */
1057 for (p = temp_slots; p; p = p->next)
1058 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1059 p->level--;
1060 }
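/* Illustrative only, not part of the original source: the kind of GNU C
   statement expression that makes preserve_temp_slots necessary.  F returns
   a struct in memory, so the grouping's value may live in a temporary that
   must outlast the inner statement's nesting level:

	struct S f (void);
	struct S s = ({ struct S tmp = f (); tmp; });
*/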
1061
1062 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1063 with that RTL_EXPR, promote it into a temporary slot at the present
1064 level so it will not be freed when we free slots made in the
1065 RTL_EXPR. */
1066
1067 void
1068 preserve_rtl_expr_result (x)
1069 rtx x;
1070 {
1071 struct temp_slot *p;
1072
1073 /* If X is not in memory or is at a constant address, it cannot be in
1074 a temporary slot. */
1075 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1076 return;
1077
1078 /* If we can find a match, move it to our level unless it is already at
1079 an upper level. */
1080 p = find_temp_slot_from_address (XEXP (x, 0));
1081 if (p != 0)
1082 {
1083 p->level = MIN (p->level, temp_slot_level);
1084 p->rtl_expr = 0;
1085 }
1086
1087 return;
1088 }
1089
1090 /* Free all temporaries used so far. This is normally called at the end
1091 of generating code for a statement. Don't free any temporaries
1092 currently in use for an RTL_EXPR that hasn't yet been emitted.
1093 We could eventually do better than this, since a temporary can be
1094 reused while generating the same RTL_EXPR, but this is complex and
1095 probably not worthwhile. */
1096
1097 void
1098 free_temp_slots ()
1099 {
1100 struct temp_slot *p;
1101
1102 for (p = temp_slots; p; p = p->next)
1103 if (p->in_use && p->level == temp_slot_level && ! p->keep
1104 && p->rtl_expr == 0)
1105 p->in_use = 0;
1106
1107 combine_temp_slots ();
1108 }
1109
1110 /* Free all temporary slots used in T, an RTL_EXPR node. */
1111
1112 void
1113 free_temps_for_rtl_expr (t)
1114 tree t;
1115 {
1116 struct temp_slot *p;
1117
1118 for (p = temp_slots; p; p = p->next)
1119 if (p->rtl_expr == t)
1120 p->in_use = 0;
1121
1122 combine_temp_slots ();
1123 }
1124
1125 /* Mark all temporaries ever allocated in this function as not suitable
1126 for reuse until the current level is exited. */
1127
1128 void
1129 mark_all_temps_used ()
1130 {
1131 struct temp_slot *p;
1132
1133 for (p = temp_slots; p; p = p->next)
1134 {
1135 p->in_use = p->keep = 1;
1136 p->level = MIN (p->level, temp_slot_level);
1137 }
1138 }
1139
1140 /* Push deeper into the nesting level for stack temporaries. */
1141
1142 void
1143 push_temp_slots ()
1144 {
1145 temp_slot_level++;
1146 }
1147
1148 /* Likewise, but save the new level as the place to allocate variables
1149 for blocks. */
1150
1151 void
1152 push_temp_slots_for_block ()
1153 {
1154 push_temp_slots ();
1155
1156 var_temp_slot_level = temp_slot_level;
1157 }
1158
1159 /* Likewise, but save the new level as the place to allocate temporaries
1160 for TARGET_EXPRs. */
1161
1162 void
1163 push_temp_slots_for_target ()
1164 {
1165 push_temp_slots ();
1166
1167 target_temp_slot_level = temp_slot_level;
1168 }
1169
1170 /* Set and get the value of target_temp_slot_level. The only
1171 permitted use of these functions is to save and restore this value. */
1172
1173 int
1174 get_target_temp_slot_level ()
1175 {
1176 return target_temp_slot_level;
1177 }
1178
1179 void
1180 set_target_temp_slot_level (level)
1181 int level;
1182 {
1183 target_temp_slot_level = level;
1184 }
1185
1186 /* Pop a temporary nesting level. All slots in use in the current level
1187 are freed. */
1188
1189 void
1190 pop_temp_slots ()
1191 {
1192 struct temp_slot *p;
1193
1194 for (p = temp_slots; p; p = p->next)
1195 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1196 p->in_use = 0;
1197
1198 combine_temp_slots ();
1199
1200 temp_slot_level--;
1201 }
1202
1203 /* Initialize temporary slots. */
1204
1205 void
1206 init_temp_slots ()
1207 {
1208 /* We have not allocated any temporaries yet. */
1209 temp_slots = 0;
1210 temp_slot_level = 0;
1211 var_temp_slot_level = 0;
1212 target_temp_slot_level = 0;
1213 }
1214 \f
1215 /* Retroactively move an auto variable from a register to a stack slot.
1216 This is done when an address-reference to the variable is seen. */
1217
1218 void
1219 put_var_into_stack (decl)
1220 tree decl;
1221 {
1222 register rtx reg;
1223 enum machine_mode promoted_mode, decl_mode;
1224 struct function *function = 0;
1225 tree context;
1226 int can_use_addressof;
1227
1228 context = decl_function_context (decl);
1229
1230 /* Get the current rtl used for this object and its original mode. */
1231 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1232
1233 /* No need to do anything if decl has no rtx yet
1234 since in that case caller is setting TREE_ADDRESSABLE
1235 and a stack slot will be assigned when the rtl is made. */
1236 if (reg == 0)
1237 return;
1238
1239 /* Get the declared mode for this object. */
1240 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1241 : DECL_MODE (decl));
1242 /* Get the mode it's actually stored in. */
1243 promoted_mode = GET_MODE (reg);
1244
1245 /* If this variable comes from an outer function,
1246 find that function's saved context. */
1247 if (context != current_function_decl && context != inline_function_decl)
1248 for (function = outer_function_chain; function; function = function->next)
1249 if (function->decl == context)
1250 break;
1251
1252 /* If this is a variable-size object with a pseudo to address it, and
1253 the variable is nonlocal, put that pseudo into the stack. */
1254 if (DECL_NONLOCAL (decl)
1255 && GET_CODE (reg) == MEM
1256 && GET_CODE (XEXP (reg, 0)) == REG
1257 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1258 {
1259 reg = XEXP (reg, 0);
1260 decl_mode = promoted_mode = GET_MODE (reg);
1261 }
1262
1263 can_use_addressof
1264 = (function == 0
1265 && optimize > 0
1266 /* FIXME make it work for promoted modes too */
1267 && decl_mode == promoted_mode
1268 #ifdef NON_SAVING_SETJMP
1269 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1270 #endif
1271 );
1272
1273 /* If we can't use ADDRESSOF, make sure we see through one we already
1274 generated. */
1275 if (! can_use_addressof && GET_CODE (reg) == MEM
1276 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1277 reg = XEXP (XEXP (reg, 0), 0);
1278
1279 /* Now we should have a value that resides in one or more pseudo regs. */
1280
1281 if (GET_CODE (reg) == REG)
1282 {
1283 /* If this variable lives in the current function and we don't need
1284 to put things in the stack for the sake of setjmp, try to keep it
1285 in a register until we know we actually need the address. */
1286 if (can_use_addressof)
1287 gen_mem_addressof (reg, decl);
1288 else
1289 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1290 promoted_mode, decl_mode,
1291 TREE_SIDE_EFFECTS (decl), 0,
1292 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1293 0);
1294 }
1295 else if (GET_CODE (reg) == CONCAT)
1296 {
1297 /* A CONCAT contains two pseudos; put them both in the stack.
1298 We do it so they end up consecutive. */
1299 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1300 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1301 #ifdef FRAME_GROWS_DOWNWARD
1302 /* Since part 0 should have a lower address, do it second. */
1303 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1304 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1305 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1306 0);
1307 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1308 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1309 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1310 0);
1311 #else
1312 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1313 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1314 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1315 0);
1316 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1317 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1318 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1319 0);
1320 #endif
1321
1322 /* Change the CONCAT into a combined MEM for both parts. */
1323 PUT_CODE (reg, MEM);
1324 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1325 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1326
1327 /* The two parts are in memory order already.
1328 Use the lower part's address as ours. */
1329 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1330 /* Prevent sharing of rtl that might lose. */
1331 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1332 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1333 }
1334 else
1335 return;
1336
1337 if (current_function_check_memory_usage)
1338 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1339 XEXP (reg, 0), Pmode,
1340 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1341 TYPE_MODE (sizetype),
1342 GEN_INT (MEMORY_USE_RW),
1343 TYPE_MODE (integer_type_node));
1344 }
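/* Illustrative only, not part of the original source: source code that
   triggers put_var_into_stack.  X is first expanded to a pseudo register;
   taking its address retroactively forces it into a stack slot (or, when
   optimizing, wraps it in an ADDRESSOF via gen_mem_addressof):

	int x = 42;
	int *p = &x;	-- the address-reference evicts X from its pseudo
*/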
1345
1346 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1347 into the stack frame of FUNCTION (0 means the current function).
1348 TYPE and DECL_MODE are the user-level data type and its machine mode;
1349 PROMOTED_MODE is the machine mode of the register.
1350 VOLATILE_P is nonzero if this is for a "volatile" decl. USED_P is
1351 nonzero if this reg might have already been used in an insn. HT, if
1352 nonzero, maps each MEM to the insns that reference it. */
1353 static void
1354 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1355 original_regno, used_p, ht)
1356 struct function *function;
1357 rtx reg;
1358 tree type;
1359 enum machine_mode promoted_mode, decl_mode;
1360 int volatile_p;
1361 int original_regno;
1362 int used_p;
1363 struct hash_table *ht;
1364 {
1365 struct function *func = function ? function : current_function;
1366 rtx new = 0;
1367 int regno = original_regno;
1368
1369 if (regno == 0)
1370 regno = REGNO (reg);
1371
1372 if (regno < func->x_max_parm_reg)
1373 new = func->x_parm_reg_stack_loc[regno];
1374 if (new == 0)
1375 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1376
1377 PUT_CODE (reg, MEM);
1378 PUT_MODE (reg, decl_mode);
1379 XEXP (reg, 0) = XEXP (new, 0);
1380 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1381 MEM_VOLATILE_P (reg) = volatile_p;
1382
1383 /* If this is a memory ref that contains aggregate components,
1384 mark it as such for cse and loop optimize. If we are reusing a
1385 previously generated stack slot, then we need to copy the bit in
1386 case it was set for other reasons. For instance, it is set for
1387 __builtin_va_alist. */
1388 MEM_SET_IN_STRUCT_P (reg,
1389 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1390 MEM_ALIAS_SET (reg) = get_alias_set (type);
1391
1392 /* Now make sure that all refs to the variable, previously made
1393 when it was a register, are fixed up to be valid again. */
1394
1395 if (used_p && function != 0)
1396 {
1397 struct var_refs_queue *temp;
1398
1399 temp
1400 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1401 temp->modified = reg;
1402 temp->promoted_mode = promoted_mode;
1403 temp->unsignedp = TREE_UNSIGNED (type);
1404 temp->next = function->fixup_var_refs_queue;
1405 function->fixup_var_refs_queue = temp;
1406 }
1407 else if (used_p)
1408 /* Variable is local; fix it up now. */
1409 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1410 }
1411 \f
1412 static void
1413 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1414 rtx var;
1415 enum machine_mode promoted_mode;
1416 int unsignedp;
1417 struct hash_table *ht;
1418 {
1419 tree pending;
1420 rtx first_insn = get_insns ();
1421 struct sequence_stack *stack = seq_stack;
1422 tree rtl_exps = rtl_expr_chain;
1423
1424 /* Scan the function's main chain of insns for references to VAR. */
1425 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1426 stack == 0, ht);
1427 /* If there's a hash table, it must record all uses of VAR. */
1428 if (ht)
1429 return;
1430
1431 /* Scan all pending sequences too. */
1432 for (; stack; stack = stack->next)
1433 {
1434 push_to_sequence (stack->first);
1435 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1436 stack->first, stack->next != 0, 0);
1437 /* Update remembered end of sequence
1438 in case we added an insn at the end. */
1439 stack->last = get_last_insn ();
1440 end_sequence ();
1441 }
1442
1443 /* Scan all waiting RTL_EXPRs too. */
1444 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1445 {
1446 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1447 if (seq != const0_rtx && seq != 0)
1448 {
1449 push_to_sequence (seq);
1450 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1451 0);
1452 end_sequence ();
1453 }
1454 }
1455
1456 /* Scan the catch clauses for exception handling too. */
1457 push_to_sequence (catch_clauses);
1458 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1459 0, 0);
1460 end_sequence ();
1461 }
1462 \f
1463 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement, and X is
1464 some part of an insn. Return a struct fixup_replacement whose OLD
1465 value is equal to X. Allocate a new structure if no such entry exists. */
1466
1467 static struct fixup_replacement *
1468 find_fixup_replacement (replacements, x)
1469 struct fixup_replacement **replacements;
1470 rtx x;
1471 {
1472 struct fixup_replacement *p;
1473
1474 /* See if we have already replaced this. */
1475 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1476 ;
1477
1478 if (p == 0)
1479 {
1480 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1481 p->old = x;
1482 p->new = 0;
1483 p->next = *replacements;
1484 *replacements = p;
1485 }
1486
1487 return p;
1488 }
1489
1490 /* Scan the insn-chain starting with INSN for refs to VAR
1491 and fix them up. TOPLEVEL is nonzero if this chain is the
1492 main chain of insns for the current function. */
1493
1494 static void
1495 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1496 rtx var;
1497 enum machine_mode promoted_mode;
1498 int unsignedp;
1499 rtx insn;
1500 int toplevel;
1501 struct hash_table *ht;
1502 {
1503 rtx call_dest = 0;
1504 rtx insn_list = NULL_RTX;
1505
1506 /* If we already know which INSNs reference VAR there's no need
1507 to walk the entire instruction chain. */
1508 if (ht)
1509 {
1510 insn_list = ((struct insns_for_mem_entry *)
1511 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1512 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1513 insn_list = insn_list ? XEXP (insn_list, 1) : NULL_RTX;
1514 }
1515
1516 while (insn)
1517 {
1518 rtx next = NEXT_INSN (insn);
1519 rtx set, prev, prev_set;
1520 rtx note;
1521
1522 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1523 {
1524 /* Remember the notes in case we delete the insn. */
1525 note = REG_NOTES (insn);
1526
1527 /* If this is a CLOBBER of VAR, delete it.
1528
1529 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1530 and REG_RETVAL notes too. */
1531 if (GET_CODE (PATTERN (insn)) == CLOBBER
1532 && (XEXP (PATTERN (insn), 0) == var
1533 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1534 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1535 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1536 {
1537 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1538 /* The REG_LIBCALL note will go away since we are going to
1539 turn INSN into a NOTE, so just delete the
1540 corresponding REG_RETVAL note. */
1541 remove_note (XEXP (note, 0),
1542 find_reg_note (XEXP (note, 0), REG_RETVAL,
1543 NULL_RTX));
1544
1545 /* In unoptimized compilation, we shouldn't call delete_insn
1546 except in jump.c doing warnings. */
1547 PUT_CODE (insn, NOTE);
1548 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1549 NOTE_SOURCE_FILE (insn) = 0;
1550 }
1551
1552 /* The insn to load VAR from a home in the arglist
1553 is now a no-op. When we see it, just delete it.
1554 Similarly if this is storing VAR from a register from which
1555 it was loaded in the previous insn. This will occur
1556 when an ADDRESSOF was made for an arglist slot. */
1557 else if (toplevel
1558 && (set = single_set (insn)) != 0
1559 && SET_DEST (set) == var
1560 /* If this represents the result of an insn group,
1561 don't delete the insn. */
1562 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1563 && (rtx_equal_p (SET_SRC (set), var)
1564 || (GET_CODE (SET_SRC (set)) == REG
1565 && (prev = prev_nonnote_insn (insn)) != 0
1566 && (prev_set = single_set (prev)) != 0
1567 && SET_DEST (prev_set) == SET_SRC (set)
1568 && rtx_equal_p (SET_SRC (prev_set), var))))
1569 {
1570 /* In unoptimized compilation, we shouldn't call delete_insn
1571 except in jump.c doing warnings. */
1572 PUT_CODE (insn, NOTE);
1573 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1574 NOTE_SOURCE_FILE (insn) = 0;
1575 if (insn == last_parm_insn)
1576 last_parm_insn = PREV_INSN (next);
1577 }
1578 else
1579 {
1580 struct fixup_replacement *replacements = 0;
1581 rtx next_insn = NEXT_INSN (insn);
1582
1583 if (SMALL_REGISTER_CLASSES)
1584 {
1585 /* If the insn that copies the results of a CALL_INSN
1586 into a pseudo now references VAR, we have to use an
1587 intermediate pseudo since we want the life of the
1588 return value register to be only a single insn.
1589
1590 If we don't use an intermediate pseudo, such things as
1591 address computations to make the address of VAR valid
1592 if it is not can be placed between the CALL_INSN and INSN.
1593
1594 To make sure this doesn't happen, we record the destination
1595 of the CALL_INSN and see if the next insn uses both that
1596 and VAR. */
1597
1598 if (call_dest != 0 && GET_CODE (insn) == INSN
1599 && reg_mentioned_p (var, PATTERN (insn))
1600 && reg_mentioned_p (call_dest, PATTERN (insn)))
1601 {
1602 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1603
1604 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1605
1606 PATTERN (insn) = replace_rtx (PATTERN (insn),
1607 call_dest, temp);
1608 }
1609
1610 if (GET_CODE (insn) == CALL_INSN
1611 && GET_CODE (PATTERN (insn)) == SET)
1612 call_dest = SET_DEST (PATTERN (insn));
1613 else if (GET_CODE (insn) == CALL_INSN
1614 && GET_CODE (PATTERN (insn)) == PARALLEL
1615 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1616 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1617 else
1618 call_dest = 0;
1619 }
1620
1621 /* See if we have to do anything to INSN now that VAR is in
1622 memory. If it needs to be loaded into a pseudo, use a single
1623 pseudo for the entire insn in case there is a MATCH_DUP
1624 between two operands. We pass a pointer to the head of
1625 a list of struct fixup_replacements. If fixup_var_refs_1
1626 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1627 it will record them in this list.
1628
1629 If it allocated a pseudo for any replacement, we copy into
1630 it here. */
1631
1632 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1633 &replacements);
1634
1635 /* If this is last_parm_insn, and any instructions were output
1636 after it to fix it up, then we must set last_parm_insn to
1637 the last such instruction emitted. */
1638 if (insn == last_parm_insn)
1639 last_parm_insn = PREV_INSN (next_insn);
1640
1641 while (replacements)
1642 {
1643 if (GET_CODE (replacements->new) == REG)
1644 {
1645 rtx insert_before;
1646 rtx seq;
1647
1648 /* OLD might be a (subreg (mem)). */
1649 if (GET_CODE (replacements->old) == SUBREG)
1650 replacements->old
1651 = fixup_memory_subreg (replacements->old, insn, 0);
1652 else
1653 replacements->old
1654 = fixup_stack_1 (replacements->old, insn);
1655
1656 insert_before = insn;
1657
1658 /* If we are changing the mode, do a conversion.
1659 This might be wasteful, but combine.c will
1660 eliminate much of the waste. */
1661
1662 if (GET_MODE (replacements->new)
1663 != GET_MODE (replacements->old))
1664 {
1665 start_sequence ();
1666 convert_move (replacements->new,
1667 replacements->old, unsignedp);
1668 seq = gen_sequence ();
1669 end_sequence ();
1670 }
1671 else
1672 seq = gen_move_insn (replacements->new,
1673 replacements->old);
1674
1675 emit_insn_before (seq, insert_before);
1676 }
1677
1678 replacements = replacements->next;
1679 }
1680 }
1681
1682 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1683 But don't touch other insns referred to by reg-notes;
1684 we will get them elsewhere. */
1685 while (note)
1686 {
1687 if (GET_CODE (note) != INSN_LIST)
1688 XEXP (note, 0)
1689 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1690 note = XEXP (note, 1);
1691 }
1692 }
1693
1694 if (!ht)
1695 insn = next;
1696 else if (insn_list)
1697 {
1698 insn = XEXP (insn_list, 0);
1699 insn_list = XEXP (insn_list, 1);
1700 }
1701 else
1702 insn = NULL_RTX;
1703 }
1704 }
1705 \f
1706 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1707 See if the rtx expression at *LOC in INSN needs to be changed.
1708
1709 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1710 contain a list of original rtx's and replacements. If we find that we need
1711 to modify this insn by replacing a memory reference with a pseudo or by
1712 making a new MEM to implement a SUBREG, we consult that list to see if
1713 we have already chosen a replacement. If none has already been allocated,
1714 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1715 or the SUBREG, as appropriate, to the pseudo. */
1716
1717 static void
1718 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1719 register rtx var;
1720 enum machine_mode promoted_mode;
1721 register rtx *loc;
1722 rtx insn;
1723 struct fixup_replacement **replacements;
1724 {
1725 register int i;
1726 register rtx x = *loc;
1727 RTX_CODE code = GET_CODE (x);
1728 register const char *fmt;
1729 register rtx tem, tem1;
1730 struct fixup_replacement *replacement;
1731
1732 switch (code)
1733 {
1734 case ADDRESSOF:
1735 if (XEXP (x, 0) == var)
1736 {
1737 /* Prevent sharing of rtl that might lose. */
1738 rtx sub = copy_rtx (XEXP (var, 0));
1739
1740 if (! validate_change (insn, loc, sub, 0))
1741 {
1742 rtx y = gen_reg_rtx (GET_MODE (sub));
1743 rtx seq, new_insn;
1744
1745 /* We should be able to replace with a register or all is lost.
1746 Note that we can't use validate_change to verify this, since
1747 it does not replace all duplicates simultaneously. */
1748 if (! validate_replace_rtx (*loc, y, insn))
1749 abort ();
1750
1751 /* Careful! First try to recognize a direct move of the
1752 value, mimicking how things are done in gen_reload wrt
1753 PLUS. Consider what happens when insn is a conditional
1754 move instruction and addsi3 clobbers flags. */
1755
1756 start_sequence ();
1757 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1758 seq = gen_sequence ();
1759 end_sequence ();
1760
1761 if (recog_memoized (new_insn) < 0)
1762 {
1763 /* That failed. Fall back on force_operand and hope. */
1764
1765 start_sequence ();
1766 force_operand (sub, y);
1767 seq = gen_sequence ();
1768 end_sequence ();
1769 }
1770
1771 #ifdef HAVE_cc0
1772 /* Don't separate setter from user. */
1773 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1774 insn = PREV_INSN (insn);
1775 #endif
1776
1777 emit_insn_before (seq, insn);
1778 }
1779 }
1780 return;
1781
1782 case MEM:
1783 if (var == x)
1784 {
1785 /* If we already have a replacement, use it. Otherwise,
1786 try to fix up this address in case it is invalid. */
1787
1788 replacement = find_fixup_replacement (replacements, var);
1789 if (replacement->new)
1790 {
1791 *loc = replacement->new;
1792 return;
1793 }
1794
1795 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1796
1797 /* Unless we are forcing memory to register or we changed the mode,
1798 we can leave things the way they are if the insn is valid. */
1799
1800 INSN_CODE (insn) = -1;
1801 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1802 && recog_memoized (insn) >= 0)
1803 return;
1804
1805 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1806 return;
1807 }
1808
1809 /* If X contains VAR, we need to unshare it here so that we update
1810 each occurrence separately. But all identical MEMs in one insn
1811 must be replaced with the same rtx because of the possibility of
1812 MATCH_DUPs. */
1813
1814 if (reg_mentioned_p (var, x))
1815 {
1816 replacement = find_fixup_replacement (replacements, x);
1817 if (replacement->new == 0)
1818 replacement->new = copy_most_rtx (x, var);
1819
1820 *loc = x = replacement->new;
1821 }
1822 break;
1823
1824 case REG:
1825 case CC0:
1826 case PC:
1827 case CONST_INT:
1828 case CONST:
1829 case SYMBOL_REF:
1830 case LABEL_REF:
1831 case CONST_DOUBLE:
1832 return;
1833
1834 case SIGN_EXTRACT:
1835 case ZERO_EXTRACT:
1836 /* Note that in some cases those types of expressions are altered
1837 by optimize_bit_field, and do not survive to get here. */
1838 if (XEXP (x, 0) == var
1839 || (GET_CODE (XEXP (x, 0)) == SUBREG
1840 && SUBREG_REG (XEXP (x, 0)) == var))
1841 {
1842 /* Get TEM as a valid MEM in the mode presently in the insn.
1843
1844 We don't worry about the possibility of MATCH_DUP here; it
1845 is highly unlikely and would be tricky to handle. */
1846
1847 tem = XEXP (x, 0);
1848 if (GET_CODE (tem) == SUBREG)
1849 {
1850 if (GET_MODE_BITSIZE (GET_MODE (tem))
1851 > GET_MODE_BITSIZE (GET_MODE (var)))
1852 {
1853 replacement = find_fixup_replacement (replacements, var);
1854 if (replacement->new == 0)
1855 replacement->new = gen_reg_rtx (GET_MODE (var));
1856 SUBREG_REG (tem) = replacement->new;
1857 }
1858 else
1859 tem = fixup_memory_subreg (tem, insn, 0);
1860 }
1861 else
1862 tem = fixup_stack_1 (tem, insn);
1863
1864 /* Unless we want to load from memory, get TEM into the proper mode
1865 for an extract from memory. This can only be done if the
1866 extract is at a constant position and length. */
1867
1868 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1869 && GET_CODE (XEXP (x, 2)) == CONST_INT
1870 && ! mode_dependent_address_p (XEXP (tem, 0))
1871 && ! MEM_VOLATILE_P (tem))
1872 {
1873 enum machine_mode wanted_mode = VOIDmode;
1874 enum machine_mode is_mode = GET_MODE (tem);
1875 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1876
1877 #ifdef HAVE_extzv
1878 if (GET_CODE (x) == ZERO_EXTRACT)
1879 {
1880 wanted_mode
1881 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1882 if (wanted_mode == VOIDmode)
1883 wanted_mode = word_mode;
1884 }
1885 #endif
1886 #ifdef HAVE_extv
1887 if (GET_CODE (x) == SIGN_EXTRACT)
1888 {
1889 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1890 if (wanted_mode == VOIDmode)
1891 wanted_mode = word_mode;
1892 }
1893 #endif
1894 /* If we have a narrower mode, we can do something. */
1895 if (wanted_mode != VOIDmode
1896 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1897 {
1898 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1899 rtx old_pos = XEXP (x, 2);
1900 rtx newmem;
1901
1902 /* If the bytes and bits are counted differently, we
1903 must adjust the offset. */
1904 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1905 offset = (GET_MODE_SIZE (is_mode)
1906 - GET_MODE_SIZE (wanted_mode) - offset);
1907
1908 pos %= GET_MODE_BITSIZE (wanted_mode);
1909
1910 newmem = gen_rtx_MEM (wanted_mode,
1911 plus_constant (XEXP (tem, 0), offset));
1912 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1913 MEM_COPY_ATTRIBUTES (newmem, tem);
1914
1915 /* Make the change and see if the insn remains valid. */
1916 INSN_CODE (insn) = -1;
1917 XEXP (x, 0) = newmem;
1918 XEXP (x, 2) = GEN_INT (pos);
1919
1920 if (recog_memoized (insn) >= 0)
1921 return;
1922
1923 /* Otherwise, restore old position. XEXP (x, 0) will be
1924 restored later. */
1925 XEXP (x, 2) = old_pos;
1926 }
1927 }
1928
1929 /* If we get here, the bitfield extract insn can't accept a memory
1930 reference. Copy the input into a register. */
1931
1932 tem1 = gen_reg_rtx (GET_MODE (tem));
1933 emit_insn_before (gen_move_insn (tem1, tem), insn);
1934 XEXP (x, 0) = tem1;
1935 return;
1936 }
1937 break;
1938
1939 case SUBREG:
1940 if (SUBREG_REG (x) == var)
1941 {
1942 /* If this is a special SUBREG made because VAR was promoted
1943 from a wider mode, replace it with VAR and call ourself
1944 recursively, this time saying that the object previously
1945 had its current mode (by virtue of the SUBREG). */
1946
1947 if (SUBREG_PROMOTED_VAR_P (x))
1948 {
1949 *loc = var;
1950 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1951 return;
1952 }
1953
1954 /* If this SUBREG makes VAR wider, it has become a paradoxical
1955 SUBREG with VAR in memory, but these aren't allowed at this
1956 stage of the compilation. So load VAR into a pseudo and take
1957 a SUBREG of that pseudo. */
1958 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1959 {
1960 replacement = find_fixup_replacement (replacements, var);
1961 if (replacement->new == 0)
1962 replacement->new = gen_reg_rtx (GET_MODE (var));
1963 SUBREG_REG (x) = replacement->new;
1964 return;
1965 }
1966
1967 /* See if we have already found a replacement for this SUBREG.
1968 If so, use it. Otherwise, make a MEM and see if the insn
1969 is recognized. If not, or if we should force MEM into a register,
1970 make a pseudo for this SUBREG. */
1971 replacement = find_fixup_replacement (replacements, x);
1972 if (replacement->new)
1973 {
1974 *loc = replacement->new;
1975 return;
1976 }
1977
1978 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1979
1980 INSN_CODE (insn) = -1;
1981 if (! flag_force_mem && recog_memoized (insn) >= 0)
1982 return;
1983
1984 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1985 return;
1986 }
1987 break;
1988
1989 case SET:
1990 /* First do special simplification of bit-field references. */
1991 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1992 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1993 optimize_bit_field (x, insn, 0);
1994 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1995 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1996 optimize_bit_field (x, insn, NULL_PTR);
1997
1998 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1999 into a register and then store it back out. */
2000 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2001 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2002 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2003 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2004 > GET_MODE_SIZE (GET_MODE (var))))
2005 {
2006 replacement = find_fixup_replacement (replacements, var);
2007 if (replacement->new == 0)
2008 replacement->new = gen_reg_rtx (GET_MODE (var));
2009
2010 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2011 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2012 }
2013
2014 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2015 insn into a pseudo and store the low part of the pseudo into VAR. */
2016 if (GET_CODE (SET_DEST (x)) == SUBREG
2017 && SUBREG_REG (SET_DEST (x)) == var
2018 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2019 > GET_MODE_SIZE (GET_MODE (var))))
2020 {
2021 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2022 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2023 tem)),
2024 insn);
2025 break;
2026 }
2027
2028 {
2029 rtx dest = SET_DEST (x);
2030 rtx src = SET_SRC (x);
2031 #ifdef HAVE_insv
2032 rtx outerdest = dest;
2033 #endif
2034
2035 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2036 || GET_CODE (dest) == SIGN_EXTRACT
2037 || GET_CODE (dest) == ZERO_EXTRACT)
2038 dest = XEXP (dest, 0);
2039
2040 if (GET_CODE (src) == SUBREG)
2041 src = XEXP (src, 0);
2042
2043 /* If VAR does not appear at the top level of the SET
2044 just scan the lower levels of the tree. */
2045
2046 if (src != var && dest != var)
2047 break;
2048
2049 /* We will need to rerecognize this insn. */
2050 INSN_CODE (insn) = -1;
2051
2052 #ifdef HAVE_insv
2053 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2054 {
2055 /* Since this case will return, ensure we fix up all the
2056 operands here. */
2057 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2058 insn, replacements);
2059 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2060 insn, replacements);
2061 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2062 insn, replacements);
2063
2064 tem = XEXP (outerdest, 0);
2065
2066 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2067 that may appear inside a ZERO_EXTRACT.
2068 This was legitimate when the MEM was a REG. */
2069 if (GET_CODE (tem) == SUBREG
2070 && SUBREG_REG (tem) == var)
2071 tem = fixup_memory_subreg (tem, insn, 0);
2072 else
2073 tem = fixup_stack_1 (tem, insn);
2074
2075 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2076 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2077 && ! mode_dependent_address_p (XEXP (tem, 0))
2078 && ! MEM_VOLATILE_P (tem))
2079 {
2080 enum machine_mode wanted_mode;
2081 enum machine_mode is_mode = GET_MODE (tem);
2082 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2083
2084 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2085 if (wanted_mode == VOIDmode)
2086 wanted_mode = word_mode;
2087
2088 /* If we have a narrower mode, we can do something. */
2089 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2090 {
2091 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2092 rtx old_pos = XEXP (outerdest, 2);
2093 rtx newmem;
2094
2095 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2096 offset = (GET_MODE_SIZE (is_mode)
2097 - GET_MODE_SIZE (wanted_mode) - offset);
2098
2099 pos %= GET_MODE_BITSIZE (wanted_mode);
2100
2101 newmem = gen_rtx_MEM (wanted_mode,
2102 plus_constant (XEXP (tem, 0),
2103 offset));
2104 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2105 MEM_COPY_ATTRIBUTES (newmem, tem);
2106
2107 /* Make the change and see if the insn remains valid. */
2108 INSN_CODE (insn) = -1;
2109 XEXP (outerdest, 0) = newmem;
2110 XEXP (outerdest, 2) = GEN_INT (pos);
2111
2112 if (recog_memoized (insn) >= 0)
2113 return;
2114
2115 /* Otherwise, restore old position. XEXP (x, 0) will be
2116 restored later. */
2117 XEXP (outerdest, 2) = old_pos;
2118 }
2119 }
2120
2121 /* If we get here, the bit-field store doesn't allow memory
2122 or isn't located at a constant position. Load the value into
2123 a register, do the store, and put it back into memory. */
2124
2125 tem1 = gen_reg_rtx (GET_MODE (tem));
2126 emit_insn_before (gen_move_insn (tem1, tem), insn);
2127 emit_insn_after (gen_move_insn (tem, tem1), insn);
2128 XEXP (outerdest, 0) = tem1;
2129 return;
2130 }
2131 #endif
2132
2133 /* STRICT_LOW_PART is a no-op on memory references
2134 and it can cause combinations to be unrecognizable,
2135 so eliminate it. */
2136
2137 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2138 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2139
2140 /* A valid insn to copy VAR into or out of a register
2141 must be left alone, to avoid an infinite loop here.
2142 If the reference to VAR is by a subreg, fix that up,
2143 since SUBREG is not valid for a memref.
2144 Also fix up the address of the stack slot.
2145
2146 Note that we must not try to recognize the insn until
2147 after we know that we have valid addresses and no
2148 (subreg (mem ...) ...) constructs, since these interfere
2149 with determining the validity of the insn. */
2150
2151 if ((SET_SRC (x) == var
2152 || (GET_CODE (SET_SRC (x)) == SUBREG
2153 && SUBREG_REG (SET_SRC (x)) == var))
2154 && (GET_CODE (SET_DEST (x)) == REG
2155 || (GET_CODE (SET_DEST (x)) == SUBREG
2156 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2157 && GET_MODE (var) == promoted_mode
2158 && x == single_set (insn))
2159 {
2160 rtx pat;
2161
2162 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2163 if (replacement->new)
2164 SET_SRC (x) = replacement->new;
2165 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2166 SET_SRC (x) = replacement->new
2167 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2168 else
2169 SET_SRC (x) = replacement->new
2170 = fixup_stack_1 (SET_SRC (x), insn);
2171
2172 if (recog_memoized (insn) >= 0)
2173 return;
2174
2175 /* INSN is not valid, but we know that we want to
2176 copy SET_SRC (x) to SET_DEST (x) in some way. So
2177 we generate the move and see whether it requires more
2178 than one insn. If it does, we emit those insns and
2179 delete INSN. Otherwise, we can just replace the pattern
2180 of INSN; we have already verified above that INSN has
2181 no other function than to do X. */
2182
2183 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2184 if (GET_CODE (pat) == SEQUENCE)
2185 {
2186 emit_insn_after (pat, insn);
2187 PUT_CODE (insn, NOTE);
2188 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2189 NOTE_SOURCE_FILE (insn) = 0;
2190 }
2191 else
2192 PATTERN (insn) = pat;
2193
2194 return;
2195 }
2196
2197 if ((SET_DEST (x) == var
2198 || (GET_CODE (SET_DEST (x)) == SUBREG
2199 && SUBREG_REG (SET_DEST (x)) == var))
2200 && (GET_CODE (SET_SRC (x)) == REG
2201 || (GET_CODE (SET_SRC (x)) == SUBREG
2202 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2203 && GET_MODE (var) == promoted_mode
2204 && x == single_set (insn))
2205 {
2206 rtx pat;
2207
2208 if (GET_CODE (SET_DEST (x)) == SUBREG)
2209 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2210 else
2211 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2212
2213 if (recog_memoized (insn) >= 0)
2214 return;
2215
2216 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2217 if (GET_CODE (pat) == SEQUENCE)
2218 {
2219 emit_insn_after (pat, insn);
2220 PUT_CODE (insn, NOTE);
2221 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2222 NOTE_SOURCE_FILE (insn) = 0;
2223 }
2224 else
2225 PATTERN (insn) = pat;
2226
2227 return;
2228 }
2229
2230 /* Otherwise, storing into VAR must be handled specially
2231 by storing into a temporary and copying that into VAR
2232 with a new insn after this one. Note that this case
2233 will be used when storing into a promoted scalar since
2234 the insn will now have different modes on the input
2235 and output and hence will be invalid (except for the case
2236 of setting it to a constant, which does not need any
2237 change if it is valid). We generate extra code in that case,
2238 but combine.c will eliminate it. */
2239
2240 if (dest == var)
2241 {
2242 rtx temp;
2243 rtx fixeddest = SET_DEST (x);
2244
2245 /* STRICT_LOW_PART can be discarded around a MEM. */
2246 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2247 fixeddest = XEXP (fixeddest, 0);
2248 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2249 if (GET_CODE (fixeddest) == SUBREG)
2250 {
2251 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2252 promoted_mode = GET_MODE (fixeddest);
2253 }
2254 else
2255 fixeddest = fixup_stack_1 (fixeddest, insn);
2256
2257 temp = gen_reg_rtx (promoted_mode);
2258
2259 emit_insn_after (gen_move_insn (fixeddest,
2260 gen_lowpart (GET_MODE (fixeddest),
2261 temp)),
2262 insn);
2263
2264 SET_DEST (x) = temp;
2265 }
2266 }
2267
2268 default:
2269 break;
2270 }
2271
2272 /* Nothing special about this RTX; fix its operands. */
2273
2274 fmt = GET_RTX_FORMAT (code);
2275 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2276 {
2277 if (fmt[i] == 'e')
2278 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2279 if (fmt[i] == 'E')
2280 {
2281 register int j;
2282 for (j = 0; j < XVECLEN (x, i); j++)
2283 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2284 insn, replacements);
2285 }
2286 }
2287 }
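/* Editorial sketch, not part of GCC: the default case above is the
   standard rtl walk.  GET_RTX_FORMAT yields a format string per code;
   'e' marks a subexpression operand and 'E' a vector of them, and all
   other slots (integers, strings) are skipped.  A minimal analogue
   over a toy tree type, limited to four 'e' operands: */
#if 0
struct node
{
  const char *fmt;       /* one character per operand, as in rtl */
  struct node *ops[4];   /* children for the 'e' slots */
};

static void
walk (struct node *x, void (*visit) (struct node *))
{
  int i;

  visit (x);
  for (i = 0; x->fmt[i]; i++)
    if (x->fmt[i] == 'e')     /* operand I is a subexpression */
      walk (x->ops[i], visit);
}
#endif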
2288 \f
2289 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2290 return an rtx (MEM:m1 newaddr) which is equivalent.
2291 If any insns must be emitted to compute NEWADDR, put them before INSN.
2292
2293 UNCRITICAL nonzero means accept paradoxical subregs.
2294 This is used for subregs found inside REG_NOTES. */
2295
2296 static rtx
2297 fixup_memory_subreg (x, insn, uncritical)
2298 rtx x;
2299 rtx insn;
2300 int uncritical;
2301 {
2302 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2303 rtx addr = XEXP (SUBREG_REG (x), 0);
2304 enum machine_mode mode = GET_MODE (x);
2305 rtx result;
2306
2307 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2308 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2309 && ! uncritical)
2310 abort ();
2311
2312 if (BYTES_BIG_ENDIAN)
2313 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2314 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2315 addr = plus_constant (addr, offset);
2316 if (!flag_force_addr && memory_address_p (mode, addr))
2317 /* Shortcut if no insns need be emitted. */
2318 return change_address (SUBREG_REG (x), mode, addr);
2319 start_sequence ();
2320 result = change_address (SUBREG_REG (x), mode, addr);
2321 emit_insn_before (gen_sequence (), insn);
2322 end_sequence ();
2323 return result;
2324 }
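/* Editorial sketch, not part of GCC: the offset arithmetic above,
   worked with an assumed 4-byte UNITS_PER_WORD.  Names prefixed EX_
   and the mode sizes are invented for the example.  */
#if 0
#include <stdio.h>

#define EX_UNITS_PER_WORD 4
#define EX_MIN(a, b) ((a) < (b) ? (a) : (b))

/* Byte offset selected for word WORD of a MEM_SIZE-byte object
   accessed in a SUBREG_SIZE-byte mode.  */
static int
subreg_byte_offset (int word, int mem_size, int subreg_size,
                    int bytes_big_endian)
{
  int offset = word * EX_UNITS_PER_WORD;

  if (bytes_big_endian)
    offset += (EX_MIN (EX_UNITS_PER_WORD, mem_size)
               - EX_MIN (EX_UNITS_PER_WORD, subreg_size));
  return offset;
}

int
main (void)
{
  /* SImode (4 bytes), word 1 of a DImode (8-byte) MEM.  */
  printf ("%d\n", subreg_byte_offset (1, 8, 4, 0));  /* 4 */
  printf ("%d\n", subreg_byte_offset (1, 8, 4, 1));  /* 4 */
  /* HImode (2 bytes), word 0 of an SImode (4-byte) MEM.  */
  printf ("%d\n", subreg_byte_offset (0, 4, 2, 1));  /* 2 */
  return 0;
}
#endif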
2325
2326 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2327 Replace subexpressions of X in place.
2328 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2329 Otherwise return X, with its contents possibly altered.
2330
2331 If any insns must be emitted to compute NEWADDR, put them before INSN.
2332
2333 UNCRITICAL is as in fixup_memory_subreg. */
2334
2335 static rtx
2336 walk_fixup_memory_subreg (x, insn, uncritical)
2337 register rtx x;
2338 rtx insn;
2339 int uncritical;
2340 {
2341 register enum rtx_code code;
2342 register const char *fmt;
2343 register int i;
2344
2345 if (x == 0)
2346 return 0;
2347
2348 code = GET_CODE (x);
2349
2350 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2351 return fixup_memory_subreg (x, insn, uncritical);
2352
2353 /* Nothing special about this RTX; fix its operands. */
2354
2355 fmt = GET_RTX_FORMAT (code);
2356 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2357 {
2358 if (fmt[i] == 'e')
2359 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2360 if (fmt[i] == 'E')
2361 {
2362 register int j;
2363 for (j = 0; j < XVECLEN (x, i); j++)
2364 XVECEXP (x, i, j)
2365 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2366 }
2367 }
2368 return x;
2369 }
2370 \f
2371 /* For each memory ref within X, if it refers to a stack slot
2372 with an out of range displacement, put the address in a temp register
2373 (emitting new insns before INSN to load these registers)
2374 and alter the memory ref to use that register.
2375 Replace each such MEM rtx with a copy, to avoid clobberage. */
2376
2377 static rtx
2378 fixup_stack_1 (x, insn)
2379 rtx x;
2380 rtx insn;
2381 {
2382 register int i;
2383 register RTX_CODE code = GET_CODE (x);
2384 register const char *fmt;
2385
2386 if (code == MEM)
2387 {
2388 register rtx ad = XEXP (x, 0);
2389 /* If we have the address of a stack slot but it's not valid
2390 (displacement is too large), compute the sum in a register. */
2391 if (GET_CODE (ad) == PLUS
2392 && GET_CODE (XEXP (ad, 0)) == REG
2393 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2394 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2395 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2396 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2397 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2398 #endif
2399 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2400 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2401 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2402 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2403 {
2404 rtx temp, seq;
2405 if (memory_address_p (GET_MODE (x), ad))
2406 return x;
2407
2408 start_sequence ();
2409 temp = copy_to_reg (ad);
2410 seq = gen_sequence ();
2411 end_sequence ();
2412 emit_insn_before (seq, insn);
2413 return change_address (x, VOIDmode, temp);
2414 }
2415 return x;
2416 }
2417
2418 fmt = GET_RTX_FORMAT (code);
2419 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2420 {
2421 if (fmt[i] == 'e')
2422 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2423 if (fmt[i] == 'E')
2424 {
2425 register int j;
2426 for (j = 0; j < XVECLEN (x, i); j++)
2427 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2428 }
2429 }
2430 return x;
2431 }
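/* Editorial illustration, not part of GCC: on a machine whose memory
   displacements are limited (the 13-bit signed range below is an
   assumption, chosen to resemble SPARC), the rewrite above turns

     (mem:SI (plus (reg fp) (const_int 40000)))     ; invalid address

   into a copy of the sum through a temporary pseudo:

     (set (reg tmp) (plus (reg fp) (const_int 40000)))
     ... (mem:SI (reg tmp)) ...

   40000 lies outside [-4096, 4095], so memory_address_p fails and
   the address is computed with copy_to_reg instead.  */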
2432 \f
2433 /* Optimization: a bit-field instruction whose field
2434 happens to be a byte or halfword in memory
2435 can be changed to a move instruction.
2436
2437 We call here when INSN is an insn to examine or store into a bit-field.
2438 BODY is the SET-rtx to be altered.
2439
2440 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2441 (Currently this is called only from function.c, and EQUIV_MEM
2442 is always 0.) */
2443
2444 static void
2445 optimize_bit_field (body, insn, equiv_mem)
2446 rtx body;
2447 rtx insn;
2448 rtx *equiv_mem;
2449 {
2450 register rtx bitfield;
2451 int destflag;
2452 rtx seq = 0;
2453 enum machine_mode mode;
2454
2455 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2456 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2457 bitfield = SET_DEST (body), destflag = 1;
2458 else
2459 bitfield = SET_SRC (body), destflag = 0;
2460
2461 /* First check that the field being stored has constant size and position
2462 and is in fact a byte or halfword suitably aligned. */
2463
2464 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2465 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2466 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2467 != BLKmode)
2468 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2469 {
2470 register rtx memref = 0;
2471
2472 /* Now check that the containing word is memory, not a register,
2473 and that it is safe to change the machine mode. */
2474
2475 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2476 memref = XEXP (bitfield, 0);
2477 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2478 && equiv_mem != 0)
2479 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2480 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2481 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2482 memref = SUBREG_REG (XEXP (bitfield, 0));
2483 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2484 && equiv_mem != 0
2485 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2486 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2487
2488 if (memref
2489 && ! mode_dependent_address_p (XEXP (memref, 0))
2490 && ! MEM_VOLATILE_P (memref))
2491 {
2492 /* Now adjust the address, first for any subreg'ing
2493 that we are now getting rid of,
2494 and then for which byte of the word is wanted. */
2495
2496 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2497 rtx insns;
2498
2499 /* Adjust OFFSET to count bits from low-address byte. */
2500 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2501 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2502 - offset - INTVAL (XEXP (bitfield, 1)));
2503
2504 /* Adjust OFFSET to count bytes from low-address byte. */
2505 offset /= BITS_PER_UNIT;
2506 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2507 {
2508 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2509 if (BYTES_BIG_ENDIAN)
2510 offset -= (MIN (UNITS_PER_WORD,
2511 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2512 - MIN (UNITS_PER_WORD,
2513 GET_MODE_SIZE (GET_MODE (memref))));
2514 }
2515
2516 start_sequence ();
2517 memref = change_address (memref, mode,
2518 plus_constant (XEXP (memref, 0), offset));
2519 insns = get_insns ();
2520 end_sequence ();
2521 emit_insns_before (insns, insn);
2522
2523 /* Store this memory reference where
2524 we found the bit field reference. */
2525
2526 if (destflag)
2527 {
2528 validate_change (insn, &SET_DEST (body), memref, 1);
2529 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2530 {
2531 rtx src = SET_SRC (body);
2532 while (GET_CODE (src) == SUBREG
2533 && SUBREG_WORD (src) == 0)
2534 src = SUBREG_REG (src);
2535 if (GET_MODE (src) != GET_MODE (memref))
2536 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2537 validate_change (insn, &SET_SRC (body), src, 1);
2538 }
2539 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2540 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2541 /* This shouldn't happen because anything that didn't have
2542 one of these modes should have been converted explicitly
2543 and then referenced through a subreg.
2544 This is so because the original bit-field was
2545 handled by agg_mode and so its tree structure had
2546 the same mode that memref now has. */
2547 abort ();
2548 }
2549 else
2550 {
2551 rtx dest = SET_DEST (body);
2552
2553 while (GET_CODE (dest) == SUBREG
2554 && SUBREG_WORD (dest) == 0
2555 && (GET_MODE_CLASS (GET_MODE (dest))
2556 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2557 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2558 <= UNITS_PER_WORD))
2559 dest = SUBREG_REG (dest);
2560
2561 validate_change (insn, &SET_DEST (body), dest, 1);
2562
2563 if (GET_MODE (dest) == GET_MODE (memref))
2564 validate_change (insn, &SET_SRC (body), memref, 1);
2565 else
2566 {
2567 /* Convert the mem ref to the destination mode. */
2568 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2569
2570 start_sequence ();
2571 convert_move (newreg, memref,
2572 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2573 seq = get_insns ();
2574 end_sequence ();
2575
2576 validate_change (insn, &SET_SRC (body), newreg, 1);
2577 }
2578 }
2579
2580 /* See if we can convert this extraction or insertion into
2581 a simple move insn. We might not be able to do so if this
2582 was, for example, part of a PARALLEL.
2583
2584 If we succeed, write out any needed conversions. If we fail,
2585 it is hard to guess why we failed, so don't do anything
2586 special; just let the optimization be suppressed. */
2587
2588 if (apply_change_group () && seq)
2589 emit_insns_before (seq, insn);
2590 }
2591 }
2592 }
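/* Editorial sketch, not part of GCC: the qualifying test above, worked
   numerically.  The sizes and positions are example assumptions.  An
   8-bit field at bit 16 satisfies 16 % 8 == 0 and becomes a QImode MEM
   at byte offset 16 / 8 == 2 (little endian); an 8-bit field at bit 12
   stays a bit-field insn.  */
#if 0
#include <assert.h>

/* Nonzero if a SIZE-bit field at bit POS reduces to a whole byte or
   halfword, mirroring the mode_for_size / modulo test above.  */
static int
field_is_simple (int size, int pos)
{
  return (size == 8 || size == 16) && pos % size == 0;
}

int
main (void)
{
  assert (field_is_simple (8, 16));    /* QImode move, byte offset 2 */
  assert (!field_is_simple (8, 12));   /* misaligned; keep the extract */
  return 0;
}
#endif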
2593 \f
2594 /* These routines are responsible for converting virtual register references
2595 to the actual hard register references once RTL generation is complete.
2596
2597 The following five variables are used for communication between the
2598 routines. They contain the offsets of the virtual registers from their
2599 respective hard registers. */
2600
2601 static int in_arg_offset;
2602 static int var_offset;
2603 static int dynamic_offset;
2604 static int out_arg_offset;
2605 static int cfa_offset;
2606
2607 /* On most machines, the stack pointer register is equivalent to the bottom
2608 of the stack. */
2609
2610 #ifndef STACK_POINTER_OFFSET
2611 #define STACK_POINTER_OFFSET 0
2612 #endif
2613
2614 /* If not defined, pick an appropriate default for the offset of dynamically
2615 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2616 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2617
2618 #ifndef STACK_DYNAMIC_OFFSET
2619
2620 #ifdef ACCUMULATE_OUTGOING_ARGS
2621 /* The bottom of the stack points to the actual arguments. If
2622 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2623 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2624 stack space for register parameters is not pushed by the caller, but
2625 rather part of the fixed stack areas and hence not included in
2626 `current_function_outgoing_args_size'. Nevertheless, we must allow
2627 for it when allocating stack dynamic objects. */
2628
2629 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2630 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2631 (current_function_outgoing_args_size \
2632 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2633
2634 #else
2635 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2636 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2637 #endif
2638
2639 #else
2640 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2641 #endif
2642 #endif
2643
2644 /* On a few machines, the CFA coincides with the arg pointer. */
2645
2646 #ifndef ARG_POINTER_CFA_OFFSET
2647 #define ARG_POINTER_CFA_OFFSET 0
2648 #endif
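/* Editorial illustration, not part of GCC: with ACCUMULATE_OUTGOING_ARGS,
   a REG_PARM_STACK_SPACE of 24 bytes, an outgoing-argument block of 16
   bytes and a STACK_POINTER_OFFSET of 0 (all example figures), the
   default STACK_DYNAMIC_OFFSET above evaluates to 16 + 24 + 0 = 40:
   dynamically allocated objects start 40 bytes above the stack pointer,
   past both fixed areas.  */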
2649
2650
2651 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2652 its address taken. DECL is the decl for the object stored in the
2653 register, for later use if we do need to force REG into the stack.
2654 REG is overwritten by the MEM, as in put_reg_into_stack. */
2655
2656 rtx
2657 gen_mem_addressof (reg, decl)
2658 rtx reg;
2659 tree decl;
2660 {
2661 tree type = TREE_TYPE (decl);
2662 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2663 REGNO (reg), decl);
2664 /* If the original REG was a user-variable, then so is the REG whose
2665 address is being taken. */
2666 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2667
2668 PUT_CODE (reg, MEM);
2669 PUT_MODE (reg, DECL_MODE (decl));
2670 XEXP (reg, 0) = r;
2671 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2672 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2673 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2674
2675 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2676 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2677
2678 return reg;
2679 }
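/* Editorial illustration, not part of GCC: after gen_mem_addressof,
   a pseudo such as (reg:SI 42) holding DECL is rewritten in place to

     (mem:SI (addressof:Pmode (reg:SI 101) 42 DECL))

   where (reg:SI 101) is the fresh pseudo allocated above and 42 is the
   original register number kept in ADDRESSOF_REGNO.  (The register
   numbers are made up for the example.)  purge_addressof later either
   folds the ADDRESSOF away again or forces the register into a real
   stack slot.  */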
2680
2681 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2682
2683 void
2684 flush_addressof (decl)
2685 tree decl;
2686 {
2687 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2688 && DECL_RTL (decl) != 0
2689 && GET_CODE (DECL_RTL (decl)) == MEM
2690 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2691 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2692 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2693 }
2694
2695 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2696
2697 static void
2698 put_addressof_into_stack (r, ht)
2699 rtx r;
2700 struct hash_table *ht;
2701 {
2702 tree decl = ADDRESSOF_DECL (r);
2703 rtx reg = XEXP (r, 0);
2704
2705 if (GET_CODE (reg) != REG)
2706 abort ();
2707
2708 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2709 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2710 ADDRESSOF_REGNO (r),
2711 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2712 }
2713
2714 /* List of replacements made below in purge_addressof_1 when creating
2715 bitfield insertions. */
2716 static rtx purge_bitfield_addressof_replacements;
2717
2718 /* List of replacements made below in purge_addressof_1 for patterns
2719 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2720 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2721 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2722 enough in complex cases, e.g. when some field values can be
2723 extracted by using a MEM with a narrower mode. */
2724 static rtx purge_addressof_replacements;
2725
2726 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2727 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2728 the stack. */
2729
2730 static void
2731 purge_addressof_1 (loc, insn, force, store, ht)
2732 rtx *loc;
2733 rtx insn;
2734 int force, store;
2735 struct hash_table *ht;
2736 {
2737 rtx x;
2738 RTX_CODE code;
2739 int i, j;
2740 const char *fmt;
2741
2742 /* Re-start here to avoid recursion in common cases. */
2743 restart:
2744
2745 x = *loc;
2746 if (x == 0)
2747 return;
2748
2749 code = GET_CODE (x);
2750
2751 /* If we don't return in any of the cases below, we will recurse inside
2752 the RTX, which will normally result in any ADDRESSOF being forced into
2753 memory. */
2754 if (code == SET)
2755 {
2756 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2757 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2758 return;
2759 }
2760
2761 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2762 {
2763 /* We must create a copy of the rtx because it was created by
2764 overwriting a REG rtx which is always shared. */
2765 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2766 rtx insns;
2767
2768 if (validate_change (insn, loc, sub, 0)
2769 || validate_replace_rtx (x, sub, insn))
2770 return;
2771
2772 start_sequence ();
2773 sub = force_operand (sub, NULL_RTX);
2774 if (! validate_change (insn, loc, sub, 0)
2775 && ! validate_replace_rtx (x, sub, insn))
2776 abort ();
2777
2778 insns = gen_sequence ();
2779 end_sequence ();
2780 emit_insn_before (insns, insn);
2781 return;
2782 }
2783
2784 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2785 {
2786 rtx sub = XEXP (XEXP (x, 0), 0);
2787 rtx sub2;
2788
2789 if (GET_CODE (sub) == MEM)
2790 {
2791 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2792 MEM_COPY_ATTRIBUTES (sub2, sub);
2793 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2794 sub = sub2;
2795 }
2796 else if (GET_CODE (sub) == REG
2797 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2798 ;
2799 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2800 {
2801 int size_x, size_sub;
2802
2803 if (!insn)
2804 {
2805 /* When processing REG_NOTES look at the list of
2806 replacements done on the insn to find the register that X
2807 was replaced by. */
2808 rtx tem;
2809
2810 for (tem = purge_bitfield_addressof_replacements;
2811 tem != NULL_RTX;
2812 tem = XEXP (XEXP (tem, 1), 1))
2813 if (rtx_equal_p (x, XEXP (tem, 0)))
2814 {
2815 *loc = XEXP (XEXP (tem, 1), 0);
2816 return;
2817 }
2818
2819 /* See comment for purge_addressof_replacements. */
2820 for (tem = purge_addressof_replacements;
2821 tem != NULL_RTX;
2822 tem = XEXP (XEXP (tem, 1), 1))
2823 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2824 {
2825 rtx z = XEXP (XEXP (tem, 1), 0);
2826
2827 if (GET_MODE (x) == GET_MODE (z)
2828 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2829 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2830 abort ();
2831
2832 /* It can happen that the note may speak of things
2833 in a wider (or just different) mode than the
2834 code did. This is especially true of
2835 REG_RETVAL. */
2836
2837 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2838 z = SUBREG_REG (z);
2839
2840 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2841 && (GET_MODE_SIZE (GET_MODE (x))
2842 > GET_MODE_SIZE (GET_MODE (z))))
2843 {
2844 /* This can occur as a result of invalid
2845 pointer casts, e.g. float f; ...
2846 *(long long int *)&f.
2847 ??? We could emit a warning here, but
2848 without a line number that wouldn't be
2849 very helpful. */
2850 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2851 }
2852 else
2853 z = gen_lowpart (GET_MODE (x), z);
2854
2855 *loc = z;
2856 return;
2857 }
2858
2859 /* There should always be such a replacement. */
2860 abort ();
2861 }
2862
2863 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2864 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2865
2866 /* Don't even consider working with paradoxical subregs,
2867 or the moral equivalent seen here. */
2868 if (size_x <= size_sub
2869 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2870 {
2871 /* Do a bitfield insertion to mirror what would happen
2872 in memory. */
2873
2874 rtx val, seq;
2875
2876 if (store)
2877 {
2878 rtx p = PREV_INSN (insn);
2879
2880 start_sequence ();
2881 val = gen_reg_rtx (GET_MODE (x));
2882 if (! validate_change (insn, loc, val, 0))
2883 {
2884 /* Discard the current sequence and put the
2885 ADDRESSOF on stack. */
2886 end_sequence ();
2887 goto give_up;
2888 }
2889 seq = gen_sequence ();
2890 end_sequence ();
2891 emit_insn_before (seq, insn);
2892 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2893 insn, ht);
2894
2895 start_sequence ();
2896 store_bit_field (sub, size_x, 0, GET_MODE (x),
2897 val, GET_MODE_SIZE (GET_MODE (sub)),
2898 GET_MODE_SIZE (GET_MODE (sub)));
2899
2900 /* Make sure to unshare any shared rtl that store_bit_field
2901 might have created. */
2902 for (p = get_insns (); p; p = NEXT_INSN (p))
2903 {
2904 reset_used_flags (PATTERN (p));
2905 reset_used_flags (REG_NOTES (p));
2906 reset_used_flags (LOG_LINKS (p));
2907 }
2908 unshare_all_rtl (get_insns ());
2909
2910 seq = gen_sequence ();
2911 end_sequence ();
2912 p = emit_insn_after (seq, insn);
2913 if (NEXT_INSN (insn))
2914 compute_insns_for_mem (NEXT_INSN (insn),
2915 p ? NEXT_INSN (p) : NULL_RTX,
2916 ht);
2917 }
2918 else
2919 {
2920 rtx p = PREV_INSN (insn);
2921
2922 start_sequence ();
2923 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2924 GET_MODE (x), GET_MODE (x),
2925 GET_MODE_SIZE (GET_MODE (sub)),
2926 GET_MODE_SIZE (GET_MODE (sub)));
2927
2928 if (! validate_change (insn, loc, val, 0))
2929 {
2930 /* Discard the current sequence and put the
2931 ADDRESSOF on stack. */
2932 end_sequence ();
2933 goto give_up;
2934 }
2935
2936 seq = gen_sequence ();
2937 end_sequence ();
2938 emit_insn_before (seq, insn);
2939 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2940 insn, ht);
2941 }
2942
2943 /* Remember the replacement so that the same one can be done
2944 on the REG_NOTES. */
2945 purge_bitfield_addressof_replacements
2946 = gen_rtx_EXPR_LIST (VOIDmode, x,
2947 gen_rtx_EXPR_LIST
2948 (VOIDmode, val,
2949 purge_bitfield_addressof_replacements));
2950
2951 /* We replaced with a reg -- all done. */
2952 return;
2953 }
2954 }
2955
2956 else if (validate_change (insn, loc, sub, 0))
2957 {
2958 /* Remember the replacement so that the same one can be done
2959 on the REG_NOTES. */
2960 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
2961 {
2962 rtx tem;
2963
2964 for (tem = purge_addressof_replacements;
2965 tem != NULL_RTX;
2966 tem = XEXP (XEXP (tem, 1), 1))
2967 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2968 {
2969 XEXP (XEXP (tem, 1), 0) = sub;
2970 return;
2971 }
2972 purge_addressof_replacements
2973 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
2974 gen_rtx_EXPR_LIST (VOIDmode, sub,
2975 purge_addressof_replacements));
2976 return;
2977 }
2978 goto restart;
2979 }
2980 give_up:;
2981 /* else give up and put it into the stack */
2982 }
2983
2984 else if (code == ADDRESSOF)
2985 {
2986 put_addressof_into_stack (x, ht);
2987 return;
2988 }
2995
2996 /* Scan all subexpressions. */
2997 fmt = GET_RTX_FORMAT (code);
2998 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2999 {
3000 if (*fmt == 'e')
3001 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3002 else if (*fmt == 'E')
3003 for (j = 0; j < XVECLEN (x, i); j++)
3004 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3005 }
3006 }
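/* Editorial illustration, not part of GCC: the bit-field path above
   makes a register reference behave like the memory access it
   replaces.  E.g. for float f accessed as *(int *)&f while f still
   lives in (reg:SF 50) (register number invented), a read becomes

     extract_bit_field ((reg:SF 50), 32, 0, ...)

   and a store becomes a store_bit_field into the same register, so
   the bits land exactly where they would have landed in memory.  */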
3007
3008 /* Return a new hash table entry in HT. */
3009
3010 static struct hash_entry *
3011 insns_for_mem_newfunc (he, ht, k)
3012 struct hash_entry *he;
3013 struct hash_table *ht;
3014 hash_table_key k ATTRIBUTE_UNUSED;
3015 {
3016 struct insns_for_mem_entry *ifmhe;
3017 if (he)
3018 return he;
3019
3020 ifmhe = ((struct insns_for_mem_entry *)
3021 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3022 ifmhe->insns = NULL_RTX;
3023
3024 return &ifmhe->he;
3025 }
3026
3027 /* Return a hash value for K, a REG. */
3028
3029 static unsigned long
3030 insns_for_mem_hash (k)
3031 hash_table_key k;
3032 {
3033 /* K is really a RTX. Just use the address as the hash value. */
3034 return (unsigned long) k;
3035 }
3036
3037 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3038
3039 static boolean
3040 insns_for_mem_comp (k1, k2)
3041 hash_table_key k1;
3042 hash_table_key k2;
3043 {
3044 return k1 == k2;
3045 }
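/* Editorial note: the two callbacks above hash and compare REGs by
   pointer identity.  That is sound here because each pseudo register
   has a single shared rtx object (as noted in purge_addressof_1), so
   two references to the same register are the same pointer.  */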
3046
3047 struct insns_for_mem_walk_info {
3048 /* The hash table that we are using to record which INSNs use which
3049 MEMs. */
3050 struct hash_table *ht;
3051
3052 /* The INSN we are currently processing. */
3053 rtx insn;
3054
3055 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3056 to find the insns that use the REGs in the ADDRESSOFs. */
3057 int pass;
3058 };
3059
3060 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3061 that might be used in an ADDRESSOF expression, record this INSN in
3062 the hash table given by DATA (which is really a pointer to an
3063 insns_for_mem_walk_info structure). */
3064
3065 static int
3066 insns_for_mem_walk (r, data)
3067 rtx *r;
3068 void *data;
3069 {
3070 struct insns_for_mem_walk_info *ifmwi
3071 = (struct insns_for_mem_walk_info *) data;
3072
3073 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3074 && GET_CODE (XEXP (*r, 0)) == REG)
3075 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3076 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3077 {
3078 /* Look up this REG in the hash table; do not create an entry. */
3079 struct insns_for_mem_entry *ifme
3080 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3081 *r,
3082 /*create=*/0,
3083 /*copy=*/0);
3084
3085 /* If we have not already recorded this INSN, do so now. Since
3086 we process the INSNs in order, we know that if we have
3087 recorded it, it must be at the front of the list. */
3088 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3089 {
3090 /* We do the allocation on the same obstack as is used for
3091 the hash table since this memory will not be used once
3092 the hash table is deallocated. */
3093 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3094 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3095 ifme->insns);
3096 pop_obstacks ();
3097 }
3098 }
3099
3100 return 0;
3101 }
3102
3103 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3104 which REGs in HT. */
3105
3106 static void
3107 compute_insns_for_mem (insns, last_insn, ht)
3108 rtx insns;
3109 rtx last_insn;
3110 struct hash_table *ht;
3111 {
3112 rtx insn;
3113 struct insns_for_mem_walk_info ifmwi;
3114 ifmwi.ht = ht;
3115
3116 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3117 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3118 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3119 {
3120 ifmwi.insn = insn;
3121 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3122 }
3123 }
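/* Editorial illustration, not part of GCC: compute_insns_for_mem makes
   two passes so that only interesting registers get insn lists.  Given

     insn 10:  (set (reg 7) (addressof (reg 5)))
     insn 11:  (set (reg 6) (plus (reg 5) (reg 8)))

   pass 0 creates a table entry for (reg 5) only, since it appears
   under an ADDRESSOF; pass 1 then records insns 10 and 11 in that
   entry and ignores (reg 6), (reg 7) and (reg 8), which have none.
   The insn and register numbers are invented for the example.  */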
3124
3125 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3126 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3127 stack. */
3128
3129 void
3130 purge_addressof (insns)
3131 rtx insns;
3132 {
3133 rtx insn;
3134 struct hash_table ht;
3135
3136 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3137 requires a fixup pass over the instruction stream to correct
3138 INSNs that depended on the REG being a REG, and not a MEM. But,
3139 these fixup passes are slow. Furthermore, most MEMs are not
3140 mentioned in very many instructions. So, we speed up the process
3141 by pre-calculating which REGs occur in which INSNs; that allows
3142 us to perform the fixup passes much more quickly. */
3143 hash_table_init (&ht,
3144 insns_for_mem_newfunc,
3145 insns_for_mem_hash,
3146 insns_for_mem_comp);
3147 compute_insns_for_mem (insns, NULL_RTX, &ht);
3148
3149 for (insn = insns; insn; insn = NEXT_INSN (insn))
3150 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3151 || GET_CODE (insn) == CALL_INSN)
3152 {
3153 purge_addressof_1 (&PATTERN (insn), insn,
3154 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3155 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3156 }
3157
3158 /* Clean up. */
3159 hash_table_free (&ht);
3160 purge_bitfield_addressof_replacements = 0;
3161 purge_addressof_replacements = 0;
3162 }
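/* Editorial note: purge_addressof is meant to run once over the whole
   insn stream after RTL generation, e.g. (hypothetical call site):

     purge_addressof (get_insns ());

   after which no ADDRESSOF remains: each has been folded away or its
   register forced into the stack.  */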
3163 \f
3164 /* Pass through the INSNS of function FNDECL and convert virtual register
3165 references to hard register references. */
3166
3167 void
3168 instantiate_virtual_regs (fndecl, insns)
3169 tree fndecl;
3170 rtx insns;
3171 {
3172 rtx insn;
3173 int i;
3174
3175 /* Compute the offsets to use for this function. */
3176 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3177 var_offset = STARTING_FRAME_OFFSET;
3178 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3179 out_arg_offset = STACK_POINTER_OFFSET;
3180 cfa_offset = ARG_POINTER_CFA_OFFSET;
3181
3182 /* Scan all variables and parameters of this function. For each that is
3183 in memory, instantiate all virtual registers if the result is a valid
3184 address. If not, we do it later. That will handle most uses of virtual
3185 regs on many machines. */
3186 instantiate_decls (fndecl, 1);
3187
3188 /* Initialize recognition, indicating that volatile is OK. */
3189 init_recog ();
3190
3191 /* Scan through all the insns, instantiating every virtual register still
3192 present. */
3193 for (insn = insns; insn; insn = NEXT_INSN (insn))
3194 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3195 || GET_CODE (insn) == CALL_INSN)
3196 {
3197 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3198 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3199 }
3200
3201 /* Instantiate the stack slots for the parm registers, for later use in
3202 addressof elimination. */
3203 for (i = 0; i < max_parm_reg; ++i)
3204 if (parm_reg_stack_loc[i])
3205 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3206
3207 /* Now instantiate the remaining register equivalences for debugging info.
3208 These will not be valid addresses. */
3209 instantiate_decls (fndecl, 0);
3210
3211 /* Indicate that, from now on, assign_stack_local should use
3212 frame_pointer_rtx. */
3213 virtuals_instantiated = 1;
3214 }
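/* Editorial illustration, not part of GCC: assuming FIRST_PARM_OFFSET
   of 8 for the target (an example figure), in_arg_offset is 8 and the
   pass rewrites

     (mem:SI (plus (reg virtual-incoming-args) (const_int 4)))

   into

     (mem:SI (plus (reg argp) (const_int 12)))

   i.e. each use of a virtual register becomes its hard register with
   the corresponding offset folded into the constant term.  */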
3215
3216 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3217 all virtual registers in their DECL_RTL's.
3218
3219 If VALID_ONLY, do this only if the resulting address is still valid.
3220 Otherwise, always do it. */
3221
3222 static void
3223 instantiate_decls (fndecl, valid_only)
3224 tree fndecl;
3225 int valid_only;
3226 {
3227 tree decl;
3228
3229 if (DECL_SAVED_INSNS (fndecl))
3230 /* When compiling an inline function, the obstack used for
3231 rtl allocation is the maybepermanent_obstack. Calling
3232 `resume_temporary_allocation' switches us back to that
3233 obstack while we process this function's parameters. */
3234 resume_temporary_allocation ();
3235
3236 /* Process all parameters of the function. */
3237 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3238 {
3239 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3240
3241 instantiate_decl (DECL_RTL (decl), size, valid_only);
3242
3243 /* If the parameter was promoted, then the incoming RTL mode may be
3244 larger than the declared type size. We must use the larger of
3245 the two sizes. */
3246 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3247 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3248 }
3249
3250 /* Now process all variables defined in the function or its subblocks. */
3251 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3252
3253 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3254 {
3255 /* Save all rtl allocated for this function by raising the
3256 high-water mark on the maybepermanent_obstack. */
3257 preserve_data ();
3258 /* All further rtl allocation is now done in the current_obstack. */
3259 rtl_in_current_obstack ();
3260 }
3261 }
3262
3263 /* Subroutine of instantiate_decls: Process all decls in the given
3264 BLOCK node and all its subblocks. */
3265
3266 static void
3267 instantiate_decls_1 (let, valid_only)
3268 tree let;
3269 int valid_only;
3270 {
3271 tree t;
3272
3273 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3274 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3275 valid_only);
3276
3277 /* Process all subblocks. */
3278 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3279 instantiate_decls_1 (t, valid_only);
3280 }
3281
3282 /* Subroutine of the preceding procedures: Given RTL representing a
3283 decl and the size of the object, do any instantiation required.
3284
3285 If VALID_ONLY is non-zero, it means that the RTL should only be
3286 changed if the new address is valid. */
3287
3288 static void
3289 instantiate_decl (x, size, valid_only)
3290 rtx x;
3291 int size;
3292 int valid_only;
3293 {
3294 enum machine_mode mode;
3295 rtx addr;
3296
3297 /* If this is not a MEM, no need to do anything. Similarly if the
3298 address is a constant or a register that is not a virtual register. */
3299
3300 if (x == 0 || GET_CODE (x) != MEM)
3301 return;
3302
3303 addr = XEXP (x, 0);
3304 if (CONSTANT_P (addr)
3305 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3306 || (GET_CODE (addr) == REG
3307 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3308 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3309 return;
3310
3311 /* If we should only do this if the address is valid, copy the address.
3312 We need to do this so we can undo any changes that might make the
3313 address invalid. This copy is unfortunate, but probably can't be
3314 avoided. */
3315
3316 if (valid_only)
3317 addr = copy_rtx (addr);
3318
3319 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3320
3321 if (valid_only)
3322 {
3323 /* Now verify that the resulting address is valid for every integer or
3324 floating-point mode up to and including SIZE bytes long. We do this
3325 since the object might be accessed in any mode and frame addresses
3326 are shared. */
3327
3328 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3329 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3330 mode = GET_MODE_WIDER_MODE (mode))
3331 if (! memory_address_p (mode, addr))
3332 return;
3333
3334 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3335 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3336 mode = GET_MODE_WIDER_MODE (mode))
3337 if (! memory_address_p (mode, addr))
3338 return;
3339 }
3340
3341 /* Put back the address now that we have updated it and we either know
3342 it is valid or we don't care whether it is valid. */
3343
3344 XEXP (x, 0) = addr;
3345 }
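/* Editorial sketch, not part of GCC: the validation loops above walk a
   mode class from its narrowest member to the widest one still fitting
   in SIZE bytes.  The shape of that walk, with toy data standing in
   for GET_CLASS_NARROWEST_MODE / GET_MODE_WIDER_MODE: */
#if 0
#include <stdio.h>

/* Example integer-mode sizes, narrowest first (QI, HI, SI, DI).  */
static const int mode_size[] = { 1, 2, 4, 8 };

int
main (void)
{
  int size = 4;  /* object size in bytes, e.g. an SImode variable */
  int m;

  /* Visit every mode up to and including SIZE bytes; the real code
     tests memory_address_p for each and gives up on any failure.  */
  for (m = 0; m < 4 && mode_size[m] <= size; m++)
    printf ("check mode of %d bytes\n", mode_size[m]);
  return 0;
}
#endif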
3346 \f
3347 /* Given a pointer to a piece of rtx and an optional pointer to the
3348 containing object, instantiate any virtual registers present in it.
3349
3350 If EXTRA_INSNS, we always do the replacement and generate
3351 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3352 is not valid.
3353
3354 Return 1 if we either had nothing to do or if we were able to do the
3355 needed replacement. Return 0 otherwise; we only return zero if
3356 EXTRA_INSNS is zero.
3357
3358 We first try some simple transformations to avoid the creation of extra
3359 pseudos. */
3360
3361 static int
3362 instantiate_virtual_regs_1 (loc, object, extra_insns)
3363 rtx *loc;
3364 rtx object;
3365 int extra_insns;
3366 {
3367 rtx x;
3368 RTX_CODE code;
3369 rtx new = 0;
3370 HOST_WIDE_INT offset = 0;
3371 rtx temp;
3372 rtx seq;
3373 int i, j;
3374 const char *fmt;
3375
3376 /* Re-start here to avoid recursion in common cases. */
3377 restart:
3378
3379 x = *loc;
3380 if (x == 0)
3381 return 1;
3382
3383 code = GET_CODE (x);
3384
3385 /* Check for some special cases. */
3386 switch (code)
3387 {
3388 case CONST_INT:
3389 case CONST_DOUBLE:
3390 case CONST:
3391 case SYMBOL_REF:
3392 case CODE_LABEL:
3393 case PC:
3394 case CC0:
3395 case ASM_INPUT:
3396 case ADDR_VEC:
3397 case ADDR_DIFF_VEC:
3398 case RETURN:
3399 return 1;
3400
3401 case SET:
3402 /* We are allowed to set the virtual registers. This means that
3403 the actual register should receive the source minus the
3404 appropriate offset. This is used, for example, in the handling
3405 of non-local gotos. */
3406 if (SET_DEST (x) == virtual_incoming_args_rtx)
3407 new = arg_pointer_rtx, offset = - in_arg_offset;
3408 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3409 new = frame_pointer_rtx, offset = - var_offset;
3410 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3411 new = stack_pointer_rtx, offset = - dynamic_offset;
3412 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3413 new = stack_pointer_rtx, offset = - out_arg_offset;
3414 else if (SET_DEST (x) == virtual_cfa_rtx)
3415 new = arg_pointer_rtx, offset = - cfa_offset;
3416
3417 if (new)
3418 {
3419 /* The only valid sources here are PLUS or REG. Just do
3420 the simplest possible thing to handle them. */
3421 if (GET_CODE (SET_SRC (x)) != REG
3422 && GET_CODE (SET_SRC (x)) != PLUS)
3423 abort ();
3424
3425 start_sequence ();
3426 if (GET_CODE (SET_SRC (x)) != REG)
3427 temp = force_operand (SET_SRC (x), NULL_RTX);
3428 else
3429 temp = SET_SRC (x);
3430 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3431 seq = get_insns ();
3432 end_sequence ();
3433
3434 emit_insns_before (seq, object);
3435 SET_DEST (x) = new;
3436
3437 if (! validate_change (object, &SET_SRC (x), temp, 0)
3438 || ! extra_insns)
3439 abort ();
3440
3441 return 1;
3442 }
3443
3444 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3445 loc = &SET_SRC (x);
3446 goto restart;
3447
3448 case PLUS:
3449 /* Handle special case of virtual register plus constant. */
3450 if (CONSTANT_P (XEXP (x, 1)))
3451 {
3452 rtx old, new_offset;
3453
3454 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3455 if (GET_CODE (XEXP (x, 0)) == PLUS)
3456 {
3457 rtx inner = XEXP (XEXP (x, 0), 0);
3458
3459 if (inner == virtual_incoming_args_rtx)
3460 new = arg_pointer_rtx, offset = in_arg_offset;
3461 else if (inner == virtual_stack_vars_rtx)
3462 new = frame_pointer_rtx, offset = var_offset;
3463 else if (inner == virtual_stack_dynamic_rtx)
3464 new = stack_pointer_rtx, offset = dynamic_offset;
3465 else if (inner == virtual_outgoing_args_rtx)
3466 new = stack_pointer_rtx, offset = out_arg_offset;
3467 else if (inner == virtual_cfa_rtx)
3468 new = arg_pointer_rtx, offset = cfa_offset;
3469 else
3470 {
3471 loc = &XEXP (x, 0);
3472 goto restart;
3473 }
3474
3475 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3476 extra_insns);
3477 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3478 }
3479
3480 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3481 new = arg_pointer_rtx, offset = in_arg_offset;
3482 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3483 new = frame_pointer_rtx, offset = var_offset;
3484 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3485 new = stack_pointer_rtx, offset = dynamic_offset;
3486 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3487 new = stack_pointer_rtx, offset = out_arg_offset;
3488 else if (XEXP (x, 0) == virtual_cfa_rtx)
3489 new = arg_pointer_rtx, offset = cfa_offset;
3490 else
3491 {
3492 /* We know the second operand is a constant. Unless the
3493 first operand is a REG (which has already been checked),
3494 it needs to be checked. */
3495 if (GET_CODE (XEXP (x, 0)) != REG)
3496 {
3497 loc = &XEXP (x, 0);
3498 goto restart;
3499 }
3500 return 1;
3501 }
3502
3503 new_offset = plus_constant (XEXP (x, 1), offset);
3504
3505 /* If the new constant is zero, try to replace the sum with just
3506 the register. */
3507 if (new_offset == const0_rtx
3508 && validate_change (object, loc, new, 0))
3509 return 1;
3510
3511 /* Next try to replace the register and new offset.
3512 There are two changes to validate here and we can't assume that,
3513 when the old offset equals the new one, just changing the register
3514 will yield a valid insn. In the interests of a little efficiency,
3515 however, we only call validate_change once (we don't queue up the
3516 changes and then call apply_change_group). */
3517
3518 old = XEXP (x, 0);
3519 if (offset == 0
3520 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3521 : (XEXP (x, 0) = new,
3522 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3523 {
3524 if (! extra_insns)
3525 {
3526 XEXP (x, 0) = old;
3527 return 0;
3528 }
3529
3530 /* Otherwise copy the new constant into a register and replace
3531 the constant with that register. */
3532 temp = gen_reg_rtx (Pmode);
3533 XEXP (x, 0) = new;
3534 if (validate_change (object, &XEXP (x, 1), temp, 0))
3535 emit_insn_before (gen_move_insn (temp, new_offset), object);
3536 else
3537 {
3538 /* If that didn't work, replace this expression with a
3539 register containing the sum. */
3540
3541 XEXP (x, 0) = old;
3542 new = gen_rtx_PLUS (Pmode, new, new_offset);
3543
3544 start_sequence ();
3545 temp = force_operand (new, NULL_RTX);
3546 seq = get_insns ();
3547 end_sequence ();
3548
3549 emit_insns_before (seq, object);
3550 if (! validate_change (object, loc, temp, 0)
3551 && ! validate_replace_rtx (x, temp, object))
3552 abort ();
3553 }
3554 }
3555
3556 return 1;
3557 }
3558
3559 /* Fall through to generic two-operand expression case. */
3560 case EXPR_LIST:
3561 case CALL:
3562 case COMPARE:
3563 case MINUS:
3564 case MULT:
3565 case DIV: case UDIV:
3566 case MOD: case UMOD:
3567 case AND: case IOR: case XOR:
3568 case ROTATERT: case ROTATE:
3569 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3570 case NE: case EQ:
3571 case GE: case GT: case GEU: case GTU:
3572 case LE: case LT: case LEU: case LTU:
3573 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3574 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3575 loc = &XEXP (x, 0);
3576 goto restart;
3577
3578 case MEM:
3579 /* Most cases of MEM that convert to valid addresses have already been
3580 handled by our scan of decls. The only special handling we
3581 need here is to make a copy of the rtx to ensure it isn't being
3582 shared if we have to change it to a pseudo.
3583
3584 If the rtx is a simple reference to an address via a virtual register,
3585 it can potentially be shared. In such cases, first try to make it
3586 a valid address, which can also be shared. Otherwise, copy it and
3587 proceed normally.
3588
3589 First check for common cases that need no processing. These are
3590 usually due to instantiation already being done on a previous instance
3591 of a shared rtx. */
3592
3593 temp = XEXP (x, 0);
3594 if (CONSTANT_ADDRESS_P (temp)
3595 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3596 || temp == arg_pointer_rtx
3597 #endif
3598 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3599 || temp == hard_frame_pointer_rtx
3600 #endif
3601 || temp == frame_pointer_rtx)
3602 return 1;
3603
3604 if (GET_CODE (temp) == PLUS
3605 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3606 && (XEXP (temp, 0) == frame_pointer_rtx
3607 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3608 || XEXP (temp, 0) == hard_frame_pointer_rtx
3609 #endif
3610 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3611 || XEXP (temp, 0) == arg_pointer_rtx
3612 #endif
3613 ))
3614 return 1;
3615
3616 if (temp == virtual_stack_vars_rtx
3617 || temp == virtual_incoming_args_rtx
3618 || (GET_CODE (temp) == PLUS
3619 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3620 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3621 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3622 {
3623 /* This MEM may be shared. If the substitution can be done without
3624 the need to generate new pseudos, we want to do it in place
3625 so all copies of the shared rtx benefit. The call below will
3626 only make substitutions if the resulting address is still
3627 valid.
3628
3629 Note that we cannot pass X as the object in the recursive call
3630 since the insn being processed may not allow all valid
3631 addresses. However, if we were not passed an object, we can
3632 only modify X without copying it if X will have a valid
3633 address.
3634
3635 ??? Also note that this can still lose if OBJECT is an insn that
3636 has fewer restrictions on an address than some other insn.
3637 In that case, we will modify the shared address. This case
3638 doesn't seem very likely, though. One case where this could
3639 happen is in the case of a USE or CLOBBER reference, but we
3640 take care of that below. */
3641
3642 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3643 object ? object : x, 0))
3644 return 1;
3645
3646 /* Otherwise make a copy and process that copy. We copy the entire
3647 RTL expression since it might be a PLUS which could also be
3648 shared. */
3649 *loc = x = copy_rtx (x);
3650 }
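          /* Illustrative example (added commentary, not in the original
             source): assuming var_offset is -16 on a target whose frame
             grows downward, a shared address such as
                 (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 8)))
             is rewritten in place to
                 (mem:SI (plus:SI (reg frame-pointer) (const_int -8)))
             whenever the folded form is a valid address; only when it is
             not do we resort to the copy made above.  */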
3651
3652 /* Fall through to generic unary operation case. */
3653 case SUBREG:
3654 case STRICT_LOW_PART:
3655 case NEG: case NOT:
3656 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3657 case SIGN_EXTEND: case ZERO_EXTEND:
3658 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3659 case FLOAT: case FIX:
3660 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3661 case ABS:
3662 case SQRT:
3663 case FFS:
3664 /* These cases either have just one operand or we know that we need not
3665 check the rest of the operands. */
3666 loc = &XEXP (x, 0);
3667 goto restart;
3668
3669 case USE:
3670 case CLOBBER:
3671 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
3672 go ahead and make the invalid change, but do it to a copy. For a REG,
3673 just make the recursive call, since there's no chance of a problem. */
3674
3675 if ((GET_CODE (XEXP (x, 0)) == MEM
3676 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3677 0))
3678 || (GET_CODE (XEXP (x, 0)) == REG
3679 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3680 return 1;
3681
3682 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3683 loc = &XEXP (x, 0);
3684 goto restart;
3685
3686 case REG:
3687 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3688 in front of this insn and substitute the temporary. */
3689 if (x == virtual_incoming_args_rtx)
3690 new = arg_pointer_rtx, offset = in_arg_offset;
3691 else if (x == virtual_stack_vars_rtx)
3692 new = frame_pointer_rtx, offset = var_offset;
3693 else if (x == virtual_stack_dynamic_rtx)
3694 new = stack_pointer_rtx, offset = dynamic_offset;
3695 else if (x == virtual_outgoing_args_rtx)
3696 new = stack_pointer_rtx, offset = out_arg_offset;
3697 else if (x == virtual_cfa_rtx)
3698 new = arg_pointer_rtx, offset = cfa_offset;
3699
3700 if (new)
3701 {
3702 temp = plus_constant (new, offset);
3703 if (!validate_change (object, loc, temp, 0))
3704 {
3705 if (! extra_insns)
3706 return 0;
3707
3708 start_sequence ();
3709 temp = force_operand (temp, NULL_RTX);
3710 seq = get_insns ();
3711 end_sequence ();
3712
3713 emit_insns_before (seq, object);
3714 if (! validate_change (object, loc, temp, 0)
3715 && ! validate_replace_rtx (x, temp, object))
3716 abort ();
3717 }
3718 }
3719
3720 return 1;
3721
3722 case ADDRESSOF:
3723 if (GET_CODE (XEXP (x, 0)) == REG)
3724 return 1;
3725
3726 else if (GET_CODE (XEXP (x, 0)) == MEM)
3727 {
3728 /* If we have a (addressof (mem ..)), do any instantiation inside
3729 since we know we'll be making the inside valid when we finally
3730 remove the ADDRESSOF. */
3731 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3732 return 1;
3733 }
3734 break;
3735
3736 default:
3737 break;
3738 }
3739
3740 /* Scan all subexpressions. */
3741 fmt = GET_RTX_FORMAT (code);
3742 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3743 if (*fmt == 'e')
3744 {
3745 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3746 return 0;
3747 }
3748 else if (*fmt == 'E')
3749 for (j = 0; j < XVECLEN (x, i); j++)
3750 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3751 extra_insns))
3752 return 0;
3753
3754 return 1;
3755 }
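/* The following is an illustrative sketch, not part of the original file:
   a minimal, hypothetical helper showing the validate_change protocol that
   instantiate_virtual_regs_1 relies on.  It tries to fold BASE plus OFFSET
   into *LOC in place and, if the target rejects the folded address,
   computes the sum into a fresh pseudo ahead of OBJECT and substitutes
   that register instead.  */
#if 0
static int
fold_offset_or_copy (object, loc, base, offset)
     rtx object;
     rtx *loc;
     rtx base;
     HOST_WIDE_INT offset;
{
  rtx sum = plus_constant (base, offset);
  rtx seq, temp;

  /* First ask recog whether OBJECT accepts the folded form.  */
  if (validate_change (object, loc, sum, 0))
    return 1;

  /* Otherwise compute the sum into a pseudo register emitted before
     OBJECT; a lone REG is valid wherever a virtual register was.  */
  start_sequence ();
  temp = force_operand (sum, NULL_RTX);
  seq = get_insns ();
  end_sequence ();
  emit_insns_before (seq, object);

  return validate_change (object, loc, temp, 0);
}
#endif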
3756 \f
3757 /* Optimization: assuming this function does not receive nonlocal gotos,
3758 delete the handlers for such, as well as the insns to establish
3759 and disestablish them. */
3760
3761 static void
3762 delete_handlers ()
3763 {
3764 rtx insn;
3765 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3766 {
3767 /* Delete the handler by turning off the flag that would
3768 prevent jump_optimize from deleting it.
3769 Also permit deletion of the nonlocal labels themselves
3770 if nothing local refers to them. */
3771 if (GET_CODE (insn) == CODE_LABEL)
3772 {
3773 tree t, last_t;
3774
3775 LABEL_PRESERVE_P (insn) = 0;
3776
3777 /* Remove it from the nonlocal_label list, to avoid confusing
3778 flow. */
3779 for (t = nonlocal_labels, last_t = 0; t;
3780 last_t = t, t = TREE_CHAIN (t))
3781 if (DECL_RTL (TREE_VALUE (t)) == insn)
3782 break;
3783 if (t)
3784 {
3785 if (! last_t)
3786 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3787 else
3788 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3789 }
3790 }
3791 if (GET_CODE (insn) == INSN)
3792 {
3793 int can_delete = 0;
3794 rtx t;
3795 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3796 if (reg_mentioned_p (t, PATTERN (insn)))
3797 {
3798 can_delete = 1;
3799 break;
3800 }
3801 if (can_delete
3802 || (nonlocal_goto_stack_level != 0
3803 && reg_mentioned_p (nonlocal_goto_stack_level,
3804 PATTERN (insn))))
3805 delete_insn (insn);
3806 }
3807 }
3808 }
3809 \f
3810 /* Output a USE for any register use in RTL.
3811 This is used with -noreg to mark the extent of the lifespan
3812 of any registers used in a user-visible variable's DECL_RTL. */
3813
3814 void
3815 use_variable (rtl)
3816 rtx rtl;
3817 {
3818 if (GET_CODE (rtl) == REG)
3819 /* This is a register variable. */
3820 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3821 else if (GET_CODE (rtl) == MEM
3822 && GET_CODE (XEXP (rtl, 0)) == REG
3823 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3824 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3825 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3826 /* This is a variable-sized structure. */
3827 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3828 }
3829
3830 /* Like use_variable except that it outputs the USEs after INSN
3831 instead of at the end of the insn-chain. */
3832
3833 void
3834 use_variable_after (rtl, insn)
3835 rtx rtl, insn;
3836 {
3837 if (GET_CODE (rtl) == REG)
3838 /* This is a register variable. */
3839 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3840 else if (GET_CODE (rtl) == MEM
3841 && GET_CODE (XEXP (rtl, 0)) == REG
3842 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3843 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3844 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3845 /* This is a variable-sized structure. */
3846 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3847 }
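/* Illustrative usage (added commentary, not in the original source): a
   caller that wants a user variable's register kept live to the end of
   the current insn chain simply passes its DECL_RTL, e.g.

       if (obey_regdecls)
         use_variable (DECL_RTL (decl));

   use_variable_after pins the lifetime at a particular insn instead.  */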
3848 \f
3849 int
3850 max_parm_reg_num ()
3851 {
3852 return max_parm_reg;
3853 }
3854
3855 /* Return the first insn following those generated by `assign_parms'. */
3856
3857 rtx
3858 get_first_nonparm_insn ()
3859 {
3860 if (last_parm_insn)
3861 return NEXT_INSN (last_parm_insn);
3862 return get_insns ();
3863 }
3864
3865 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3866 Crash if there is none. */
3867
3868 rtx
3869 get_first_block_beg ()
3870 {
3871 register rtx searcher;
3872 register rtx insn = get_first_nonparm_insn ();
3873
3874 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3875 if (GET_CODE (searcher) == NOTE
3876 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3877 return searcher;
3878
3879 abort (); /* Invalid call to this function. (See comments above.) */
3880 return NULL_RTX;
3881 }
3882
3883 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3884 This means a type for which function calls must pass an address to the
3885 function or get an address back from the function.
3886 EXP may be a type node or an expression (whose type is tested). */
3887
3888 int
3889 aggregate_value_p (exp)
3890 tree exp;
3891 {
3892 int i, regno, nregs;
3893 rtx reg;
3894 tree type;
3895 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3896 type = exp;
3897 else
3898 type = TREE_TYPE (exp);
3899
3900 if (RETURN_IN_MEMORY (type))
3901 return 1;
3902 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3903 and thus can't be returned in registers. */
3904 if (TREE_ADDRESSABLE (type))
3905 return 1;
3906 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3907 return 1;
3908 /* Make sure we have suitable call-clobbered regs to return
3909 the value in; if not, we must return it in memory. */
3910 reg = hard_function_value (type, 0);
3911
3912 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3913 it is OK. */
3914 if (GET_CODE (reg) != REG)
3915 return 0;
3916
3917 regno = REGNO (reg);
3918 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3919 for (i = 0; i < nregs; i++)
3920 if (! call_used_regs[regno + i])
3921 return 1;
3922 return 0;
3923 }
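/* Worked example (added commentary, not in the original source): on a
   hypothetical target where hard_function_value returns (reg:DI 0),
   HARD_REGNO_NREGS (0, DImode) is 2, so hard regs 0 and 1 must both be
   in call_used_regs for the value to come back in registers; if either
   is call-saved, aggregate_value_p returns 1 and the value is returned
   in memory through a hidden pointer (see function_result_decl below).  */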
3924 \f
3925 /* Assign RTL expressions to the function's parameters.
3926 This may involve copying them into registers and using
3927 those registers as the RTL for them. */
3928
3929 void
3930 assign_parms (fndecl)
3931 tree fndecl;
3932 {
3933 register tree parm;
3934 register rtx entry_parm = 0;
3935 register rtx stack_parm = 0;
3936 CUMULATIVE_ARGS args_so_far;
3937 enum machine_mode promoted_mode, passed_mode;
3938 enum machine_mode nominal_mode, promoted_nominal_mode;
3939 int unsignedp;
3940 /* Total space needed so far for args on the stack,
3941 given as a constant and a tree-expression. */
3942 struct args_size stack_args_size;
3943 tree fntype = TREE_TYPE (fndecl);
3944 tree fnargs = DECL_ARGUMENTS (fndecl);
3945 /* This is used for the arg pointer when referring to stack args. */
3946 rtx internal_arg_pointer;
3947 /* This is a dummy PARM_DECL that we use for the function result if
3948 the function returns a structure. */
3949 tree function_result_decl = 0;
3950 #ifdef SETUP_INCOMING_VARARGS
3951 int varargs_setup = 0;
3952 #endif
3953 rtx conversion_insns = 0;
3954
3955 /* Nonzero if the last arg is named `__builtin_va_alist',
3956 which is used on some machines for old-fashioned non-ANSI varargs.h;
3957 this should be stuck onto the stack as if it had arrived there. */
3958 int hide_last_arg
3959 = (current_function_varargs
3960 && fnargs
3961 && (parm = tree_last (fnargs)) != 0
3962 && DECL_NAME (parm)
3963 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3964 "__builtin_va_alist")));
3965
3966 /* Nonzero if function takes extra anonymous args.
3967 This means the last named arg must be on the stack
3968 right before the anonymous ones. */
3969 int stdarg
3970 = (TYPE_ARG_TYPES (fntype) != 0
3971 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3972 != void_type_node));
3973
3974 current_function_stdarg = stdarg;
3975
3976 /* If the reg that the virtual arg pointer will be translated into is
3977 not a fixed reg or is the stack pointer, make a copy of the virtual
3978 arg pointer, and address parms via the copy. The frame pointer is
3979 considered fixed even though it is not marked as such.
3980
3981 The second time through, simply use ap to avoid generating rtx. */
3982
3983 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3984 || ! (fixed_regs[ARG_POINTER_REGNUM]
3985 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3986 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3987 else
3988 internal_arg_pointer = virtual_incoming_args_rtx;
3989 current_function_internal_arg_pointer = internal_arg_pointer;
3990
3991 stack_args_size.constant = 0;
3992 stack_args_size.var = 0;
3993
3994 /* If struct value address is treated as the first argument, make it so. */
3995 if (aggregate_value_p (DECL_RESULT (fndecl))
3996 && ! current_function_returns_pcc_struct
3997 && struct_value_incoming_rtx == 0)
3998 {
3999 tree type = build_pointer_type (TREE_TYPE (fntype));
4000
4001 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4002
4003 DECL_ARG_TYPE (function_result_decl) = type;
4004 TREE_CHAIN (function_result_decl) = fnargs;
4005 fnargs = function_result_decl;
4006 }
4007
4008 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4009 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4010
4011 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4012 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4013 #else
4014 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4015 #endif
4016
4017 /* We haven't yet found an argument that we must push and pretend the
4018 caller did. */
4019 current_function_pretend_args_size = 0;
4020
4021 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4022 {
4023 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4024 struct args_size stack_offset;
4025 struct args_size arg_size;
4026 int passed_pointer = 0;
4027 int did_conversion = 0;
4028 tree passed_type = DECL_ARG_TYPE (parm);
4029 tree nominal_type = TREE_TYPE (parm);
4030 int pretend_named;
4031
4032 /* Set LAST_NAMED if this is the last named arg before some
4033 anonymous args. */
4034 int last_named = ((TREE_CHAIN (parm) == 0
4035 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4036 && (stdarg || current_function_varargs));
4037 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4038 most machines, if this is a varargs/stdarg function, then we treat
4039 the last named arg as if it were anonymous too. */
4040 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4041
4042 if (TREE_TYPE (parm) == error_mark_node
4043 /* This can happen after weird syntax errors
4044 or if an enum type is defined among the parms. */
4045 || TREE_CODE (parm) != PARM_DECL
4046 || passed_type == NULL)
4047 {
4048 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4049 = gen_rtx_MEM (BLKmode, const0_rtx);
4050 TREE_USED (parm) = 1;
4051 continue;
4052 }
4053
4054 /* For a varargs.h function, save info about regs and stack space
4055 used by the individual args, not including the va_alist arg. */
4056 if (hide_last_arg && last_named)
4057 current_function_args_info = args_so_far;
4058
4059 /* Find mode of arg as it is passed, and mode of arg
4060 as it should be during execution of this function. */
4061 passed_mode = TYPE_MODE (passed_type);
4062 nominal_mode = TYPE_MODE (nominal_type);
4063
4064 /* If the parm's mode is VOID, its value doesn't matter,
4065 so avoid the usual things like emit_move_insn that could crash. */
4066 if (nominal_mode == VOIDmode)
4067 {
4068 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4069 continue;
4070 }
4071
4072 /* If the parm is to be passed as a transparent union, use the
4073 type of the first field for the tests below. We have already
4074 verified that the modes are the same. */
4075 if (DECL_TRANSPARENT_UNION (parm)
4076 || TYPE_TRANSPARENT_UNION (passed_type))
4077 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4078
4079 /* See if this arg was passed by invisible reference. It is if
4080 it is an object whose size depends on the contents of the
4081 object itself or if the machine requires these objects be passed
4082 that way. */
4083
4084 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4085 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4086 || TREE_ADDRESSABLE (passed_type)
4087 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4088 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4089 passed_type, named_arg)
4090 #endif
4091 )
4092 {
4093 passed_type = nominal_type = build_pointer_type (passed_type);
4094 passed_pointer = 1;
4095 passed_mode = nominal_mode = Pmode;
4096 }
4097
4098 promoted_mode = passed_mode;
4099
4100 #ifdef PROMOTE_FUNCTION_ARGS
4101 /* Compute the mode to which the arg is actually extended. */
4102 unsignedp = TREE_UNSIGNED (passed_type);
4103 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4104 #endif
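      /* Added worked example: on a target whose PROMOTE_MODE widens
         sub-word integers, a `signed char' parm yields promoted_mode ==
         SImode with unsignedp == 0, i.e. the caller is assumed to have
         sign-extended the incoming value to a full word.  */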
4105
4106 /* Let machine desc say which reg (if any) the parm arrives in.
4107 0 means it arrives on the stack. */
4108 #ifdef FUNCTION_INCOMING_ARG
4109 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4110 passed_type, named_arg);
4111 #else
4112 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4113 passed_type, named_arg);
4114 #endif
4115
4116 if (entry_parm == 0)
4117 promoted_mode = passed_mode;
4118
4119 #ifdef SETUP_INCOMING_VARARGS
4120 /* If this is the last named parameter, do any required setup for
4121 varargs or stdargs. We need to know about the case of this being an
4122 addressable type, in which case we skip the registers it
4123 would have arrived in.
4124
4125 For stdargs, LAST_NAMED will be set for two parameters, the one that
4126 is actually the last named, and the dummy parameter. We only
4127 want to do this action once.
4128
4129 Also, indicate when RTL generation is to be suppressed. */
4130 if (last_named && !varargs_setup)
4131 {
4132 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4133 current_function_pretend_args_size, 0);
4134 varargs_setup = 1;
4135 }
4136 #endif
4137
4138 /* Determine parm's home in the stack,
4139 in case it arrives in the stack or we should pretend it did.
4140
4141 Compute the stack position and rtx where the argument arrives
4142 and its size.
4143
4144 There is one complexity here: If this was a parameter that would
4145 have been passed in registers, but wasn't, solely because it is
4146 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4147 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4148 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4149 0 as it was the previous time. */
4150
4151 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4152 locate_and_pad_parm (promoted_mode, passed_type,
4153 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4154 1,
4155 #else
4156 #ifdef FUNCTION_INCOMING_ARG
4157 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4158 passed_type,
4159 pretend_named) != 0,
4160 #else
4161 FUNCTION_ARG (args_so_far, promoted_mode,
4162 passed_type,
4163 pretend_named) != 0,
4164 #endif
4165 #endif
4166 fndecl, &stack_args_size, &stack_offset, &arg_size);
4167
4168 {
4169 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4170
4171 if (offset_rtx == const0_rtx)
4172 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4173 else
4174 stack_parm = gen_rtx_MEM (promoted_mode,
4175 gen_rtx_PLUS (Pmode,
4176 internal_arg_pointer,
4177 offset_rtx));
4178
4179 /* If this is a memory ref that contains aggregate components,
4180 mark it as such for cse and loop optimize. Likewise if it
4181 is readonly. */
4182 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4183 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4184 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4185 }
4186
4187 /* If this parameter was passed both in registers and in the stack,
4188 use the copy on the stack. */
4189 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4190 entry_parm = 0;
4191
4192 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4193 /* If this parm was passed part in regs and part in memory,
4194 pretend it arrived entirely in memory
4195 by pushing the register-part onto the stack.
4196
4197 In the special case of a DImode or DFmode that is split,
4198 we could put it together in a pseudoreg directly,
4199 but for now that's not worth bothering with. */
4200
4201 if (entry_parm)
4202 {
4203 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4204 passed_type, named_arg);
4205
4206 if (nregs > 0)
4207 {
4208 current_function_pretend_args_size
4209 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4210 / (PARM_BOUNDARY / BITS_PER_UNIT)
4211 * (PARM_BOUNDARY / BITS_PER_UNIT));
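              /* Added worked example: with nregs == 3, UNITS_PER_WORD == 4
                 and PARM_BOUNDARY == 64, the 12 register bytes are rounded
                 up to the next 8-byte parameter boundary, giving a pretend
                 size of 16 bytes.  */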
4212
4213 /* Handle calls that pass values in multiple non-contiguous
4214 locations. The Irix 6 ABI has examples of this. */
4215 if (GET_CODE (entry_parm) == PARALLEL)
4216 emit_group_store (validize_mem (stack_parm), entry_parm,
4217 int_size_in_bytes (TREE_TYPE (parm)),
4218 (TYPE_ALIGN (TREE_TYPE (parm))
4219 / BITS_PER_UNIT));
4220 else
4221 move_block_from_reg (REGNO (entry_parm),
4222 validize_mem (stack_parm), nregs,
4223 int_size_in_bytes (TREE_TYPE (parm)));
4224
4225 entry_parm = stack_parm;
4226 }
4227 }
4228 #endif
4229
4230 /* If we didn't decide this parm came in a register,
4231 by default it came on the stack. */
4232 if (entry_parm == 0)
4233 entry_parm = stack_parm;
4234
4235 /* Record permanently how this parm was passed. */
4236 DECL_INCOMING_RTL (parm) = entry_parm;
4237
4238 /* If there is actually space on the stack for this parm,
4239 count it in stack_args_size; otherwise set stack_parm to 0
4240 to indicate there is no preallocated stack slot for the parm. */
4241
4242 if (entry_parm == stack_parm
4243 || (GET_CODE (entry_parm) == PARALLEL
4244 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4245 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4246 /* On some machines, even if a parm value arrives in a register
4247 there is still an (uninitialized) stack slot allocated for it.
4248
4249 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4250 whether this parameter already has a stack slot allocated,
4251 because an arg block exists only if current_function_args_size
4252 is larger than some threshold, and we haven't calculated that
4253 yet. So, for now, we just assume that stack slots never exist
4254 in this case. */
4255 || REG_PARM_STACK_SPACE (fndecl) > 0
4256 #endif
4257 )
4258 {
4259 stack_args_size.constant += arg_size.constant;
4260 if (arg_size.var)
4261 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4262 }
4263 else
4264 /* No stack slot was pushed for this parm. */
4265 stack_parm = 0;
4266
4267 /* Update info on where next arg arrives in registers. */
4268
4269 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4270 passed_type, named_arg);
4271
4272 /* If we can't trust the parm stack slot to be aligned enough
4273 for its ultimate type, don't use that slot after entry.
4274 We'll make another stack slot, if we need one. */
4275 {
4276 int thisparm_boundary
4277 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4278
4279 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4280 stack_parm = 0;
4281 }
4282
4283 /* If parm was passed in memory, and we need to convert it on entry,
4284 don't store it back in that same slot. */
4285 if (entry_parm != 0
4286 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4287 stack_parm = 0;
4288
4289 #if 0
4290 /* Now adjust STACK_PARM to the mode and precise location
4291 where this parameter should live during execution,
4292 if we discover that it must live in the stack during execution.
4293 To make debuggers happier on big-endian machines, we store
4294 the value in the last bytes of the space available. */
4295
4296 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4297 && stack_parm != 0)
4298 {
4299 rtx offset_rtx;
4300
4301 if (BYTES_BIG_ENDIAN
4302 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4303 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4304 - GET_MODE_SIZE (nominal_mode));
4305
4306 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4307 if (offset_rtx == const0_rtx)
4308 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4309 else
4310 stack_parm = gen_rtx_MEM (nominal_mode,
4311 gen_rtx_PLUS (Pmode,
4312 internal_arg_pointer,
4313 offset_rtx));
4314
4315 /* If this is a memory ref that contains aggregate components,
4316 mark it as such for cse and loop optimize. */
4317 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4318 }
4319 #endif /* 0 */
4320
4321 #ifdef STACK_REGS
4322 /* We need this "use" info, because the gcc-register->stack-register
4323 converter in reg-stack.c needs to know which registers are active
4324 at the start of the function call. The actual parameter loading
4325 instructions are not always available by then, since they might
4326 have been optimized away. */
4327
4328 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4329 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4330 #endif
4331
4332 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4333 in the mode in which it arrives.
4334 STACK_PARM is an RTX for a stack slot where the parameter can live
4335 during the function (in case we want to put it there).
4336 STACK_PARM is 0 if no stack slot was pushed for it.
4337
4338 Now output code if necessary to convert ENTRY_PARM to
4339 the type in which this function declares it,
4340 and store that result in an appropriate place,
4341 which may be a pseudo reg, may be STACK_PARM,
4342 or may be a local stack slot if STACK_PARM is 0.
4343
4344 Set DECL_RTL to that place. */
4345
4346 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4347 {
4348 /* If a BLKmode arrives in registers, copy it to a stack slot.
4349 Handle calls that pass values in multiple non-contiguous
4350 locations. The Irix 6 ABI has examples of this. */
4351 if (GET_CODE (entry_parm) == REG
4352 || GET_CODE (entry_parm) == PARALLEL)
4353 {
4354 int size_stored
4355 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4356 UNITS_PER_WORD);
4357
4358 /* Note that we will be storing an integral number of words.
4359 So we have to be careful to ensure that we allocate an
4360 integral number of words. We do this below in the
4361 assign_stack_local if space was not allocated in the argument
4362 list. If it was, this will not work if PARM_BOUNDARY is not
4363 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4364 if it becomes a problem. */
4365
4366 if (stack_parm == 0)
4367 {
4368 stack_parm
4369 = assign_stack_local (GET_MODE (entry_parm),
4370 size_stored, 0);
4371
4372 /* If this is a memory ref that contains aggregate
4373 components, mark it as such for cse and loop optimize. */
4374 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4375 }
4376
4377 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4378 abort ();
4379
4380 if (TREE_READONLY (parm))
4381 RTX_UNCHANGING_P (stack_parm) = 1;
4382
4383 /* Handle calls that pass values in multiple non-contiguous
4384 locations. The Irix 6 ABI has examples of this. */
4385 if (GET_CODE (entry_parm) == PARALLEL)
4386 emit_group_store (validize_mem (stack_parm), entry_parm,
4387 int_size_in_bytes (TREE_TYPE (parm)),
4388 (TYPE_ALIGN (TREE_TYPE (parm))
4389 / BITS_PER_UNIT));
4390 else
4391 move_block_from_reg (REGNO (entry_parm),
4392 validize_mem (stack_parm),
4393 size_stored / UNITS_PER_WORD,
4394 int_size_in_bytes (TREE_TYPE (parm)));
4395 }
4396 DECL_RTL (parm) = stack_parm;
4397 }
4398 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4399 && ! DECL_INLINE (fndecl))
4400 /* layout_decl may set this. */
4401 || TREE_ADDRESSABLE (parm)
4402 || TREE_SIDE_EFFECTS (parm)
4403 /* If -ffloat-store specified, don't put explicit
4404 float variables into registers. */
4405 || (flag_float_store
4406 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4407 /* Always assign pseudo to structure return or item passed
4408 by invisible reference. */
4409 || passed_pointer || parm == function_result_decl)
4410 {
4411 /* Store the parm in a pseudoregister during the function, but we
4412 may need to do it in a wider mode. */
4413
4414 register rtx parmreg;
4415 int regno, regnoi = 0, regnor = 0;
4416
4417 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4418
4419 promoted_nominal_mode
4420 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4421
4422 parmreg = gen_reg_rtx (promoted_nominal_mode);
4423 mark_user_reg (parmreg);
4424
4425 /* If this was an item that we received a pointer to, set DECL_RTL
4426 appropriately. */
4427 if (passed_pointer)
4428 {
4429 DECL_RTL (parm)
4430 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4431 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4432 }
4433 else
4434 DECL_RTL (parm) = parmreg;
4435
4436 /* Copy the value into the register. */
4437 if (nominal_mode != passed_mode
4438 || promoted_nominal_mode != promoted_mode)
4439 {
4440 int save_tree_used;
4441 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4442 mode, by the caller. We now have to convert it to
4443 NOMINAL_MODE, if different. However, PARMREG may be in
4444 a different mode than NOMINAL_MODE if it is being stored
4445 promoted.
4446
4447 If ENTRY_PARM is a hard register, it might be in a register
4448 not valid for operating in its mode (e.g., an odd-numbered
4449 register for a DFmode). In that case, moves are the only
4450 thing valid, so we can't do a convert from there. This
4451 occurs when the calling sequence allows such misaligned
4452 usages.
4453
4454 In addition, the conversion may involve a call, which could
4455 clobber parameters which haven't been copied to pseudo
4456 registers yet. Therefore, we must first copy the parm to
4457 a pseudo reg here, and save the conversion until after all
4458 parameters have been moved. */
4459
4460 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4461
4462 emit_move_insn (tempreg, validize_mem (entry_parm));
4463
4464 push_to_sequence (conversion_insns);
4465 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4466
4467 /* TREE_USED gets set erroneously during expand_assignment. */
4468 save_tree_used = TREE_USED (parm);
4469 expand_assignment (parm,
4470 make_tree (nominal_type, tempreg), 0, 0);
4471 TREE_USED (parm) = save_tree_used;
4472 conversion_insns = get_insns ();
4473 did_conversion = 1;
4474 end_sequence ();
4475 }
4476 else
4477 emit_move_insn (parmreg, validize_mem (entry_parm));
4478
4479 /* If we were passed a pointer but the actual value
4480 can safely live in a register, put it in one. */
4481 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4482 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4483 && ! DECL_INLINE (fndecl))
4484 /* layout_decl may set this. */
4485 || TREE_ADDRESSABLE (parm)
4486 || TREE_SIDE_EFFECTS (parm)
4487 /* If -ffloat-store specified, don't put explicit
4488 float variables into registers. */
4489 || (flag_float_store
4490 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4491 {
4492 /* We can't use nominal_mode, because it will have been set to
4493 Pmode above. We must use the actual mode of the parm. */
4494 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4495 mark_user_reg (parmreg);
4496 emit_move_insn (parmreg, DECL_RTL (parm));
4497 DECL_RTL (parm) = parmreg;
4498 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4499 now the parm. */
4500 stack_parm = 0;
4501 }
4502 #ifdef FUNCTION_ARG_CALLEE_COPIES
4503 /* If we are passed an arg by reference and it is our responsibility
4504 to make a copy, do it now.
4505 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4506 original argument, so we must recreate them in the call to
4507 FUNCTION_ARG_CALLEE_COPIES. */
4508 /* ??? Later add code to handle the case that if the argument isn't
4509 modified, don't do the copy. */
4510
4511 else if (passed_pointer
4512 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4513 TYPE_MODE (DECL_ARG_TYPE (parm)),
4514 DECL_ARG_TYPE (parm),
4515 named_arg)
4516 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4517 {
4518 rtx copy;
4519 tree type = DECL_ARG_TYPE (parm);
4520
4521 /* This sequence may involve a library call perhaps clobbering
4522 registers that haven't been copied to pseudos yet. */
4523
4524 push_to_sequence (conversion_insns);
4525
4526 if (TYPE_SIZE (type) == 0
4527 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4528 /* This is a variable-sized object. */
4529 copy = gen_rtx_MEM (BLKmode,
4530 allocate_dynamic_stack_space
4531 (expr_size (parm), NULL_RTX,
4532 TYPE_ALIGN (type)));
4533 else
4534 copy = assign_stack_temp (TYPE_MODE (type),
4535 int_size_in_bytes (type), 1);
4536 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4537 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4538
4539 store_expr (parm, copy, 0);
4540 emit_move_insn (parmreg, XEXP (copy, 0));
4541 if (current_function_check_memory_usage)
4542 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4543 XEXP (copy, 0), Pmode,
4544 GEN_INT (int_size_in_bytes (type)),
4545 TYPE_MODE (sizetype),
4546 GEN_INT (MEMORY_USE_RW),
4547 TYPE_MODE (integer_type_node));
4548 conversion_insns = get_insns ();
4549 did_conversion = 1;
4550 end_sequence ();
4551 }
4552 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4553
4554 /* In any case, record the parm's desired stack location
4555 in case we later discover it must live in the stack.
4556
4557 If it is a COMPLEX value, store the stack location for both
4558 halves. */
4559
4560 if (GET_CODE (parmreg) == CONCAT)
4561 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4562 else
4563 regno = REGNO (parmreg);
4564
4565 if (regno >= max_parm_reg)
4566 {
4567 rtx *new;
4568 int old_max_parm_reg = max_parm_reg;
4569
4570 /* It's slow to expand this one register at a time,
4571 but it's also rare and we need max_parm_reg to be
4572 precisely correct. */
4573 max_parm_reg = regno + 1;
4574 new = (rtx *) xrealloc (parm_reg_stack_loc,
4575 max_parm_reg * sizeof (rtx));
4576 bzero ((char *) (new + old_max_parm_reg),
4577 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4578 parm_reg_stack_loc = new;
4579 }
4580
4581 if (GET_CODE (parmreg) == CONCAT)
4582 {
4583 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4584
4585 regnor = REGNO (gen_realpart (submode, parmreg));
4586 regnoi = REGNO (gen_imagpart (submode, parmreg));
4587
4588 if (stack_parm != 0)
4589 {
4590 parm_reg_stack_loc[regnor]
4591 = gen_realpart (submode, stack_parm);
4592 parm_reg_stack_loc[regnoi]
4593 = gen_imagpart (submode, stack_parm);
4594 }
4595 else
4596 {
4597 parm_reg_stack_loc[regnor] = 0;
4598 parm_reg_stack_loc[regnoi] = 0;
4599 }
4600 }
4601 else
4602 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4603
4604 /* Mark the register as eliminable if we did no conversion
4605 and it was copied from memory at a fixed offset,
4606 and the arg pointer was not copied to a pseudo-reg.
4607 If the arg pointer is a pseudo reg or the offset formed
4608 an invalid address, such memory-equivalences
4609 as we make here would screw up life analysis for it. */
4610 if (nominal_mode == passed_mode
4611 && ! did_conversion
4612 && stack_parm != 0
4613 && GET_CODE (stack_parm) == MEM
4614 && stack_offset.var == 0
4615 && reg_mentioned_p (virtual_incoming_args_rtx,
4616 XEXP (stack_parm, 0)))
4617 {
4618 rtx linsn = get_last_insn ();
4619 rtx sinsn, set;
4620
4621 /* Mark complex types separately. */
4622 if (GET_CODE (parmreg) == CONCAT)
4623 /* Scan backwards for the set of the real and
4624 imaginary parts. */
4625 for (sinsn = linsn; sinsn != 0;
4626 sinsn = prev_nonnote_insn (sinsn))
4627 {
4628 set = single_set (sinsn);
4629 if (set != 0
4630 && SET_DEST (set) == regno_reg_rtx [regnoi])
4631 REG_NOTES (sinsn)
4632 = gen_rtx_EXPR_LIST (REG_EQUIV,
4633 parm_reg_stack_loc[regnoi],
4634 REG_NOTES (sinsn));
4635 else if (set != 0
4636 && SET_DEST (set) == regno_reg_rtx [regnor])
4637 REG_NOTES (sinsn)
4638 = gen_rtx_EXPR_LIST (REG_EQUIV,
4639 parm_reg_stack_loc[regnor],
4640 REG_NOTES (sinsn));
4641 }
4642 else if ((set = single_set (linsn)) != 0
4643 && SET_DEST (set) == parmreg)
4644 REG_NOTES (linsn)
4645 = gen_rtx_EXPR_LIST (REG_EQUIV,
4646 stack_parm, REG_NOTES (linsn));
4647 }
4648
4649 /* For a pointer data type, suggest a pointer register. */
4650 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4651 mark_reg_pointer (parmreg,
4652 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4653 / BITS_PER_UNIT));
4654 }
4655 else
4656 {
4657 /* Value must be stored in the stack slot STACK_PARM
4658 during function execution. */
4659
4660 if (promoted_mode != nominal_mode)
4661 {
4662 /* Conversion is required. */
4663 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4664
4665 emit_move_insn (tempreg, validize_mem (entry_parm));
4666
4667 push_to_sequence (conversion_insns);
4668 entry_parm = convert_to_mode (nominal_mode, tempreg,
4669 TREE_UNSIGNED (TREE_TYPE (parm)));
4670 if (stack_parm)
4671 {
4672 /* ??? This may need a big-endian conversion on sparc64. */
4673 stack_parm = change_address (stack_parm, nominal_mode,
4674 NULL_RTX);
4675 }
4676 conversion_insns = get_insns ();
4677 did_conversion = 1;
4678 end_sequence ();
4679 }
4680
4681 if (entry_parm != stack_parm)
4682 {
4683 if (stack_parm == 0)
4684 {
4685 stack_parm
4686 = assign_stack_local (GET_MODE (entry_parm),
4687 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4688 /* If this is a memory ref that contains aggregate components,
4689 mark it as such for cse and loop optimize. */
4690 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4691 }
4692
4693 if (promoted_mode != nominal_mode)
4694 {
4695 push_to_sequence (conversion_insns);
4696 emit_move_insn (validize_mem (stack_parm),
4697 validize_mem (entry_parm));
4698 conversion_insns = get_insns ();
4699 end_sequence ();
4700 }
4701 else
4702 emit_move_insn (validize_mem (stack_parm),
4703 validize_mem (entry_parm));
4704 }
4705 if (current_function_check_memory_usage)
4706 {
4707 push_to_sequence (conversion_insns);
4708 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4709 XEXP (stack_parm, 0), Pmode,
4710 GEN_INT (GET_MODE_SIZE (GET_MODE
4711 (entry_parm))),
4712 TYPE_MODE (sizetype),
4713 GEN_INT (MEMORY_USE_RW),
4714 TYPE_MODE (integer_type_node));
4715
4716 conversion_insns = get_insns ();
4717 end_sequence ();
4718 }
4719 DECL_RTL (parm) = stack_parm;
4720 }
4721
4722 /* If this "parameter" was the place where we are receiving the
4723 function's incoming structure pointer, set up the result. */
4724 if (parm == function_result_decl)
4725 {
4726 tree result = DECL_RESULT (fndecl);
4727 tree restype = TREE_TYPE (result);
4728
4729 DECL_RTL (result)
4730 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4731
4732 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4733 AGGREGATE_TYPE_P (restype));
4734 }
4735
4736 if (TREE_THIS_VOLATILE (parm))
4737 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4738 if (TREE_READONLY (parm))
4739 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4740 }
4741
4742 /* Output all parameter conversion instructions (possibly including calls)
4743 now that all parameters have been copied out of hard registers. */
4744 emit_insns (conversion_insns);
4745
4746 last_parm_insn = get_last_insn ();
4747
4748 current_function_args_size = stack_args_size.constant;
4749
4750 /* Adjust function incoming argument size for alignment and
4751 minimum length. */
4752
4753 #ifdef REG_PARM_STACK_SPACE
4754 #ifndef MAYBE_REG_PARM_STACK_SPACE
4755 current_function_args_size = MAX (current_function_args_size,
4756 REG_PARM_STACK_SPACE (fndecl));
4757 #endif
4758 #endif
4759
4760 #ifdef STACK_BOUNDARY
4761 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4762
4763 current_function_args_size
4764 = ((current_function_args_size + STACK_BYTES - 1)
4765 / STACK_BYTES) * STACK_BYTES;
4766 #endif
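  /* Added worked example: with STACK_BOUNDARY == 64, STACK_BYTES is 8,
     so an args size of 20 bytes is rounded up by the expression above to
     (20 + 8 - 1) / 8 * 8 == 24.  */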
4767
4768 #ifdef ARGS_GROW_DOWNWARD
4769 current_function_arg_offset_rtx
4770 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4771 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4772 size_int (-stack_args_size.constant)),
4773 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4774 #else
4775 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4776 #endif
4777
4778 /* See how many bytes, if any, of its args a function should try to pop
4779 on return. */
4780
4781 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4782 current_function_args_size);
4783
4784 /* For a stdarg.h function, save info about
4785 regs and stack space used by the named args. */
4786
4787 if (!hide_last_arg)
4788 current_function_args_info = args_so_far;
4789
4790 /* Set the rtx used for the function return value. Put this in its
4791 own variable so any optimizers that need this information don't have
4792 to include tree.h. Do this here so it gets done when an inlined
4793 function gets output. */
4794
4795 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4796 }
4797 \f
4798 /* Indicate whether REGNO is an incoming argument to the current function
4799 that was promoted to a wider mode. If so, return the RTX for the
4800 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4801 that REGNO is promoted from and whether the promotion was signed or
4802 unsigned. */
4803
4804 #ifdef PROMOTE_FUNCTION_ARGS
4805
4806 rtx
4807 promoted_input_arg (regno, pmode, punsignedp)
4808 int regno;
4809 enum machine_mode *pmode;
4810 int *punsignedp;
4811 {
4812 tree arg;
4813
4814 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4815 arg = TREE_CHAIN (arg))
4816 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4817 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4818 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4819 {
4820 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4821 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4822
4823 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4824 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4825 && mode != DECL_MODE (arg))
4826 {
4827 *pmode = DECL_MODE (arg);
4828 *punsignedp = unsignedp;
4829 return DECL_INCOMING_RTL (arg);
4830 }
4831 }
4832
4833 return 0;
4834 }
4835
4836 #endif
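/* Illustrative usage (a sketch, not part of the original file): a pass
   that sees hard register REGNO live at function entry can ask whether
   it really carries a promoted argument of some narrower mode.  The
   helper below is hypothetical.  */
#if 0
static void
note_promoted_arg (regno)
     int regno;
{
  enum machine_mode from_mode;
  int arg_unsignedp;
  rtx incoming = promoted_input_arg (regno, &from_mode, &arg_unsignedp);

  if (incoming != 0)
    /* INCOMING is the promoted hard reg; the caller extended the value
       from FROM_MODE, zero-extending iff ARG_UNSIGNEDP is nonzero.  */
    ;
}
#endif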
4837 \f
4838 /* Compute the size and offset from the start of the stacked arguments for a
4839 parm passed in mode PASSED_MODE and with type TYPE.
4840
4841 INITIAL_OFFSET_PTR points to the current offset into the stacked
4842 arguments.
4843
4844 The starting offset and size for this parm are returned in *OFFSET_PTR
4845 and *ARG_SIZE_PTR, respectively.
4846
4847 IN_REGS is non-zero if the argument will be passed in registers. It will
4848 never be set if REG_PARM_STACK_SPACE is not defined.
4849
4850 FNDECL is the function in which the argument was defined.
4851
4852 There are two types of rounding that are done. The first, controlled by
4853 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4854 list to be aligned to the specific boundary (in bits). This rounding
4855 affects the initial and starting offsets, but not the argument size.
4856
4857 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4858 optionally rounds the size of the parm to PARM_BOUNDARY. The
4859 initial offset is not affected by this rounding, while the size always
4860 is and the starting offset may be. */
4861
4862 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4863 initial_offset_ptr is positive because locate_and_pad_parm's
4864 callers pass in the total size of args so far as
4865 initial_offset_ptr. arg_size_ptr is always positive. */
4866
4867 void
4868 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4869 initial_offset_ptr, offset_ptr, arg_size_ptr)
4870 enum machine_mode passed_mode;
4871 tree type;
4872 int in_regs;
4873 tree fndecl ATTRIBUTE_UNUSED;
4874 struct args_size *initial_offset_ptr;
4875 struct args_size *offset_ptr;
4876 struct args_size *arg_size_ptr;
4877 {
4878 tree sizetree
4879 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4880 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4881 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4882
4883 #ifdef REG_PARM_STACK_SPACE
4884 /* If we have found a stack parm before we reach the end of the
4885 area reserved for registers, skip that area. */
4886 if (! in_regs)
4887 {
4888 int reg_parm_stack_space = 0;
4889
4890 #ifdef MAYBE_REG_PARM_STACK_SPACE
4891 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4892 #else
4893 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4894 #endif
4895 if (reg_parm_stack_space > 0)
4896 {
4897 if (initial_offset_ptr->var)
4898 {
4899 initial_offset_ptr->var
4900 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4901 size_int (reg_parm_stack_space));
4902 initial_offset_ptr->constant = 0;
4903 }
4904 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4905 initial_offset_ptr->constant = reg_parm_stack_space;
4906 }
4907 }
4908 #endif /* REG_PARM_STACK_SPACE */
4909
4910 arg_size_ptr->var = 0;
4911 arg_size_ptr->constant = 0;
4912
4913 #ifdef ARGS_GROW_DOWNWARD
4914 if (initial_offset_ptr->var)
4915 {
4916 offset_ptr->constant = 0;
4917 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4918 initial_offset_ptr->var);
4919 }
4920 else
4921 {
4922 offset_ptr->constant = - initial_offset_ptr->constant;
4923 offset_ptr->var = 0;
4924 }
4925 if (where_pad != none
4926 && (TREE_CODE (sizetree) != INTEGER_CST
4927 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4928 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4929 SUB_PARM_SIZE (*offset_ptr, sizetree);
4930 if (where_pad != downward)
4931 pad_to_arg_alignment (offset_ptr, boundary);
4932 if (initial_offset_ptr->var)
4933 {
4934 arg_size_ptr->var = size_binop (MINUS_EXPR,
4935 size_binop (MINUS_EXPR,
4936 integer_zero_node,
4937 initial_offset_ptr->var),
4938 offset_ptr->var);
4939 }
4940 else
4941 {
4942 arg_size_ptr->constant = (- initial_offset_ptr->constant
4943 - offset_ptr->constant);
4944 }
4945 #else /* !ARGS_GROW_DOWNWARD */
4946 pad_to_arg_alignment (initial_offset_ptr, boundary);
4947 *offset_ptr = *initial_offset_ptr;
4948
4949 #ifdef PUSH_ROUNDING
4950 if (passed_mode != BLKmode)
4951 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4952 #endif
4953
4954 /* pad_below needs the pre-rounded size to know how much to pad below,
4955 so this must be done before rounding up. */
4956 if (where_pad == downward
4957 /* However, BLKmode args passed in regs have their padding done elsewhere.
4958 The stack slot must be able to hold the entire register. */
4959 && !(in_regs && passed_mode == BLKmode))
4960 pad_below (offset_ptr, passed_mode, sizetree);
4961
4962 if (where_pad != none
4963 && (TREE_CODE (sizetree) != INTEGER_CST
4964 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4965 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4966
4967 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4968 #endif /* ARGS_GROW_DOWNWARD */
4969 }
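/* Added worked example, assuming an upward-growing argument area with no
   register parm area, PARM_BOUNDARY == 64, a FUNCTION_ARG_BOUNDARY of 64
   bits and upward padding: a 12-byte BLKmode parm arriving at initial
   offset 4 is first aligned to offset 8 by pad_to_arg_alignment, its
   size is rounded up to 16, and the next parm therefore starts at
   offset 24.  */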
4970
4971 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4972 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4973
4974 static void
4975 pad_to_arg_alignment (offset_ptr, boundary)
4976 struct args_size *offset_ptr;
4977 int boundary;
4978 {
4979 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4980
4981 if (boundary > BITS_PER_UNIT)
4982 {
4983 if (offset_ptr->var)
4984 {
4985 offset_ptr->var =
4986 #ifdef ARGS_GROW_DOWNWARD
4987 round_down
4988 #else
4989 round_up
4990 #endif
4991 (ARGS_SIZE_TREE (*offset_ptr),
4992 boundary / BITS_PER_UNIT);
4993 offset_ptr->constant = 0; /*?*/
4994 }
4995 else
4996 offset_ptr->constant =
4997 #ifdef ARGS_GROW_DOWNWARD
4998 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4999 #else
5000 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5001 #endif
5002 }
5003 }
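/* Added worked example: with boundary == 32 bits (4 bytes), a constant
   offset of 13 becomes CEIL_ROUND (13, 4) == 16 when arguments grow
   upward, or FLOOR_ROUND (13, 4) == 12 when ARGS_GROW_DOWNWARD.  */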
5004
5005 #ifndef ARGS_GROW_DOWNWARD
5006 static void
5007 pad_below (offset_ptr, passed_mode, sizetree)
5008 struct args_size *offset_ptr;
5009 enum machine_mode passed_mode;
5010 tree sizetree;
5011 {
5012 if (passed_mode != BLKmode)
5013 {
5014 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5015 offset_ptr->constant
5016 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5017 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5018 - GET_MODE_SIZE (passed_mode));
5019 }
5020 else
5021 {
5022 if (TREE_CODE (sizetree) != INTEGER_CST
5023 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5024 {
5025 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5026 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5027 /* Add it in. */
5028 ADD_PARM_SIZE (*offset_ptr, s2);
5029 SUB_PARM_SIZE (*offset_ptr, sizetree);
5030 }
5031 }
5032 }
5033 #endif
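/* Added worked example for pad_below: an HImode parm (16 bits) with
   PARM_BOUNDARY == 32 occupies a 4-byte slot, so the offset is bumped by
   4 - GET_MODE_SIZE (HImode) == 2 bytes and the value sits above two
   bytes of padding within its slot.  */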
5034
5035 #ifdef ARGS_GROW_DOWNWARD
5036 static tree
5037 round_down (value, divisor)
5038 tree value;
5039 int divisor;
5040 {
5041 return size_binop (MULT_EXPR,
5042 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5043 size_int (divisor));
5044 }
5045 #endif
5046 \f
5047 /* Walk the tree of blocks describing the binding levels within a function
5048 and warn about uninitialized variables.
5049 This is done after calling flow_analysis and before global_alloc
5050 clobbers the pseudo-regs to hard regs. */
5051
5052 void
5053 uninitialized_vars_warning (block)
5054 tree block;
5055 {
5056 register tree decl, sub;
5057 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5058 {
5059 if (TREE_CODE (decl) == VAR_DECL
5060 /* These warnings are unreliable for aggregates
5061 because assigning the fields one by one can fail to convince
5062 flow.c that the entire aggregate was initialized.
5063 Unions are troublesome because members may be shorter. */
5064 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5065 && DECL_RTL (decl) != 0
5066 && GET_CODE (DECL_RTL (decl)) == REG
5067 /* Global optimizations can make it difficult to determine if a
5068 particular variable has been initialized. However, a VAR_DECL
5069 with a nonzero DECL_INITIAL had an initializer, so do not
5070 claim it is potentially uninitialized.
5071
5072 We do not care about the actual value in DECL_INITIAL, so we do
5073 not worry that it may be a dangling pointer. */
5074 && DECL_INITIAL (decl) == NULL_TREE
5075 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5076 warning_with_decl (decl,
5077 "`%s' might be used uninitialized in this function");
5078 if (TREE_CODE (decl) == VAR_DECL
5079 && DECL_RTL (decl) != 0
5080 && GET_CODE (DECL_RTL (decl)) == REG
5081 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5082 warning_with_decl (decl,
5083 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5084 }
5085 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5086 uninitialized_vars_warning (sub);
5087 }
5088
5089 /* Do the appropriate part of uninitialized_vars_warning
5090 but for arguments instead of local variables. */
5091
5092 void
5093 setjmp_args_warning ()
5094 {
5095 register tree decl;
5096 for (decl = DECL_ARGUMENTS (current_function_decl);
5097 decl; decl = TREE_CHAIN (decl))
5098 if (DECL_RTL (decl) != 0
5099 && GET_CODE (DECL_RTL (decl)) == REG
5100 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5101 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5102 }
5103
5104 /* If this function calls setjmp, put all vars into the stack
5105 unless they were declared `register'. */
5106
5107 void
5108 setjmp_protect (block)
5109 tree block;
5110 {
5111 register tree decl, sub;
5112 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5113 if ((TREE_CODE (decl) == VAR_DECL
5114 || TREE_CODE (decl) == PARM_DECL)
5115 && DECL_RTL (decl) != 0
5116 && (GET_CODE (DECL_RTL (decl)) == REG
5117 || (GET_CODE (DECL_RTL (decl)) == MEM
5118 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5119 /* If this variable came from an inline function, it must be
5120 that its life doesn't overlap the setjmp. If there was a
5121 setjmp in the function, it would already be in memory. We
5122 must exclude such variables because their DECL_RTL might be
5123 set to strange things such as virtual_stack_vars_rtx. */
5124 && ! DECL_FROM_INLINE (decl)
5125 && (
5126 #ifdef NON_SAVING_SETJMP
5127 /* If longjmp doesn't restore the registers,
5128 don't put anything in them. */
5129 NON_SAVING_SETJMP
5130 ||
5131 #endif
5132 ! DECL_REGISTER (decl)))
5133 put_var_into_stack (decl);
5134 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5135 setjmp_protect (sub);
5136 }
5137 \f
5138 /* Like the previous function, but for args instead of local variables. */
5139
5140 void
5141 setjmp_protect_args ()
5142 {
5143 register tree decl;
5144 for (decl = DECL_ARGUMENTS (current_function_decl);
5145 decl; decl = TREE_CHAIN (decl))
5146 if ((TREE_CODE (decl) == VAR_DECL
5147 || TREE_CODE (decl) == PARM_DECL)
5148 && DECL_RTL (decl) != 0
5149 && (GET_CODE (DECL_RTL (decl)) == REG
5150 || (GET_CODE (DECL_RTL (decl)) == MEM
5151 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5152 && (
5153 /* If longjmp doesn't restore the registers,
5154 don't put anything in them. */
5155 #ifdef NON_SAVING_SETJMP
5156 NON_SAVING_SETJMP
5157 ||
5158 #endif
5159 ! DECL_REGISTER (decl)))
5160 put_var_into_stack (decl);
5161 }
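/* Illustrative usage (a sketch, not part of the original file): after
   expanding a function known to call setjmp, a caller would typically
   protect locals and arguments alike with a fragment such as

       if (current_function_calls_setjmp)
         {
           setjmp_protect (DECL_INITIAL (current_function_decl));
           setjmp_protect_args ();
         }

   so that no user variable is left in a call-clobbered register.  */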
5162 \f
5163 /* Return the context-pointer register corresponding to DECL,
5164 or 0 if it does not need one. */
5165
5166 rtx
5167 lookup_static_chain (decl)
5168 tree decl;
5169 {
5170 tree context = decl_function_context (decl);
5171 tree link;
5172
5173 if (context == 0
5174 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5175 return 0;
5176
5177 /* We treat inline_function_decl as an alias for the current function
5178 because that is the inline function whose vars, types, etc.
5179 are being merged into the current function.
5180 See expand_inline_function. */
5181 if (context == current_function_decl || context == inline_function_decl)
5182 return virtual_stack_vars_rtx;
5183
5184 for (link = context_display; link; link = TREE_CHAIN (link))
5185 if (TREE_PURPOSE (link) == context)
5186 return RTL_EXPR_RTL (TREE_VALUE (link));
5187
5188 abort ();
5189 }
5190 \f
5191 /* Convert a stack slot address ADDR for variable VAR
5192 (from a containing function)
5193 into an address valid in this function (using a static chain). */
5194
5195 rtx
5196 fix_lexical_addr (addr, var)
5197 rtx addr;
5198 tree var;
5199 {
5200 rtx basereg;
5201 HOST_WIDE_INT displacement;
5202 tree context = decl_function_context (var);
5203 struct function *fp;
5204 rtx base = 0;
5205
5206 /* If this is the present function, we need not do anything. */
5207 if (context == current_function_decl || context == inline_function_decl)
5208 return addr;
5209
5210 for (fp = outer_function_chain; fp; fp = fp->next)
5211 if (fp->decl == context)
5212 break;
5213
5214 if (fp == 0)
5215 abort ();
5216
5217 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5218 addr = XEXP (XEXP (addr, 0), 0);
5219
5220 /* Decode given address as base reg plus displacement. */
5221 if (GET_CODE (addr) == REG)
5222 basereg = addr, displacement = 0;
5223 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5224 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5225 else
5226 abort ();
5227
5228 /* We accept vars reached via the containing function's
5229 incoming arg pointer and via its stack variables pointer. */
5230 if (basereg == fp->internal_arg_pointer)
5231 {
5232 /* If reached via arg pointer, get the arg pointer value
5233 out of that function's stack frame.
5234
5235 There are two cases: If a separate ap is needed, allocate a
5236 slot in the outer function for it and dereference it that way.
5237 This is correct even if the real ap is actually a pseudo.
5238 Otherwise, just adjust the offset from the frame pointer to
5239 compensate. */
5240
5241 #ifdef NEED_SEPARATE_AP
5242 rtx addr;
5243
5244 if (fp->x_arg_pointer_save_area == 0)
5245 fp->x_arg_pointer_save_area
5246 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5247
5248 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5249 addr = memory_address (Pmode, addr);
5250
5251 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5252 #else
5253 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5254 base = lookup_static_chain (var);
5255 #endif
5256 }
5257
5258 else if (basereg == virtual_stack_vars_rtx)
5259 {
5260 /* This is the same code as lookup_static_chain, duplicated here to
5261 avoid an extra call to decl_function_context. */
5262 tree link;
5263
5264 for (link = context_display; link; link = TREE_CHAIN (link))
5265 if (TREE_PURPOSE (link) == context)
5266 {
5267 base = RTL_EXPR_RTL (TREE_VALUE (link));
5268 break;
5269 }
5270 }
5271
5272 if (base == 0)
5273 abort ();
5274
5275 /* Use same offset, relative to appropriate static chain or argument
5276 pointer. */
5277 return plus_constant (base, displacement);
5278 }
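
/* Worked example of the decomposition above: an address of the form
   (plus (reg virtual_stack_vars) (const_int -8)) splits into
   BASEREG = virtual_stack_vars_rtx and DISPLACEMENT = -8.  The
   containing function's static chain is substituted as the base and
   the same -8 is reapplied, so the slot is reached through the chain
   rather than through a frame pointer that is not live in the
   current function.  */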
5279 \f
5280 /* Return the address of the trampoline for entering nested fn FUNCTION.
5281 If necessary, allocate a trampoline (in the stack frame)
5282 and emit rtl to initialize its contents (at entry to this function). */
5283
5284 rtx
5285 trampoline_address (function)
5286 tree function;
5287 {
5288 tree link;
5289 tree rtlexp;
5290 rtx tramp;
5291 struct function *fp;
5292 tree fn_context;
5293
5294 /* Find an existing trampoline and return it. */
5295 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5296 if (TREE_PURPOSE (link) == function)
5297 return
5298 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5299
5300 for (fp = outer_function_chain; fp; fp = fp->next)
5301 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5302 if (TREE_PURPOSE (link) == function)
5303 {
5304 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5305 function);
5306 return round_trampoline_addr (tramp);
5307 }
5308
5309 /* None exists; we must make one. */
5310
5311 /* Find the `struct function' for the function containing FUNCTION. */
5312 fp = 0;
5313 fn_context = decl_function_context (function);
5314 if (fn_context != current_function_decl
5315 && fn_context != inline_function_decl)
5316 for (fp = outer_function_chain; fp; fp = fp->next)
5317 if (fp->decl == fn_context)
5318 break;
5319
5320 /* Allocate run-time space for this trampoline
5321 (usually in the defining function's stack frame). */
5322 #ifdef ALLOCATE_TRAMPOLINE
5323 tramp = ALLOCATE_TRAMPOLINE (fp);
5324 #else
5325 /* If rounding needed, allocate extra space
5326 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5327 #ifdef TRAMPOLINE_ALIGNMENT
5328 #define TRAMPOLINE_REAL_SIZE \
5329 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5330 #else
5331 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5332 #endif
5333 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5334 fp ? fp : current_function);
5335 #endif
5336
5337 /* Record the trampoline for reuse and note it for later initialization
5338 by expand_function_end. */
5339 if (fp != 0)
5340 {
5341 push_obstacks (fp->function_maybepermanent_obstack,
5342 fp->function_maybepermanent_obstack);
5343 rtlexp = make_node (RTL_EXPR);
5344 RTL_EXPR_RTL (rtlexp) = tramp;
5345 fp->x_trampoline_list = tree_cons (function, rtlexp,
5346 fp->x_trampoline_list);
5347 pop_obstacks ();
5348 }
5349 else
5350 {
5351 /* Make the RTL_EXPR node temporary, not momentary, so that the
5352 trampoline_list doesn't become garbage. */
5353 int momentary = suspend_momentary ();
5354 rtlexp = make_node (RTL_EXPR);
5355 resume_momentary (momentary);
5356
5357 RTL_EXPR_RTL (rtlexp) = tramp;
5358 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5359 }
5360
5361 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5362 return round_trampoline_addr (tramp);
5363 }
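
/* A hedged sketch of why a trampoline is needed: in

     void
     outer (int x)
     {
       void inner (void) { x++; }
       apply (inner);
     }

   the value `inner' handed to APPLY (an assumed external function
   taking a plain function pointer) cannot be a bare code address,
   because INNER also needs OUTER's static chain.  The trampoline is
   a small stub in OUTER's frame that loads the chain and jumps to
   INNER's real code; its rounded address is what callers receive.  */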
5364
5365 /* Given a trampoline address,
5366 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5367
5368 static rtx
5369 round_trampoline_addr (tramp)
5370 rtx tramp;
5371 {
5372 #ifdef TRAMPOLINE_ALIGNMENT
5373 /* Round address up to desired boundary. */
5374 rtx temp = gen_reg_rtx (Pmode);
5375 temp = expand_binop (Pmode, add_optab, tramp,
5376 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5377 temp, 0, OPTAB_LIB_WIDEN);
5378 tramp = expand_binop (Pmode, and_optab, temp,
5379 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5380 temp, 0, OPTAB_LIB_WIDEN);
5381 #endif
5382 return tramp;
5383 }
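
/* Worked example of the rounding above: with a TRAMPOLINE_ALIGNMENT
   of 32 bits (4 bytes), an address of 0x1001 becomes
   (0x1001 + 3) & -4 = 0x1004.  The extra
   TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1 bytes allocated in
   trampoline_address guarantee that TRAMPOLINE_SIZE bytes still
   remain after this adjustment.  */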
5384 \f
5385 /* Insert the BLOCK in the block-tree before LAST_INSN. */
5386
5387 void
5388 retrofit_block (block, last_insn)
5389 tree block;
5390 rtx last_insn;
5391 {
5392 rtx insn;
5393
5394 /* Now insert the new BLOCK at the right place in the block trees
5395 for the function which called the inline function. We just look
5396 backwards for a NOTE_INSN_BLOCK_{BEG,END}. If we find the
5397 beginning of a block, then this new block becomes the first
5398 subblock of that block. If we find the end of a block, then this
5399 new block follows that block in the list of blocks. */
5400 for (insn = last_insn; insn; insn = PREV_INSN (insn))
5401 if (GET_CODE (insn) == NOTE
5402 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
5403 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
5404 break;
5405 if (!insn || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5406 {
5407 tree superblock;
5408
5409 if (insn)
5410 superblock = NOTE_BLOCK (insn);
5411 else
5412 superblock = DECL_INITIAL (current_function_decl);
5413
5414 BLOCK_SUPERCONTEXT (block) = superblock;
5415 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (superblock);
5416 BLOCK_SUBBLOCKS (superblock) = block;
5417 }
5418 else
5419 {
5420 tree prevblock = NOTE_BLOCK (insn);
5421
5422 BLOCK_SUPERCONTEXT (block) = BLOCK_SUPERCONTEXT (prevblock);
5423 BLOCK_CHAIN (block) = BLOCK_CHAIN (prevblock);
5424 BLOCK_CHAIN (prevblock) = block;
5425 }
5426 }
5427
5428 /* The functions identify_blocks and reorder_blocks provide a way to
5429 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5430 duplicate portions of the RTL code. Call identify_blocks before
5431 changing the RTL, and call reorder_blocks after. */
5432
5433 /* Gather all this function's BLOCK nodes, including those chained
5434 onto the first block, into a temporary vector in depth-first order.
5435 Also set, in each NOTE marking the beginning or end of a block,
5436 the NOTE_BLOCK field to the BLOCK that note belongs to.
5437 The arguments are BLOCK, the chain of top-level blocks of the function,
5438 and INSNS, the insn chain of the function. */
5439
5440 void
5441 identify_blocks (block, insns)
5442 tree block;
5443 rtx insns;
5444 {
5445 int n_blocks;
5446 tree *block_vector;
5447 tree *block_stack;
5448 int depth = 0;
5449 int current_block_number = 1;
5450 rtx insn;
5451
5452 if (block == 0)
5453 return;
5454
5455 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5456 depth-first order. */
5457 n_blocks = all_blocks (block, 0);
5458 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5459 all_blocks (block, block_vector);
5460
5461 block_stack = (tree *) alloca (n_blocks * sizeof (tree));
5462
5463 for (insn = insns; insn; insn = NEXT_INSN (insn))
5464 if (GET_CODE (insn) == NOTE)
5465 {
5466 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5467 {
5468 tree b;
5469
5470 /* If there are more block notes than BLOCKs, something
5471 is badly wrong. */
5472 if (current_block_number == n_blocks)
5473 abort ();
5474
5475 b = block_vector[current_block_number++];
5476 NOTE_BLOCK (insn) = b;
5477 block_stack[depth++] = b;
5478 }
5479 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5480 {
5481 if (depth == 0)
5482 /* There are more NOTE_INSN_BLOCK_ENDs than
5483 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5484 abort ();
5485
5486 NOTE_BLOCK (insn) = block_stack[--depth];
5487 }
5488 }
5489
5490 /* In whole-function mode, we might not have seen the whole function
5491 yet, so we might not use up all the blocks. */
5492 if (n_blocks != current_block_number
5493 && !current_function->x_whole_function_mode_p)
5494 abort ();
5495
5496 free (block_vector);
5497 }
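
/* To illustrate the pairing done above (a sketch): for an insn
   stream containing

     NOTE_INSN_BLOCK_BEG      .. outer scope
       NOTE_INSN_BLOCK_BEG    .. inner scope
       NOTE_INSN_BLOCK_END
     NOTE_INSN_BLOCK_END

   the depth-first vector is { top-level block, outer, inner }.
   Each BEG note is assigned the next unused vector entry, and each
   END note gets the entry its matching BEG pushed on the stack.  */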
5498
5499 /* Given a revised instruction chain, rebuild the tree structure of
5500 BLOCK nodes to correspond to the new order of RTL. The new block
5501 tree is inserted below BLOCK. Returns the current top-level
5502 block. */
5503
5504 tree
5505 reorder_blocks (block, insns)
5506 tree block;
5507 rtx insns;
5508 {
5509 tree current_block = block;
5510 rtx insn;
5511
5512 if (block == NULL_TREE)
5513 return NULL_TREE;
5514
5515 /* Prune the old trees away, so that they don't get in the way. */
5516 BLOCK_SUBBLOCKS (current_block) = 0;
5517 BLOCK_CHAIN (current_block) = 0;
5518
5519 for (insn = insns; insn; insn = NEXT_INSN (insn))
5520 if (GET_CODE (insn) == NOTE)
5521 {
5522 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5523 {
5524 tree block = NOTE_BLOCK (insn);
5525 /* If we have seen this block before, copy it. */
5526 if (TREE_ASM_WRITTEN (block))
5527 block = copy_node (block);
5528 BLOCK_SUBBLOCKS (block) = 0;
5529 TREE_ASM_WRITTEN (block) = 1;
5530 BLOCK_SUPERCONTEXT (block) = current_block;
5531 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5532 BLOCK_SUBBLOCKS (current_block) = block;
5533 current_block = block;
5534 NOTE_SOURCE_FILE (insn) = 0;
5535 }
5536 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5537 {
5538 BLOCK_SUBBLOCKS (current_block)
5539 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5540 current_block = BLOCK_SUPERCONTEXT (current_block);
5541 NOTE_SOURCE_FILE (insn) = 0;
5542 }
5543 }
5544
5545 BLOCK_SUBBLOCKS (current_block)
5546 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5547 return current_block;
5548 }
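
/* A note on the copy_node call above: if an optimizer duplicated a
   region of RTL, the same BLOCK's notes now appear more than once in
   the insn stream.  TREE_ASM_WRITTEN marks the first occurrence, and
   each later occurrence is given a fresh copy, so every duplicated
   region ends up owning a distinct BLOCK node in the rebuilt tree.  */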
5549
5550 /* Reverse the order of elements in the chain T of blocks,
5551 and return the new head of the chain (old last element). */
5552
5553 static tree
5554 blocks_nreverse (t)
5555 tree t;
5556 {
5557 register tree prev = 0, decl, next;
5558 for (decl = t; decl; decl = next)
5559 {
5560 next = BLOCK_CHAIN (decl);
5561 BLOCK_CHAIN (decl) = prev;
5562 prev = decl;
5563 }
5564 return prev;
5565 }
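
/* E.g. a chain A -> B -> C comes back as C -> B -> A: each iteration
   detaches the head of the remaining chain and pushes it onto PREV,
   so the reversal happens in place with no allocation.  */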
5566
5567 /* Count the subblocks of the list starting with BLOCK, and list them
5568 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5569 blocks. */
5570
5571 static int
5572 all_blocks (block, vector)
5573 tree block;
5574 tree *vector;
5575 {
5576 int n_blocks = 0;
5577
5578 while (block)
5579 {
5580 TREE_ASM_WRITTEN (block) = 0;
5581
5582 /* Record this block. */
5583 if (vector)
5584 vector[n_blocks] = block;
5585
5586 ++n_blocks;
5587
5588 /* Record the subblocks, and their subblocks... */
5589 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5590 vector ? vector + n_blocks : 0);
5591 block = BLOCK_CHAIN (block);
5592 }
5593
5594 return n_blocks;
5595 }
5596 \f
5597 /* Allocate a function structure and reset its contents to the defaults. */
5598 static void
5599 prepare_function_start ()
5600 {
5601 current_function = (struct function *) xcalloc (1, sizeof (struct function));
5602
5603 init_stmt_for_function ();
5604 init_eh_for_function ();
5605
5606 cse_not_expected = ! optimize;
5607
5608 /* Caller save not needed yet. */
5609 caller_save_needed = 0;
5610
5611 /* No stack slots have been made yet. */
5612 stack_slot_list = 0;
5613
5614 current_function_has_nonlocal_label = 0;
5615 current_function_has_nonlocal_goto = 0;
5616
5617 /* There is no stack slot for handling nonlocal gotos. */
5618 nonlocal_goto_handler_slots = 0;
5619 nonlocal_goto_stack_level = 0;
5620
5621 /* No labels have been declared for nonlocal use. */
5622 nonlocal_labels = 0;
5623 nonlocal_goto_handler_labels = 0;
5624
5625 /* No function calls so far in this function. */
5626 function_call_count = 0;
5627
5628 /* No parm regs have been allocated.
5629 (This is important for output_inline_function.) */
5630 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5631
5632 /* Initialize the RTL mechanism. */
5633 init_emit ();
5634
5635 /* Initialize the queue of pending postincrement and postdecrements,
5636 and some other info in expr.c. */
5637 init_expr ();
5638
5639 /* We haven't done register allocation yet. */
5640 reg_renumber = 0;
5641
5642 init_varasm_status (current_function);
5643
5644 /* Clear out data used for inlining. */
5645 current_function->inlinable = 0;
5646 current_function->original_decl_initial = 0;
5647 current_function->original_arg_vector = 0;
5648
5649 /* Set if a call to setjmp is seen. */
5650 current_function_calls_setjmp = 0;
5651
5652 /* Set if a call to longjmp is seen. */
5653 current_function_calls_longjmp = 0;
5654
5655 current_function_calls_alloca = 0;
5656 current_function_contains_functions = 0;
5657 current_function_is_leaf = 0;
5658 current_function_sp_is_unchanging = 0;
5659 current_function_uses_only_leaf_regs = 0;
5660 current_function_has_computed_jump = 0;
5661 current_function_is_thunk = 0;
5662
5663 current_function_returns_pcc_struct = 0;
5664 current_function_returns_struct = 0;
5665 current_function_epilogue_delay_list = 0;
5666 current_function_uses_const_pool = 0;
5667 current_function_uses_pic_offset_table = 0;
5668 current_function_cannot_inline = 0;
5669
5670 /* We have not yet needed to make a label to jump to for tail-recursion. */
5671 tail_recursion_label = 0;
5672
5673 /* We haven't had a need to make a save area for ap yet. */
5674 arg_pointer_save_area = 0;
5675
5676 /* No stack slots allocated yet. */
5677 frame_offset = 0;
5678
5679 /* No SAVE_EXPRs in this function yet. */
5680 save_expr_regs = 0;
5681
5682 /* No RTL_EXPRs in this function yet. */
5683 rtl_expr_chain = 0;
5684
5685 /* Set up to allocate temporaries. */
5686 init_temp_slots ();
5687
5688 /* Indicate that we need to distinguish between the return value of the
5689 present function and the return value of a function being called. */
5690 rtx_equal_function_value_matters = 1;
5691
5692 /* Indicate that we have not instantiated virtual registers yet. */
5693 virtuals_instantiated = 0;
5694
5695 /* Indicate we have no need of a frame pointer yet. */
5696 frame_pointer_needed = 0;
5697
5698 /* By default assume not varargs or stdarg. */
5699 current_function_varargs = 0;
5700 current_function_stdarg = 0;
5701
5702 /* We haven't made any trampolines for this function yet. */
5703 trampoline_list = 0;
5704
5705 init_pending_stack_adjust ();
5706 inhibit_defer_pop = 0;
5707
5708 current_function_outgoing_args_size = 0;
5709
5710 if (init_lang_status)
5711 (*init_lang_status) (current_function);
5712 if (init_machine_status)
5713 (*init_machine_status) (current_function);
5714 }
5715
5716 /* Initialize the rtl expansion mechanism so that we can do simple things
5717 like generate sequences. This is used to provide a context during global
5718 initialization of some passes. */
5719 void
5720 init_dummy_function_start ()
5721 {
5722 prepare_function_start ();
5723 }
5724
5725 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5726 and initialize static variables for generating RTL for the statements
5727 of the function. */
5728
5729 void
5730 init_function_start (subr, filename, line)
5731 tree subr;
5732 char *filename;
5733 int line;
5734 {
5735 prepare_function_start ();
5736
5737 /* Remember this function for later. */
5738 current_function->next_global = all_functions;
5739 all_functions = current_function;
5740
5741 current_function_name = (*decl_printable_name) (subr, 2);
5742 current_function->decl = subr;
5743
5744 /* Nonzero if this is a nested function that uses a static chain. */
5745
5746 current_function_needs_context
5747 = (decl_function_context (current_function_decl) != 0
5748 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5749
5750 /* Within the function body, compute a type's size as soon as it is laid out. */
5751 immediate_size_expand++;
5752
5753 /* Prevent ever trying to delete the first instruction of a function.
5754 Also tell final how to output a linenum before the function prologue.
5755 Note linenums could be missing, e.g. when compiling a Java .class file. */
5756 if (line > 0)
5757 emit_line_note (filename, line);
5758
5759 /* Make sure first insn is a note even if we don't want linenums.
5760 This makes sure the first insn will never be deleted.
5761 Also, final expects a note to appear there. */
5762 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5763
5764 /* Set flags used by final.c. */
5765 if (aggregate_value_p (DECL_RESULT (subr)))
5766 {
5767 #ifdef PCC_STATIC_STRUCT_RETURN
5768 current_function_returns_pcc_struct = 1;
5769 #endif
5770 current_function_returns_struct = 1;
5771 }
5772
5773 /* Warn if this value is an aggregate type,
5774 regardless of which calling convention we are using for it. */
5775 if (warn_aggregate_return
5776 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5777 warning ("function returns an aggregate");
5778
5779 current_function_returns_pointer
5780 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5781 }
5782
5783 /* Make sure all values used by the optimization passes have sane
5784 defaults. */
5785 void
5786 init_function_for_compilation ()
5787 {
5788 reg_renumber = 0;
5789 /* No prologue/epilogue insns yet. */
5790 prologue = epilogue = 0;
5791 }
5792
5793 /* Indicate that the current function uses extra args
5794 not explicitly mentioned in the argument list in any fashion. */
5795
5796 void
5797 mark_varargs ()
5798 {
5799 current_function_varargs = 1;
5800 }
5801
5802 /* Expand a call to __main at the beginning of a possible main function. */
5803
5804 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5805 #undef HAS_INIT_SECTION
5806 #define HAS_INIT_SECTION
5807 #endif
5808
5809 void
5810 expand_main_function ()
5811 {
5812 #if !defined (HAS_INIT_SECTION)
5813 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5814 VOIDmode, 0);
5815 #endif /* not HAS_INIT_SECTION */
5816 }
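
/* Sketch of the effect: on a target without an init section, the
   code emitted for

     int
     main (void)
     {
       return 0;
     }

   begins with a call to `__main' (NAME__MAIN), which the runtime
   library uses to run global constructors before user code.  Targets
   defining INIT_SECTION_ASM_OP arrange for that work via the init
   section at startup instead, so the explicit call is omitted.  */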
5817 \f
5818 extern struct obstack permanent_obstack;
5819
5820 /* Start the RTL for a new function, and set variables used for
5821 emitting RTL.
5822 SUBR is the FUNCTION_DECL node.
5823 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5824 the function's parameters, which must be run at any return statement. */
5825
5826 void
5827 expand_function_start (subr, parms_have_cleanups)
5828 tree subr;
5829 int parms_have_cleanups;
5830 {
5831 register int i;
5832 tree tem;
5833 rtx last_ptr = NULL_RTX;
5834
5835 /* Make sure volatile mem refs aren't considered
5836 valid operands of arithmetic insns. */
5837 init_recog_no_volatile ();
5838
5839 /* Set this before generating any memory accesses. */
5840 current_function_check_memory_usage
5841 = (flag_check_memory_usage
5842 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5843
5844 current_function_instrument_entry_exit
5845 = (flag_instrument_function_entry_exit
5846 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5847
5848 /* If function gets a static chain arg, store it in the stack frame.
5849 Do this first, so it gets the first stack slot offset. */
5850 if (current_function_needs_context)
5851 {
5852 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5853
5854 /* Delay copying static chain if it is not a register to avoid
5855 conflicts with regs used for parameters. */
5856 if (! SMALL_REGISTER_CLASSES
5857 || GET_CODE (static_chain_incoming_rtx) == REG)
5858 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5859 }
5860
5861 /* If the parameters of this function need cleaning up, get a label
5862 for the beginning of the code which executes those cleanups. This must
5863 be done before doing anything with return_label. */
5864 if (parms_have_cleanups)
5865 cleanup_label = gen_label_rtx ();
5866 else
5867 cleanup_label = 0;
5868
5869 /* Make the label for return statements to jump to, if this machine
5870 does not have a one-instruction return and uses an epilogue,
5871 or if it returns a structure, or if it has parm cleanups. */
5872 #ifdef HAVE_return
5873 if (cleanup_label == 0 && HAVE_return
5874 && ! current_function_instrument_entry_exit
5875 && ! current_function_returns_pcc_struct
5876 && ! (current_function_returns_struct && ! optimize))
5877 return_label = 0;
5878 else
5879 return_label = gen_label_rtx ();
5880 #else
5881 return_label = gen_label_rtx ();
5882 #endif
5883
5884 /* Initialize rtx used to return the value. */
5885 /* Do this before assign_parms so that we copy the struct value address
5886 before any library calls that assign parms might generate. */
5887
5888 /* Decide whether to return the value in memory or in a register. */
5889 if (aggregate_value_p (DECL_RESULT (subr)))
5890 {
5891 /* Returning something that won't go in a register. */
5892 register rtx value_address = 0;
5893
5894 #ifdef PCC_STATIC_STRUCT_RETURN
5895 if (current_function_returns_pcc_struct)
5896 {
5897 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5898 value_address = assemble_static_space (size);
5899 }
5900 else
5901 #endif
5902 {
5903 /* Expect to be passed the address of a place to store the value.
5904 If it is passed as an argument, assign_parms will take care of
5905 it. */
5906 if (struct_value_incoming_rtx)
5907 {
5908 value_address = gen_reg_rtx (Pmode);
5909 emit_move_insn (value_address, struct_value_incoming_rtx);
5910 }
5911 }
5912 if (value_address)
5913 {
5914 DECL_RTL (DECL_RESULT (subr))
5915 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5916 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5917 AGGREGATE_TYPE_P (TREE_TYPE
5918 (DECL_RESULT
5919 (subr))));
5920 }
5921 }
5922 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5923 /* If return mode is void, this decl rtl should not be used. */
5924 DECL_RTL (DECL_RESULT (subr)) = 0;
5925 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5926 {
5927 /* If the function will end with cleanup code for parms,
5928 compute the return value into a pseudo reg,
5929 which we will copy into the true return register
5930 after the cleanups are done. */
5931
5932 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5933
5934 #ifdef PROMOTE_FUNCTION_RETURN
5935 tree type = TREE_TYPE (DECL_RESULT (subr));
5936 int unsignedp = TREE_UNSIGNED (type);
5937
5938 mode = promote_mode (type, mode, &unsignedp, 1);
5939 #endif
5940
5941 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5942 }
5943 else
5944 /* Scalar, returned in a register. */
5945 {
5946 #ifdef FUNCTION_OUTGOING_VALUE
5947 DECL_RTL (DECL_RESULT (subr))
5948 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5949 #else
5950 DECL_RTL (DECL_RESULT (subr))
5951 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5952 #endif
5953
5954 /* Mark this reg as the function's return value. */
5955 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5956 {
5957 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5958 /* Needed because we may need to move this to memory
5959 in case it's a named return value whose address is taken. */
5960 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5961 }
5962 }
5963
5964 /* Initialize rtx for parameters and local variables.
5965 In some cases this requires emitting insns. */
5966
5967 assign_parms (subr);
5968
5969 /* Copy the static chain now if it wasn't a register. The delay is to
5970 avoid conflicts with the parameter passing registers. */
5971
5972 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5973 if (GET_CODE (static_chain_incoming_rtx) != REG)
5974 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5975
5976 /* The following was moved from init_function_start.
5977 The move is supposed to make sdb output more accurate. */
5978 /* Indicate the beginning of the function body,
5979 as opposed to parm setup. */
5980 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5981
5982 /* If doing stupid allocation, mark parms as born here. */
5983
5984 if (GET_CODE (get_last_insn ()) != NOTE)
5985 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5986 parm_birth_insn = get_last_insn ();
5987
5988 if (obey_regdecls)
5989 {
5990 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5991 use_variable (regno_reg_rtx[i]);
5992
5993 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5994 use_variable (current_function_internal_arg_pointer);
5995 }
5996
5997 context_display = 0;
5998 if (current_function_needs_context)
5999 {
6000 /* Fetch static chain values for containing functions. */
6001 tem = decl_function_context (current_function_decl);
6002 /* If not doing stupid register allocation copy the static chain
6003 pointer into a pseudo. If we have small register classes, copy
6004 the value from memory if static_chain_incoming_rtx is a REG. If
6005 we do stupid register allocation, we use the stack address
6006 generated above. */
6007 if (tem && ! obey_regdecls)
6008 {
6009 /* If the static chain originally came in a register, put it back
6010 there, then move it out in the next insn. The reason for
6011 this peculiar code is to satisfy function integration. */
6012 if (SMALL_REGISTER_CLASSES
6013 && GET_CODE (static_chain_incoming_rtx) == REG)
6014 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6015 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6016 }
6017
6018 while (tem)
6019 {
6020 tree rtlexp = make_node (RTL_EXPR);
6021
6022 RTL_EXPR_RTL (rtlexp) = last_ptr;
6023 context_display = tree_cons (tem, rtlexp, context_display);
6024 tem = decl_function_context (tem);
6025 if (tem == 0)
6026 break;
6027 /* Chain thru stack frames, assuming pointer to next lexical frame
6028 is found at the place we always store it. */
6029 #ifdef FRAME_GROWS_DOWNWARD
6030 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6031 #endif
6032 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6033 memory_address (Pmode,
6034 last_ptr)));
6035
6036 /* If we are not optimizing, ensure that we know that this
6037 piece of context is live over the entire function. */
6038 if (! optimize)
6039 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6040 save_expr_regs);
6041 }
6042 }
6043
6044 if (current_function_instrument_entry_exit)
6045 {
6046 rtx fun = DECL_RTL (current_function_decl);
6047 if (GET_CODE (fun) == MEM)
6048 fun = XEXP (fun, 0);
6049 else
6050 abort ();
6051 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6052 fun, Pmode,
6053 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6054 0,
6055 hard_frame_pointer_rtx),
6056 Pmode);
6057 }
6058
6059 /* After the display initializations is where the tail-recursion label
6060 should go, if we end up needing one. Ensure we have a NOTE here
6061 since some things (like trampolines) get placed before this. */
6062 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6063
6064 /* Evaluate now the sizes of any types declared among the arguments. */
6065 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6066 {
6067 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6068 EXPAND_MEMORY_USE_BAD);
6069 /* Flush the queue in case this parameter declaration has
6070 side-effects. */
6071 emit_queue ();
6072 }
6073
6074 /* Make sure there is a line number after the function entry setup code. */
6075 force_next_line_note ();
6076 }
6077 \f
6078 /* Undo the effects of init_dummy_function_start. */
6079 void
6080 expand_dummy_function_end ()
6081 {
6082 /* End any sequences that failed to be closed due to syntax errors. */
6083 while (in_sequence_p ())
6084 end_sequence ();
6085
6086 /* Outside function body, can't compute type's actual size
6087 until next function's body starts. */
6088
6089 free_after_parsing (current_function);
6090 free_after_compilation (current_function);
6091 free (current_function);
6092 current_function = 0;
6093 }
6094
6095 /* Generate RTL for the end of the current function.
6096 FILENAME and LINE are the current position in the source file.
6097
6098 It is up to language-specific callers to do cleanups for parameters--
6099 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6100
6101 void
6102 expand_function_end (filename, line, end_bindings)
6103 char *filename;
6104 int line;
6105 int end_bindings;
6106 {
6107 register int i;
6108 tree link;
6109
6110 #ifdef TRAMPOLINE_TEMPLATE
6111 static rtx initial_trampoline;
6112 #endif
6113
6114 finish_expr_for_function ();
6115
6116 #ifdef NON_SAVING_SETJMP
6117 /* Don't put any variables in registers if we call setjmp
6118 on a machine that fails to restore the registers. */
6119 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6120 {
6121 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6122 setjmp_protect (DECL_INITIAL (current_function_decl));
6123
6124 setjmp_protect_args ();
6125 }
6126 #endif
6127
6128 /* Save the argument pointer if a save area was made for it. */
6129 if (arg_pointer_save_area)
6130 {
6131 /* arg_pointer_save_area may not be a valid memory address, so we
6132 have to check it and fix it if necessary. */
6133 rtx seq;
6134 start_sequence ();
6135 emit_move_insn (validize_mem (arg_pointer_save_area),
6136 virtual_incoming_args_rtx);
6137 seq = gen_sequence ();
6138 end_sequence ();
6139 emit_insn_before (seq, tail_recursion_reentry);
6140 }
6141
6142 /* Initialize any trampolines required by this function. */
6143 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6144 {
6145 tree function = TREE_PURPOSE (link);
6146 rtx context = lookup_static_chain (function);
6147 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6148 #ifdef TRAMPOLINE_TEMPLATE
6149 rtx blktramp;
6150 #endif
6151 rtx seq;
6152
6153 #ifdef TRAMPOLINE_TEMPLATE
6154 /* First make sure this compilation has a template for
6155 initializing trampolines. */
6156 if (initial_trampoline == 0)
6157 {
6158 end_temporary_allocation ();
6159 initial_trampoline
6160 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6161 resume_temporary_allocation ();
6162
6163 ggc_add_rtx_root (&initial_trampoline, 1);
6164 }
6165 #endif
6166
6167 /* Generate insns to initialize the trampoline. */
6168 start_sequence ();
6169 tramp = round_trampoline_addr (XEXP (tramp, 0));
6170 #ifdef TRAMPOLINE_TEMPLATE
6171 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6172 emit_block_move (blktramp, initial_trampoline,
6173 GEN_INT (TRAMPOLINE_SIZE),
6174 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6175 #endif
6176 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6177 seq = get_insns ();
6178 end_sequence ();
6179
6180 /* Put those insns at entry to the containing function (this one). */
6181 emit_insns_before (seq, tail_recursion_reentry);
6182 }
6183
6184 /* If we are doing stack checking and this function makes calls,
6185 do a stack probe at the start of the function to ensure we have enough
6186 space for another stack frame. */
6187 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6188 {
6189 rtx insn, seq;
6190
6191 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6192 if (GET_CODE (insn) == CALL_INSN)
6193 {
6194 start_sequence ();
6195 probe_stack_range (STACK_CHECK_PROTECT,
6196 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6197 seq = get_insns ();
6198 end_sequence ();
6199 emit_insns_before (seq, tail_recursion_reentry);
6200 break;
6201 }
6202 }
6203
6204 /* Warn about unused parms if extra warnings were specified. */
6205 if (warn_unused && extra_warnings)
6206 {
6207 tree decl;
6208
6209 for (decl = DECL_ARGUMENTS (current_function_decl);
6210 decl; decl = TREE_CHAIN (decl))
6211 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6212 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6213 warning_with_decl (decl, "unused parameter `%s'");
6214 }
6215
6216 /* Delete handlers for nonlocal gotos if nothing uses them. */
6217 if (nonlocal_goto_handler_slots != 0
6218 && ! current_function_has_nonlocal_label)
6219 delete_handlers ();
6220
6221 /* End any sequences that failed to be closed due to syntax errors. */
6222 while (in_sequence_p ())
6223 end_sequence ();
6224
6225 /* Outside function body, can't compute type's actual size
6226 until next function's body starts. */
6227 immediate_size_expand--;
6228
6229 /* If doing stupid register allocation,
6230 mark register parms as dying here. */
6231
6232 if (obey_regdecls)
6233 {
6234 rtx tem;
6235 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6236 use_variable (regno_reg_rtx[i]);
6237
6238 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6239
6240 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6241 {
6242 use_variable (XEXP (tem, 0));
6243 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6244 }
6245
6246 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6247 use_variable (current_function_internal_arg_pointer);
6248 }
6249
6250 clear_pending_stack_adjust ();
6251 do_pending_stack_adjust ();
6252
6253 /* Mark the end of the function body.
6254 If control reaches this insn, the function can drop through
6255 without returning a value. */
6256 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6257
6258 /* Must mark the last line number note in the function, so that the test
6259 coverage code can avoid counting the last line twice. This just tells
6260 the code to ignore the immediately following line note, since there
6261 already exists a copy of this note somewhere above. This line number
6262 note is still needed for debugging though, so we can't delete it. */
6263 if (flag_test_coverage)
6264 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6265
6266 /* Output a linenumber for the end of the function.
6267 SDB depends on this. */
6268 emit_line_note_force (filename, line);
6269
6270 /* Output the label for the actual return from the function,
6271 if one is expected. This happens either because a function epilogue
6272 is used instead of a return instruction, or because a return was done
6273 with a goto in order to run local cleanups, or because of pcc-style
6274 structure returning. */
6275
6276 if (return_label)
6277 emit_label (return_label);
6278
6279 /* C++ uses this. */
6280 if (end_bindings)
6281 expand_end_bindings (0, 0, 0);
6282
6283 /* Now handle any leftover exception regions that may have been
6284 created for the parameters. */
6285 {
6286 rtx last = get_last_insn ();
6287 rtx label;
6288
6289 expand_leftover_cleanups ();
6290
6291 /* If the above emitted any code, make sure we jump around it. */
6292 if (last != get_last_insn ())
6293 {
6294 label = gen_label_rtx ();
6295 last = emit_jump_insn_after (gen_jump (label), last);
6296 last = emit_barrier_after (last);
6297 emit_label (label);
6298 }
6299 }
6300
6301 if (current_function_instrument_entry_exit)
6302 {
6303 rtx fun = DECL_RTL (current_function_decl);
6304 if (GET_CODE (fun) == MEM)
6305 fun = XEXP (fun, 0);
6306 else
6307 abort ();
6308 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6309 fun, Pmode,
6310 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6311 0,
6312 hard_frame_pointer_rtx),
6313 Pmode);
6314 }
6315
6316 /* If we had calls to alloca, and this machine needs
6317 an accurate stack pointer to exit the function,
6318 insert some code to save and restore the stack pointer. */
6319 #ifdef EXIT_IGNORE_STACK
6320 if (! EXIT_IGNORE_STACK)
6321 #endif
6322 if (current_function_calls_alloca)
6323 {
6324 rtx tem = 0;
6325
6326 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6327 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6328 }
6329
6330 /* If scalar return value was computed in a pseudo-reg,
6331 copy that to the hard return register. */
6332 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6333 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6334 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6335 >= FIRST_PSEUDO_REGISTER))
6336 {
6337 rtx real_decl_result;
6338
6339 #ifdef FUNCTION_OUTGOING_VALUE
6340 real_decl_result
6341 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6342 current_function_decl);
6343 #else
6344 real_decl_result
6345 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6346 current_function_decl);
6347 #endif
6348 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6349 /* If this is a BLKmode structure being returned in registers, then use
6350 the mode computed in expand_return. */
6351 if (GET_MODE (real_decl_result) == BLKmode)
6352 PUT_MODE (real_decl_result,
6353 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6354 emit_move_insn (real_decl_result,
6355 DECL_RTL (DECL_RESULT (current_function_decl)));
6356 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6357
6358 /* The delay slot scheduler assumes that current_function_return_rtx
6359 holds the hard register containing the return value, not a temporary
6360 pseudo. */
6361 current_function_return_rtx = real_decl_result;
6362 }
6363
6364 /* If returning a structure, arrange to return the address of the value
6365 in a place where debuggers expect to find it.
6366
6367 If returning a structure PCC style,
6368 the caller also depends on this value.
6369 And current_function_returns_pcc_struct is not necessarily set. */
6370 if (current_function_returns_struct
6371 || current_function_returns_pcc_struct)
6372 {
6373 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6374 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6375 #ifdef FUNCTION_OUTGOING_VALUE
6376 rtx outgoing
6377 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6378 current_function_decl);
6379 #else
6380 rtx outgoing
6381 = FUNCTION_VALUE (build_pointer_type (type),
6382 current_function_decl);
6383 #endif
6384
6385 /* Mark this as a function return value so integrate will delete the
6386 assignment and USE below when inlining this function. */
6387 REG_FUNCTION_VALUE_P (outgoing) = 1;
6388
6389 emit_move_insn (outgoing, value_address);
6390 use_variable (outgoing);
6391 }
6392
6393 /* If this is an implementation of __throw, do what's necessary to
6394 communicate between __builtin_eh_return and the epilogue. */
6395 expand_eh_return ();
6396
6397 /* Output a return insn if we are using one.
6398 Otherwise, let the rtl chain end here, to drop through
6399 into the epilogue. */
6400
6401 #ifdef HAVE_return
6402 if (HAVE_return)
6403 {
6404 emit_jump_insn (gen_return ());
6405 emit_barrier ();
6406 }
6407 #endif
6408
6409 /* Fix up any gotos that jumped out to the outermost
6410 binding level of the function.
6411 Must follow emitting RETURN_LABEL. */
6412
6413 /* If you have any cleanups to do at this point,
6414 and they need to create temporary variables,
6415 then you will lose. */
6416 expand_fixups (get_insns ());
6417 }
6418 \f
6419 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6420 or a single insn). */
6421
6422 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6423 static int *
6424 record_insns (insns)
6425 rtx insns;
6426 {
6427 int *vec;
6428
6429 if (GET_CODE (insns) == SEQUENCE)
6430 {
6431 int len = XVECLEN (insns, 0);
6432 vec = (int *) oballoc ((len + 1) * sizeof (int));
6433 vec[len] = 0;
6434 while (--len >= 0)
6435 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6436 }
6437 else
6438 {
6439 vec = (int *) oballoc (2 * sizeof (int));
6440 vec[0] = INSN_UID (insns);
6441 vec[1] = 0;
6442 }
6443 return vec;
6444 }
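
/* Layout sketch: for a three-insn prologue SEQUENCE whose insns have
   UIDs 10, 11 and 12, the recorded vector is { 10, 11, 12, 0 }.  The
   trailing zero terminates the scans in `contains' and in
   reposition_prologue_and_epilogue_notes below.  */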
6445
6446 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6447
6448 static int
6449 contains (insn, vec)
6450 rtx insn;
6451 int *vec;
6452 {
6453 register int i, j;
6454
6455 if (GET_CODE (insn) == INSN
6456 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6457 {
6458 int count = 0;
6459 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6460 for (j = 0; vec[j]; j++)
6461 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6462 count++;
6463 return count;
6464 }
6465 else
6466 {
6467 for (j = 0; vec[j]; j++)
6468 if (INSN_UID (insn) == vec[j])
6469 return 1;
6470 }
6471 return 0;
6472 }
6473
6474 int
6475 prologue_epilogue_contains (insn)
6476 rtx insn;
6477 {
6478 if (prologue && contains (insn, prologue))
6479 return 1;
6480 if (epilogue && contains (insn, epilogue))
6481 return 1;
6482 return 0;
6483 }
6484 #endif /* HAVE_prologue || HAVE_epilogue */
6485
6486 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6487 this into place with notes indicating where the prologue ends and where
6488 the epilogue begins. Update the basic block information when possible. */
6489
6490 void
6491 thread_prologue_and_epilogue_insns (f)
6492 rtx f ATTRIBUTE_UNUSED;
6493 {
6494 int inserted = 0;
6495
6496 #ifdef HAVE_prologue
6497 if (HAVE_prologue)
6498 {
6499 rtx seq;
6500
6501 start_sequence ();
6502 seq = gen_prologue ();
6503 emit_insn (seq);
6504
6505 /* Retain a map of the prologue insns. */
6506 if (GET_CODE (seq) != SEQUENCE)
6507 seq = get_insns ();
6508 prologue = record_insns (seq);
6509
6510 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6511 seq = gen_sequence ();
6512 end_sequence ();
6513
6514 /* If optimization is off, or perhaps for an empty function,
6515 the entry block will have no successors. */
6516 if (ENTRY_BLOCK_PTR->succ)
6517 {
6518 /* Can't deal with multiple successors of the entry block. */
6519 if (ENTRY_BLOCK_PTR->succ->succ_next)
6520 abort ();
6521
6522 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6523 inserted = 1;
6524 }
6525 else
6526 emit_insn_after (seq, f);
6527 }
6528 #endif
6529
6530 #ifdef HAVE_epilogue
6531 if (HAVE_epilogue)
6532 {
6533 edge e;
6534 basic_block bb = 0;
6535 rtx tail = get_last_insn ();
6536
6537 /* ??? This is ghastly. If function returns were not done via uses,
6538 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6539 and all of this ugliness would go away. */
6540
6541 switch (optimize)
6542 {
6543 default:
6544 /* If the exit block has no non-fake predecessors, we don't
6545 need an epilogue. Furthermore, only pay attention to the
6546 fallthru predecessors; if (conditional) return insns were
6547 generated, by definition we do not need to emit epilogue
6548 insns. */
6549
6550 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6551 if ((e->flags & EDGE_FAKE) == 0
6552 && (e->flags & EDGE_FALLTHRU) != 0)
6553 break;
6554 if (e == NULL)
6555 break;
6556
6557 /* We can't handle multiple epilogues -- if one is needed,
6558 we won't be able to place it multiple times.
6559
6560 ??? Fix epilogue expanders to not assume they are the
6561 last thing done compiling the function. Either that
6562 or copy_rtx each insn.
6563
6564 ??? Blah, it's not a simple expression to assert that
6565 we've exactly one fallthru exit edge. */
6566
6567 bb = e->src;
6568 tail = bb->end;
6569
6570 /* ??? If the last insn of the basic block is a jump, then we
6571 are creating a new basic block. Wimp out and leave these
6572 insns outside any block. */
6573 if (GET_CODE (tail) == JUMP_INSN)
6574 bb = 0;
6575
6576 /* FALLTHRU */
6577 case 0:
6578 {
6579 rtx prev, seq, first_use;
6580
6581 /* Move the USE insns at the end of a function onto a list. */
6582 prev = tail;
6583 if (GET_CODE (prev) == BARRIER
6584 || GET_CODE (prev) == NOTE)
6585 prev = prev_nonnote_insn (prev);
6586
6587 first_use = 0;
6588 if (prev
6589 && GET_CODE (prev) == INSN
6590 && GET_CODE (PATTERN (prev)) == USE)
6591 {
6592 /* If the end of the block is the use, grab hold of something
6593 else so that we emit barriers etc in the right place. */
6594 if (prev == tail)
6595 {
6596 do
6597 tail = PREV_INSN (tail);
6598 while (GET_CODE (tail) == INSN
6599 && GET_CODE (PATTERN (tail)) == USE);
6600 }
6601
6602 do
6603 {
6604 rtx use = prev;
6605 prev = prev_nonnote_insn (prev);
6606
6607 remove_insn (use);
6608 if (first_use)
6609 {
6610 NEXT_INSN (use) = first_use;
6611 PREV_INSN (first_use) = use;
6612 }
6613 else
6614 NEXT_INSN (use) = NULL_RTX;
6615 first_use = use;
6616 }
6617 while (prev
6618 && GET_CODE (prev) == INSN
6619 && GET_CODE (PATTERN (prev)) == USE);
6620 }
6621
6622 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6623 epilogue insns, the USE insns at the end of a function,
6624 the jump insn that returns, and then a BARRIER. */
6625
6626 if (GET_CODE (tail) != BARRIER)
6627 {
6628 prev = next_nonnote_insn (tail);
6629 if (!prev || GET_CODE (prev) != BARRIER)
6630 emit_barrier_after (tail);
6631 }
6632
6633 seq = gen_epilogue ();
6634 prev = tail;
6635 tail = emit_jump_insn_after (seq, tail);
6636
6637 /* Insert the USE insns immediately before the return insn, which
6638 must be the last instruction emitted in the sequence. */
6639 if (first_use)
6640 emit_insns_before (first_use, tail);
6641 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6642
6643 /* Update the tail of the basic block. */
6644 if (bb)
6645 bb->end = tail;
6646
6647 /* Retain a map of the epilogue insns. */
6648 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6649 }
6650 }
6651 }
6652 #endif
6653
6654 if (inserted)
6655 commit_edge_insertions ();
6656 }
6657
6658 /* Reposition the prologue-end and epilogue-begin notes after instruction
6659 scheduling and delayed branch scheduling. */
6660
6661 void
6662 reposition_prologue_and_epilogue_notes (f)
6663 rtx f ATTRIBUTE_UNUSED;
6664 {
6665 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6666 /* Reposition the prologue and epilogue notes. */
6667 if (n_basic_blocks)
6668 {
6669 int len;
6670
6671 if (prologue)
6672 {
6673 register rtx insn, note = 0;
6674
6675 /* Scan from the beginning until we reach the last prologue insn.
6676 We apparently can't depend on basic_block_{head,end} after
6677 reorg has run. */
6678 for (len = 0; prologue[len]; len++)
6679 ;
6680 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6681 {
6682 if (GET_CODE (insn) == NOTE)
6683 {
6684 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6685 note = insn;
6686 }
6687 else if ((len -= contains (insn, prologue)) == 0)
6688 {
6689 rtx next;
6690 /* Find the prologue-end note if we haven't already, and
6691 move it to just after the last prologue insn. */
6692 if (note == 0)
6693 {
6694 for (note = insn; (note = NEXT_INSN (note));)
6695 if (GET_CODE (note) == NOTE
6696 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6697 break;
6698 }
6699
6700 next = NEXT_INSN (note);
6701
6702 /* Whether or not we can depend on BLOCK_HEAD,
6703 attempt to keep it up-to-date. */
6704 if (BLOCK_HEAD (0) == note)
6705 BLOCK_HEAD (0) = next;
6706
6707 remove_insn (note);
6708 add_insn_after (note, insn);
6709 }
6710 }
6711 }
6712
6713 if (epilogue)
6714 {
6715 register rtx insn, note = 0;
6716
6717 /* Scan from the end until we reach the first epilogue insn.
6718 We apparently can't depend on basic_block_{head,end} after
6719 reorg has run. */
6720 for (len = 0; epilogue[len]; len++)
6721 ;
6722 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6723 {
6724 if (GET_CODE (insn) == NOTE)
6725 {
6726 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6727 note = insn;
6728 }
6729 else if ((len -= contains (insn, epilogue)) == 0)
6730 {
6731 /* Find the epilogue-begin note if we haven't already, and
6732 move it to just before the first epilogue insn. */
6733 if (note == 0)
6734 {
6735 for (note = insn; (note = PREV_INSN (note));)
6736 if (GET_CODE (note) == NOTE
6737 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6738 break;
6739 }
6740
6741 /* Whether or not we can depend on BLOCK_HEAD,
6742 attempt to keep it up-to-date. */
6743 if (n_basic_blocks
6744 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6745 BLOCK_HEAD (n_basic_blocks-1) = note;
6746
6747 remove_insn (note);
6748 add_insn_before (note, insn);
6749 }
6750 }
6751 }
6752 }
6753 #endif /* HAVE_prologue or HAVE_epilogue */
6754 }
6755
6756 /* Mark T for GC. */
6757
6758 static void
6759 mark_temp_slot (t)
6760 struct temp_slot *t;
6761 {
6762 while (t)
6763 {
6764 ggc_mark_rtx (t->slot);
6765 ggc_mark_rtx (t->address);
6766 ggc_mark_tree (t->rtl_expr);
6767
6768 t = t->next;
6769 }
6770 }
6771
6772 /* Mark P for GC. */
6773
6774 static void
6775 mark_function_status (p)
6776 struct function *p;
6777 {
6778 int i;
6779 rtx *r;
6780
6781 if (p == 0)
6782 return;
6783
6784 ggc_mark_rtx (p->arg_offset_rtx);
6785
6786 if (p->x_parm_reg_stack_loc)
6787 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6788 i > 0; --i, ++r)
6789 ggc_mark_rtx (*r);
6790
6791 ggc_mark_rtx (p->return_rtx);
6792 ggc_mark_rtx (p->x_cleanup_label);
6793 ggc_mark_rtx (p->x_return_label);
6794 ggc_mark_rtx (p->x_save_expr_regs);
6795 ggc_mark_rtx (p->x_stack_slot_list);
6796 ggc_mark_rtx (p->x_parm_birth_insn);
6797 ggc_mark_rtx (p->x_tail_recursion_label);
6798 ggc_mark_rtx (p->x_tail_recursion_reentry);
6799 ggc_mark_rtx (p->internal_arg_pointer);
6800 ggc_mark_rtx (p->x_arg_pointer_save_area);
6801 ggc_mark_tree (p->x_rtl_expr_chain);
6802 ggc_mark_rtx (p->x_last_parm_insn);
6803 ggc_mark_tree (p->x_context_display);
6804 ggc_mark_tree (p->x_trampoline_list);
6805 ggc_mark_rtx (p->epilogue_delay_list);
6806
6807 mark_temp_slot (p->x_temp_slots);
6808
6809 {
6810 struct var_refs_queue *q = p->fixup_var_refs_queue;
6811 while (q)
6812 {
6813 ggc_mark_rtx (q->modified);
6814 q = q->next;
6815 }
6816 }
6817
6818 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
6819 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
6820 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6821 ggc_mark_tree (p->x_nonlocal_labels);
6822 }
6823
6824 /* Mark the function chain ARG (which is really a struct function **)
6825 for GC. */
6826
6827 static void
6828 mark_function_chain (arg)
6829 void *arg;
6830 {
6831 struct function *f = *(struct function **) arg;
6832
6833 for (; f; f = f->next_global)
6834 {
6835 ggc_mark_tree (f->decl);
6836
6837 mark_function_status (f);
6838 mark_eh_status (f->eh);
6839 mark_stmt_status (f->stmt);
6840 mark_expr_status (f->expr);
6841 mark_emit_status (f->emit);
6842 mark_varasm_status (f->varasm);
6843
6844 if (mark_machine_status)
6845 (*mark_machine_status) (f);
6846 if (mark_lang_status)
6847 (*mark_lang_status) (f);
6848
6849 if (f->original_arg_vector)
6850 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
6851 if (f->original_decl_initial)
6852 ggc_mark_tree (f->original_decl_initial);
6853 }
6854 }
6855
6856 /* Called once, at initialization, to initialize function.c. */
6857
6858 void
6859 init_function_once ()
6860 {
6861 ggc_add_root (&all_functions, 1, sizeof all_functions,
6862 mark_function_chain);
6863 }