/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif
/* Some systems use __main in a way incompatible with its use in gcc; in those
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
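/* Illustrative note (not part of the original source): a hypothetical target
   whose entry point carries an extra leading underscore could override both
   defaults in its target header, e.g.

     #define NAME__MAIN "___main"
     #define SYMBOL__MAIN ___main

   The two macros must always name the same symbol, once quoted and once
   unquoted, which is why they may only be defined as a pair.  */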
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
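/* A worked example (illustrative comment only, not part of the original
   source): with ALIGN == 8, a power of two,

     FLOOR_ROUND (37, 8) == (37 & ~7)       == 32
     CEIL_ROUND  (37, 8) == ((37 + 7) & ~7) == 40

   FLOOR_ROUND rounds toward more negative values and CEIL_ROUND toward more
   positive ones, which is why the frame-offset code below picks one or the
   other depending on whether the frame grows downward.  */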
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*save_machine_status) PARAMS ((struct function *));
void (*restore_machine_status) PARAMS ((struct function *));
void (*mark_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));
/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
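/* Illustrative sketch (not part of the original source): a statement such as

     int x = ({ struct S tmp = make_s (); tmp.field; });

   evaluates its body at a deeper temp_slot_level.  The slot holding `tmp'
   would normally be freed when that level is popped, but because the ({...})
   result may live in it, preserve_temp_slots pretends the slot was allocated
   one level up so it survives until the enclosing statement is done.  */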
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
                                               HOST_WIDE_INT, int, tree));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, int, int, struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
                                    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
                                          rtx, int, struct hash_table *));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, int, int));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PARAMS ((tree, int));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
static void emit_return_into_block PARAMS ((basic_block));
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
                                          struct hash_table *));
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
                                                         struct hash_table *,
                                                         hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PARAMS ((struct temp_slot *));
static void mark_function_status PARAMS ((struct function *));
static void mark_function_chain PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
327 /* Save the current context for compilation of a nested function.
328 This is called from language-specific code. The caller should use
329 the save_lang_status callback to save any language-specific state,
330 since this function knows only about language-independent
334 push_function_context_to (context
)
337 struct function
*p
, *context_data
;
341 context_data
= (context
== current_function_decl
343 : find_function_data (context
));
344 context_data
->contains_functions
= 1;
348 init_dummy_function_start ();
351 p
->next
= outer_function_chain
;
352 outer_function_chain
= p
;
353 p
->fixup_var_refs_queue
= 0;
355 save_tree_status (p
);
356 if (save_lang_status
)
357 (*save_lang_status
) (p
);
358 if (save_machine_status
)
359 (*save_machine_status
) (p
);
365 push_function_context ()
367 push_function_context_to (current_function_decl
);
370 /* Restore the last saved context, at the end of a nested function.
371 This function is called from language-specific code. */
374 pop_function_context_from (context
)
375 tree context ATTRIBUTE_UNUSED
;
377 struct function
*p
= outer_function_chain
;
378 struct var_refs_queue
*queue
;
379 struct var_refs_queue
*next
;
382 outer_function_chain
= p
->next
;
384 current_function_decl
= p
->decl
;
387 restore_tree_status (p
);
388 restore_emit_status (p
);
390 if (restore_machine_status
)
391 (*restore_machine_status
) (p
);
392 if (restore_lang_status
)
393 (*restore_lang_status
) (p
);
395 /* Finish doing put_var_into_stack for any of our variables
396 which became addressable during the nested function. */
397 for (queue
= p
->fixup_var_refs_queue
; queue
; queue
= next
)
400 fixup_var_refs (queue
->modified
, queue
->promoted_mode
,
401 queue
->unsignedp
, 0);
404 p
->fixup_var_refs_queue
= 0;
406 /* Reset variables that have known state during rtx generation. */
407 rtx_equal_function_value_matters
= 1;
408 virtuals_instantiated
= 0;
412 pop_function_context ()
414 pop_function_context_from (current_function_decl
);
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
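/* Illustrative sketch (not part of the original source): a caller that wants
   a word-sized slot in the current function with the mode's own alignment
   typically goes through the assign_stack_local wrapper below, e.g.

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while passing -1 for ALIGN asks for BIGGEST_ALIGNMENT and rounds the size
   up to a multiple of it.  */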
523 assign_stack_local_1 (mode
, size
, align
, function
)
524 enum machine_mode mode
;
527 struct function
*function
;
529 register rtx x
, addr
;
530 int bigend_correction
= 0;
533 /* Allocate in the memory associated with the function in whose frame
535 if (function
!= cfun
)
536 push_obstacks (function
->function_obstack
,
537 function
->function_maybepermanent_obstack
);
543 alignment
= GET_MODE_ALIGNMENT (mode
);
545 alignment
= BIGGEST_ALIGNMENT
;
547 /* Allow the target to (possibly) increase the alignment of this
549 type
= type_for_mode (mode
, 0);
551 alignment
= LOCAL_ALIGNMENT (type
, alignment
);
553 alignment
/= BITS_PER_UNIT
;
555 else if (align
== -1)
557 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
558 size
= CEIL_ROUND (size
, alignment
);
561 alignment
= align
/ BITS_PER_UNIT
;
563 #ifdef FRAME_GROWS_DOWNWARD
564 function
->x_frame_offset
-= size
;
567 /* Ignore alignment we can't do with expected alignment of the boundary. */
568 if (alignment
* BITS_PER_UNIT
> PREFERRED_STACK_BOUNDARY
)
569 alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
571 if (function
->stack_alignment_needed
< alignment
* BITS_PER_UNIT
)
572 function
->stack_alignment_needed
= alignment
* BITS_PER_UNIT
;
574 /* Round frame offset to that alignment.
575 We must be careful here, since FRAME_OFFSET might be negative and
576 division with a negative dividend isn't as well defined as we might
577 like. So we instead assume that ALIGNMENT is a power of two and
578 use logical operations which are unambiguous. */
579 #ifdef FRAME_GROWS_DOWNWARD
580 function
->x_frame_offset
= FLOOR_ROUND (function
->x_frame_offset
, alignment
);
582 function
->x_frame_offset
= CEIL_ROUND (function
->x_frame_offset
, alignment
);
585 /* On a big-endian machine, if we are allocating more space than we will use,
586 use the least significant bytes of those that are allocated. */
587 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
588 bigend_correction
= size
- GET_MODE_SIZE (mode
);
590 /* If we have already instantiated virtual registers, return the actual
591 address relative to the frame pointer. */
592 if (function
== cfun
&& virtuals_instantiated
)
593 addr
= plus_constant (frame_pointer_rtx
,
594 (frame_offset
+ bigend_correction
595 + STARTING_FRAME_OFFSET
));
597 addr
= plus_constant (virtual_stack_vars_rtx
,
598 function
->x_frame_offset
+ bigend_correction
);
600 #ifndef FRAME_GROWS_DOWNWARD
601 function
->x_frame_offset
+= size
;
604 x
= gen_rtx_MEM (mode
, addr
);
606 function
->x_stack_slot_list
607 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->x_stack_slot_list
);
609 if (function
!= cfun
)
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
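/* Illustrative sketch (not part of the original source): most callers reach
   this routine through assign_stack_temp or assign_temp below, e.g.

     rtx t = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);

   which requests a slot that is freed by the next free_temp_slots, whereas
   KEEP == 1 would keep the slot alive for the enclosing block.  */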
644 assign_stack_temp_for_type (mode
, size
, keep
, type
)
645 enum machine_mode mode
;
652 struct temp_slot
*p
, *best_p
= 0;
654 /* If SIZE is -1 it means that somebody tried to allocate a temporary
655 of a variable size. */
659 /* If we know the alias set for the memory that will be used, use
660 it. If there's no TYPE, then we don't know anything about the
661 alias set for the memory. */
663 alias_set
= get_alias_set (type
);
667 align
= GET_MODE_ALIGNMENT (mode
);
669 align
= BIGGEST_ALIGNMENT
;
672 type
= type_for_mode (mode
, 0);
674 align
= LOCAL_ALIGNMENT (type
, align
);
676 /* Try to find an available, already-allocated temporary of the proper
677 mode which meets the size and alignment requirements. Choose the
678 smallest one with the closest alignment. */
679 for (p
= temp_slots
; p
; p
= p
->next
)
680 if (p
->align
>= align
&& p
->size
>= size
&& GET_MODE (p
->slot
) == mode
682 && (!flag_strict_aliasing
683 || (alias_set
&& p
->alias_set
== alias_set
))
684 && (best_p
== 0 || best_p
->size
> p
->size
685 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
687 if (p
->align
== align
&& p
->size
== size
)
695 /* Make our best, if any, the one to use. */
698 /* If there are enough aligned bytes left over, make them into a new
699 temp_slot so that the extra bytes don't get wasted. Do this only
700 for BLKmode slots, so that we can be sure of the alignment. */
701 if (GET_MODE (best_p
->slot
) == BLKmode
702 /* We can't split slots if -fstrict-aliasing because the
703 information about the alias set for the new slot will be
705 && !flag_strict_aliasing
)
707 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
708 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
710 if (best_p
->size
- rounded_size
>= alignment
)
712 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
713 p
->in_use
= p
->addr_taken
= 0;
714 p
->size
= best_p
->size
- rounded_size
;
715 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
716 p
->full_size
= best_p
->full_size
- rounded_size
;
717 p
->slot
= gen_rtx_MEM (BLKmode
,
718 plus_constant (XEXP (best_p
->slot
, 0),
720 p
->align
= best_p
->align
;
723 p
->next
= temp_slots
;
726 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
729 best_p
->size
= rounded_size
;
730 best_p
->full_size
= rounded_size
;
737 /* If we still didn't find one, make a new temporary. */
740 HOST_WIDE_INT frame_offset_old
= frame_offset
;
742 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
744 /* We are passing an explicit alignment request to assign_stack_local.
745 One side effect of that is assign_stack_local will not round SIZE
746 to ensure the frame offset remains suitably aligned.
748 So for requests which depended on the rounding of SIZE, we go ahead
749 and round it now. We also make sure ALIGNMENT is at least
750 BIGGEST_ALIGNMENT. */
751 if (mode
== BLKmode
&& align
< BIGGEST_ALIGNMENT
)
753 p
->slot
= assign_stack_local (mode
,
755 ? CEIL_ROUND (size
, align
/ BITS_PER_UNIT
)
760 p
->alias_set
= alias_set
;
762 /* The following slot size computation is necessary because we don't
763 know the actual size of the temporary slot until assign_stack_local
764 has performed all the frame alignment and size rounding for the
765 requested temporary. Note that extra space added for alignment
766 can be either above or below this stack slot depending on which
767 way the frame grows. We include the extra space if and only if it
768 is above this slot. */
769 #ifdef FRAME_GROWS_DOWNWARD
770 p
->size
= frame_offset_old
- frame_offset
;
775 /* Now define the fields used by combine_temp_slots. */
776 #ifdef FRAME_GROWS_DOWNWARD
777 p
->base_offset
= frame_offset
;
778 p
->full_size
= frame_offset_old
- frame_offset
;
780 p
->base_offset
= frame_offset_old
;
781 p
->full_size
= frame_offset
- frame_offset_old
;
784 p
->next
= temp_slots
;
790 p
->rtl_expr
= seq_rtl_expr
;
794 p
->level
= target_temp_slot_level
;
799 p
->level
= var_temp_slot_level
;
804 p
->level
= temp_slot_level
;
808 /* We may be reusing an old slot, so clear any MEM flags that may have been
810 RTX_UNCHANGING_P (p
->slot
) = 0;
811 MEM_IN_STRUCT_P (p
->slot
) = 0;
812 MEM_SCALAR_P (p
->slot
) = 0;
813 MEM_ALIAS_SET (p
->slot
) = 0;
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   arithmetic.  */
837 assign_temp (type
, keep
, memory_required
, dont_promote
)
841 int dont_promote ATTRIBUTE_UNUSED
;
843 enum machine_mode mode
= TYPE_MODE (type
);
844 #ifndef PROMOTE_FOR_CALL_ONLY
845 int unsignedp
= TREE_UNSIGNED (type
);
848 if (mode
== BLKmode
|| memory_required
)
850 HOST_WIDE_INT size
= int_size_in_bytes (type
);
      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
858 /* Unfortunately, we don't yet know how to allocate variable-sized
859 temporaries. However, sometimes we have a fixed upper limit on
860 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
861 instead. This is the case for Chill variable-sized strings. */
862 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
863 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
864 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
865 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
867 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
868 MEM_SET_IN_STRUCT_P (tmp
, AGGREGATE_TYPE_P (type
));
872 #ifndef PROMOTE_FOR_CALL_ONLY
874 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
877 return gen_reg_rtx (mode
);
880 /* Combine temporary stack slots which are adjacent on the stack.
882 This allows for better use of already allocated stack space. This is only
883 done for BLKmode slots because we can be sure that we won't have alignment
884 problems in this case. */
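/* Illustrative note (not part of the original source): two free BLKmode
   slots P and Q are mergeable exactly when they are adjacent in the frame,
   i.e. when p->base_offset + p->full_size == q->base_offset (or the
   reverse), in which case their full_size values are simply added and one
   of the two slot records is dropped.  */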
887 combine_temp_slots ()
889 struct temp_slot
*p
, *q
;
890 struct temp_slot
*prev_p
, *prev_q
;
893 /* We can't combine slots, because the information about which slot
894 is in which alias set will be lost. */
895 if (flag_strict_aliasing
)
  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
900 if (! flag_expensive_optimizations
)
901 for (p
= temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
902 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
905 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
909 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
910 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
913 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
915 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
917 /* Q comes after P; combine Q into P. */
919 p
->full_size
+= q
->full_size
;
922 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
924 /* P comes after Q; combine P into Q. */
926 q
->full_size
+= p
->full_size
;
931 /* Either delete Q or advance past it. */
933 prev_q
->next
= q
->next
;
937 /* Either delete P or advance past it. */
941 prev_p
->next
= p
->next
;
943 temp_slots
= p
->next
;
950 /* Find the temp slot corresponding to the object at address X. */
952 static struct temp_slot
*
953 find_temp_slot_from_address (x
)
959 for (p
= temp_slots
; p
; p
= p
->next
)
964 else if (XEXP (p
->slot
, 0) == x
966 || (GET_CODE (x
) == PLUS
967 && XEXP (x
, 0) == virtual_stack_vars_rtx
968 && GET_CODE (XEXP (x
, 1)) == CONST_INT
969 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
970 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
973 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
974 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
975 if (XEXP (next
, 0) == x
)
  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
981 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == REG
982 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
984 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
985 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
991 /* Indicate that NEW is an alternate way of referring to the temp slot
992 that previously was known by OLD. */
995 update_temp_slot_address (old
, new)
1000 if (rtx_equal_p (old
, new))
1003 p
= find_temp_slot_from_address (old
);
  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
1012 if (GET_CODE (old
) != PLUS
)
1015 if (GET_CODE (new) == REG
)
1017 update_temp_slot_address (XEXP (old
, 0), new);
1018 update_temp_slot_address (XEXP (old
, 1), new);
1021 else if (GET_CODE (new) != PLUS
)
1024 if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 0)))
1025 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 1));
1026 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 0)))
1027 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 1));
1028 else if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 1)))
1029 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 0));
1030 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 1)))
1031 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 0));
1036 /* Otherwise add an alias for the temp's address. */
1037 else if (p
->address
== 0)
1041 if (GET_CODE (p
->address
) != EXPR_LIST
)
1042 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1044 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1048 /* If X could be a reference to a temporary slot, mark the fact that its
1049 address was taken. */
1052 mark_temp_addr_taken (x
)
1055 struct temp_slot
*p
;
1060 /* If X is not in memory or is at a constant address, it cannot be in
1061 a temporary slot. */
1062 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1065 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
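/* Illustrative sketch (not part of the original source): in a GNU C
   statement expression such as

     struct big b = ({ struct big tmp; fill (&tmp); tmp; });

   the value of the last statement may live in a temporary stack slot, so
   that slot must be promoted here rather than freed along with the rest of
   the statement's temporaries.  */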
1080 preserve_temp_slots (x
)
1083 struct temp_slot
*p
= 0;
1085 /* If there is no result, we still might have some objects whose address
1086 were taken, so we need to make sure they stay around. */
1089 for (p
= temp_slots
; p
; p
= p
->next
)
1090 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1096 /* If X is a register that is being used as a pointer, see if we have
1097 a temporary slot we know it points to. To be consistent with
1098 the code below, we really should preserve all non-kept slots
1099 if we can't find a match, but that seems to be much too costly. */
1100 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1101 p
= find_temp_slot_from_address (x
);
1103 /* If X is not in memory or is at a constant address, it cannot be in
1104 a temporary slot, but it can contain something whose address was
1106 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1108 for (p
= temp_slots
; p
; p
= p
->next
)
1109 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1115 /* First see if we can find a match. */
1117 p
= find_temp_slot_from_address (XEXP (x
, 0));
1121 /* Move everything at our level whose address was taken to our new
1122 level in case we used its address. */
1123 struct temp_slot
*q
;
1125 if (p
->level
== temp_slot_level
)
1127 for (q
= temp_slots
; q
; q
= q
->next
)
1128 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1137 /* Otherwise, preserve all non-kept slots at this level. */
1138 for (p
= temp_slots
; p
; p
= p
->next
)
1139 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */
1149 preserve_rtl_expr_result (x
)
1152 struct temp_slot
*p
;
1154 /* If X is not in memory or is at a constant address, it cannot be in
1155 a temporary slot. */
1156 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1159 /* If we can find a match, move it to our level unless it is already at
1161 p
= find_temp_slot_from_address (XEXP (x
, 0));
1164 p
->level
= MIN (p
->level
, temp_slot_level
);
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */
1181 struct temp_slot
*p
;
1183 for (p
= temp_slots
; p
; p
= p
->next
)
1184 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1185 && p
->rtl_expr
== 0)
1188 combine_temp_slots ();
1191 /* Free all temporary slots used in T, an RTL_EXPR node. */
1194 free_temps_for_rtl_expr (t
)
1197 struct temp_slot
*p
;
1199 for (p
= temp_slots
; p
; p
= p
->next
)
1200 if (p
->rtl_expr
== t
)
1202 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1203 needs to be preserved. This can happen if a temporary in
1204 the RTL_EXPR was addressed; preserve_temp_slots will move
1205 the temporary into a higher level. */
1206 if (temp_slot_level
<= p
->level
)
1209 p
->rtl_expr
= NULL_TREE
;
1212 combine_temp_slots ();
1215 /* Mark all temporaries ever allocated in this function as not suitable
1216 for reuse until the current level is exited. */
1219 mark_all_temps_used ()
1221 struct temp_slot
*p
;
1223 for (p
= temp_slots
; p
; p
= p
->next
)
1225 p
->in_use
= p
->keep
= 1;
1226 p
->level
= MIN (p
->level
, temp_slot_level
);
1230 /* Push deeper into the nesting level for stack temporaries. */
/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */
1243 push_temp_slots_for_block ()
1247 var_temp_slot_level
= temp_slot_level
;
1250 /* Likewise, but save the new level as the place to allocate temporaries
1251 for TARGET_EXPRs. */
1254 push_temp_slots_for_target ()
1258 target_temp_slot_level
= temp_slot_level
;
1261 /* Set and get the value of target_temp_slot_level. The only
1262 permitted use of these functions is to save and restore this value. */
1265 get_target_temp_slot_level ()
1267 return target_temp_slot_level
;
1271 set_target_temp_slot_level (level
)
1274 target_temp_slot_level
= level
;
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */
1284 struct temp_slot
*p
;
1286 for (p
= temp_slots
; p
; p
= p
->next
)
1287 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1290 combine_temp_slots ();
1295 /* Initialize temporary slots. */
1300 /* We have not allocated any temporaries yet. */
1302 temp_slot_level
= 0;
1303 var_temp_slot_level
= 0;
1304 target_temp_slot_level
= 0;
1307 /* Retroactively move an auto variable from a register to a stack slot.
1308 This is done when an address-reference to the variable is seen. */
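/* Illustrative sketch (not part of the original source): this path is taken
   for code like

     int i = 0;
     int *p = &i;

   where `i' had been given a pseudo register, but taking its address forces
   it into a stack slot so that the address is meaningful.  */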
1311 put_var_into_stack (decl
)
1315 enum machine_mode promoted_mode
, decl_mode
;
1316 struct function
*function
= 0;
1318 int can_use_addressof
;
1320 context
= decl_function_context (decl
);
1322 /* Get the current rtl used for this object and its original mode. */
1323 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1325 /* No need to do anything if decl has no rtx yet
1326 since in that case caller is setting TREE_ADDRESSABLE
1327 and a stack slot will be assigned when the rtl is made. */
1331 /* Get the declared mode for this object. */
1332 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1333 : DECL_MODE (decl
));
1334 /* Get the mode it's actually stored in. */
1335 promoted_mode
= GET_MODE (reg
);
1337 /* If this variable comes from an outer function,
1338 find that function's saved context. */
1339 if (context
!= current_function_decl
&& context
!= inline_function_decl
)
1340 for (function
= outer_function_chain
; function
; function
= function
->next
)
1341 if (function
->decl
== context
)
1344 /* If this is a variable-size object with a pseudo to address it,
1345 put that pseudo into the stack, if the var is nonlocal. */
1346 if (DECL_NONLOCAL (decl
)
1347 && GET_CODE (reg
) == MEM
1348 && GET_CODE (XEXP (reg
, 0)) == REG
1349 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1351 reg
= XEXP (reg
, 0);
1352 decl_mode
= promoted_mode
= GET_MODE (reg
);
1358 /* FIXME make it work for promoted modes too */
1359 && decl_mode
== promoted_mode
1360 #ifdef NON_SAVING_SETJMP
1361 && ! (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1365 /* If we can't use ADDRESSOF, make sure we see through one we already
1367 if (! can_use_addressof
&& GET_CODE (reg
) == MEM
1368 && GET_CODE (XEXP (reg
, 0)) == ADDRESSOF
)
1369 reg
= XEXP (XEXP (reg
, 0), 0);
1371 /* Now we should have a value that resides in one or more pseudo regs. */
1373 if (GET_CODE (reg
) == REG
)
1375 /* If this variable lives in the current function and we don't need
1376 to put things in the stack for the sake of setjmp, try to keep it
1377 in a register until we know we actually need the address. */
1378 if (can_use_addressof
)
1379 gen_mem_addressof (reg
, decl
);
1381 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1382 promoted_mode
, decl_mode
,
1383 TREE_SIDE_EFFECTS (decl
), 0,
1384 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1387 else if (GET_CODE (reg
) == CONCAT
)
1389 /* A CONCAT contains two pseudos; put them both in the stack.
1390 We do it so they end up consecutive. */
1391 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1392 tree part_type
= type_for_mode (part_mode
, 0);
1393 #ifdef FRAME_GROWS_DOWNWARD
1394 /* Since part 0 should have a lower address, do it second. */
1395 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1396 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1397 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1399 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1400 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1401 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1404 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1405 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1406 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1408 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1409 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1410 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0,
1414 /* Change the CONCAT into a combined MEM for both parts. */
1415 PUT_CODE (reg
, MEM
);
1416 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1417 MEM_ALIAS_SET (reg
) = get_alias_set (decl
);
1418 MEM_SET_IN_STRUCT_P (reg
, AGGREGATE_TYPE_P (TREE_TYPE (decl
)));
1420 /* The two parts are in memory order already.
1421 Use the lower parts address as ours. */
1422 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1423 /* Prevent sharing of rtl that might lose. */
1424 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1425 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
1430 if (current_function_check_memory_usage
)
1431 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
1432 XEXP (reg
, 0), Pmode
,
1433 GEN_INT (GET_MODE_SIZE (GET_MODE (reg
))),
1434 TYPE_MODE (sizetype
),
1435 GEN_INT (MEMORY_USE_RW
),
1436 TYPE_MODE (integer_type_node
));
1439 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1440 into the stack frame of FUNCTION (0 means the current function).
1441 DECL_MODE is the machine mode of the user-level data type.
1442 PROMOTED_MODE is the machine mode of the register.
1443 VOLATILE_P is nonzero if this is for a "volatile" decl.
1444 USED_P is nonzero if this reg might have already been used in an insn. */
1447 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
,
1448 original_regno
, used_p
, ht
)
1449 struct function
*function
;
1452 enum machine_mode promoted_mode
, decl_mode
;
1456 struct hash_table
*ht
;
1458 struct function
*func
= function
? function
: cfun
;
1460 int regno
= original_regno
;
1463 regno
= REGNO (reg
);
1465 if (regno
< func
->x_max_parm_reg
)
1466 new = func
->x_parm_reg_stack_loc
[regno
];
1468 new = assign_stack_local_1 (decl_mode
, GET_MODE_SIZE (decl_mode
), 0, func
);
1470 PUT_CODE (reg
, MEM
);
1471 PUT_MODE (reg
, decl_mode
);
1472 XEXP (reg
, 0) = XEXP (new, 0);
1473 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1474 MEM_VOLATILE_P (reg
) = volatile_p
;
1476 /* If this is a memory ref that contains aggregate components,
1477 mark it as such for cse and loop optimize. If we are reusing a
1478 previously generated stack slot, then we need to copy the bit in
1479 case it was set for other reasons. For instance, it is set for
1480 __builtin_va_alist. */
1481 MEM_SET_IN_STRUCT_P (reg
,
1482 AGGREGATE_TYPE_P (type
) || MEM_IN_STRUCT_P (new));
1483 MEM_ALIAS_SET (reg
) = get_alias_set (type
);
1485 /* Now make sure that all refs to the variable, previously made
1486 when it was a register, are fixed up to be valid again. */
1488 if (used_p
&& function
!= 0)
1490 struct var_refs_queue
*temp
;
1493 = (struct var_refs_queue
*) xmalloc (sizeof (struct var_refs_queue
));
1494 temp
->modified
= reg
;
1495 temp
->promoted_mode
= promoted_mode
;
1496 temp
->unsignedp
= TREE_UNSIGNED (type
);
1497 temp
->next
= function
->fixup_var_refs_queue
;
1498 function
->fixup_var_refs_queue
= temp
;
1501 /* Variable is local; fix it up now. */
1502 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
), ht
);
1506 fixup_var_refs (var
, promoted_mode
, unsignedp
, ht
)
1508 enum machine_mode promoted_mode
;
1510 struct hash_table
*ht
;
1513 rtx first_insn
= get_insns ();
1514 struct sequence_stack
*stack
= seq_stack
;
1515 tree rtl_exps
= rtl_expr_chain
;
1518 /* Must scan all insns for stack-refs that exceed the limit. */
1519 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
,
1521 /* If there's a hash table, it must record all uses of VAR. */
1525 /* Scan all pending sequences too. */
1526 for (; stack
; stack
= stack
->next
)
1528 push_to_sequence (stack
->first
);
1529 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1530 stack
->first
, stack
->next
!= 0, 0);
1531 /* Update remembered end of sequence
1532 in case we added an insn at the end. */
1533 stack
->last
= get_last_insn ();
1537 /* Scan all waiting RTL_EXPRs too. */
1538 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1540 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1541 if (seq
!= const0_rtx
&& seq
!= 0)
1543 push_to_sequence (seq
);
1544 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0,
1550 /* Scan the catch clauses for exception handling too. */
1551 push_to_full_sequence (catch_clauses
, catch_clauses_last
);
1552 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, catch_clauses
,
1554 end_full_sequence (&catch_clauses
, &catch_clauses_last
);
1556 /* Scan sequences saved in CALL_PLACEHOLDERS too. */
1557 for (insn
= first_insn
; insn
; insn
= NEXT_INSN (insn
))
1559 if (GET_CODE (insn
) == CALL_INSN
1560 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1564 /* Look at the Normal call, sibling call and tail recursion
1565 sequences attached to the CALL_PLACEHOLDER. */
1566 for (i
= 0; i
< 3; i
++)
1568 rtx seq
= XEXP (PATTERN (insn
), i
);
1571 push_to_sequence (seq
);
1572 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1574 XEXP (PATTERN (insn
), i
) = get_insns ();
1582 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1583 some part of an insn. Return a struct fixup_replacement whose OLD
1584 value is equal to X. Allocate a new structure if no such entry exists. */
1586 static struct fixup_replacement
*
1587 find_fixup_replacement (replacements
, x
)
1588 struct fixup_replacement
**replacements
;
1591 struct fixup_replacement
*p
;
1593 /* See if we have already replaced this. */
1594 for (p
= *replacements
; p
!= 0 && ! rtx_equal_p (p
->old
, x
); p
= p
->next
)
1599 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1602 p
->next
= *replacements
;
1609 /* Scan the insn-chain starting with INSN for refs to VAR
1610 and fix them up. TOPLEVEL is nonzero if this chain is the
1611 main chain of insns for the current function. */
1614 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
, ht
)
1616 enum machine_mode promoted_mode
;
1620 struct hash_table
*ht
;
1623 rtx insn_list
= NULL_RTX
;
1625 /* If we already know which INSNs reference VAR there's no need
1626 to walk the entire instruction chain. */
1629 insn_list
= ((struct insns_for_mem_entry
*)
1630 hash_lookup (ht
, var
, /*create=*/0, /*copy=*/0))->insns
;
1631 insn
= insn_list
? XEXP (insn_list
, 0) : NULL_RTX
;
1632 insn_list
= XEXP (insn_list
, 1);
1637 rtx next
= NEXT_INSN (insn
);
1638 rtx set
, prev
, prev_set
;
1641 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1643 /* Remember the notes in case we delete the insn. */
1644 note
= REG_NOTES (insn
);
1646 /* If this is a CLOBBER of VAR, delete it.
1648 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1649 and REG_RETVAL notes too. */
1650 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1651 && (XEXP (PATTERN (insn
), 0) == var
1652 || (GET_CODE (XEXP (PATTERN (insn
), 0)) == CONCAT
1653 && (XEXP (XEXP (PATTERN (insn
), 0), 0) == var
1654 || XEXP (XEXP (PATTERN (insn
), 0), 1) == var
))))
1656 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1657 /* The REG_LIBCALL note will go away since we are going to
1658 turn INSN into a NOTE, so just delete the
1659 corresponding REG_RETVAL note. */
1660 remove_note (XEXP (note
, 0),
1661 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1664 /* In unoptimized compilation, we shouldn't call delete_insn
1665 except in jump.c doing warnings. */
1666 PUT_CODE (insn
, NOTE
);
1667 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1668 NOTE_SOURCE_FILE (insn
) = 0;
1671 /* The insn to load VAR from a home in the arglist
1672 is now a no-op. When we see it, just delete it.
1673 Similarly if this is storing VAR from a register from which
1674 it was loaded in the previous insn. This will occur
1675 when an ADDRESSOF was made for an arglist slot. */
1677 && (set
= single_set (insn
)) != 0
1678 && SET_DEST (set
) == var
1679 /* If this represents the result of an insn group,
1680 don't delete the insn. */
1681 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1682 && (rtx_equal_p (SET_SRC (set
), var
)
1683 || (GET_CODE (SET_SRC (set
)) == REG
1684 && (prev
= prev_nonnote_insn (insn
)) != 0
1685 && (prev_set
= single_set (prev
)) != 0
1686 && SET_DEST (prev_set
) == SET_SRC (set
)
1687 && rtx_equal_p (SET_SRC (prev_set
), var
))))
1689 /* In unoptimized compilation, we shouldn't call delete_insn
1690 except in jump.c doing warnings. */
1691 PUT_CODE (insn
, NOTE
);
1692 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1693 NOTE_SOURCE_FILE (insn
) = 0;
1694 if (insn
== last_parm_insn
)
1695 last_parm_insn
= PREV_INSN (next
);
1699 struct fixup_replacement
*replacements
= 0;
1700 rtx next_insn
= NEXT_INSN (insn
);
1702 if (SMALL_REGISTER_CLASSES
)
1704 /* If the insn that copies the results of a CALL_INSN
1705 into a pseudo now references VAR, we have to use an
1706 intermediate pseudo since we want the life of the
1707 return value register to be only a single insn.
1709 If we don't use an intermediate pseudo, such things as
1710 address computations to make the address of VAR valid
1711 if it is not can be placed between the CALL_INSN and INSN.
1713 To make sure this doesn't happen, we record the destination
1714 of the CALL_INSN and see if the next insn uses both that
1717 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1718 && reg_mentioned_p (var
, PATTERN (insn
))
1719 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1721 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1723 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1725 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
1729 if (GET_CODE (insn
) == CALL_INSN
1730 && GET_CODE (PATTERN (insn
)) == SET
)
1731 call_dest
= SET_DEST (PATTERN (insn
));
1732 else if (GET_CODE (insn
) == CALL_INSN
1733 && GET_CODE (PATTERN (insn
)) == PARALLEL
1734 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1735 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1740 /* See if we have to do anything to INSN now that VAR is in
1741 memory. If it needs to be loaded into a pseudo, use a single
1742 pseudo for the entire insn in case there is a MATCH_DUP
1743 between two operands. We pass a pointer to the head of
1744 a list of struct fixup_replacements. If fixup_var_refs_1
1745 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1746 it will record them in this list.
1748 If it allocated a pseudo for any replacement, we copy into
1751 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1754 /* If this is last_parm_insn, and any instructions were output
1755 after it to fix it up, then we must set last_parm_insn to
1756 the last such instruction emitted. */
1757 if (insn
== last_parm_insn
)
1758 last_parm_insn
= PREV_INSN (next_insn
);
1760 while (replacements
)
1762 if (GET_CODE (replacements
->new) == REG
)
1767 /* OLD might be a (subreg (mem)). */
1768 if (GET_CODE (replacements
->old
) == SUBREG
)
1770 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1773 = fixup_stack_1 (replacements
->old
, insn
);
1775 insert_before
= insn
;
1777 /* If we are changing the mode, do a conversion.
1778 This might be wasteful, but combine.c will
1779 eliminate much of the waste. */
1781 if (GET_MODE (replacements
->new)
1782 != GET_MODE (replacements
->old
))
1785 convert_move (replacements
->new,
1786 replacements
->old
, unsignedp
);
1787 seq
= gen_sequence ();
1791 seq
= gen_move_insn (replacements
->new,
1794 emit_insn_before (seq
, insert_before
);
1797 replacements
= replacements
->next
;
1801 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1802 But don't touch other insns referred to by reg-notes;
1803 we will get them elsewhere. */
1806 if (GET_CODE (note
) != INSN_LIST
)
1808 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1809 note
= XEXP (note
, 1);
1817 insn
= XEXP (insn_list
, 0);
1818 insn_list
= XEXP (insn_list
, 1);
1825 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1826 See if the rtx expression at *LOC in INSN needs to be changed.
1828 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1829 contain a list of original rtx's and replacements. If we find that we need
1830 to modify this insn by replacing a memory reference with a pseudo or by
1831 making a new MEM to implement a SUBREG, we consult that list to see if
1832 we have already chosen a replacement. If none has already been allocated,
1833 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1834 or the SUBREG, as appropriate, to the pseudo. */
1837 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1839 enum machine_mode promoted_mode
;
1842 struct fixup_replacement
**replacements
;
1845 register rtx x
= *loc
;
1846 RTX_CODE code
= GET_CODE (x
);
1847 register const char *fmt
;
1848 register rtx tem
, tem1
;
1849 struct fixup_replacement
*replacement
;
1854 if (XEXP (x
, 0) == var
)
1856 /* Prevent sharing of rtl that might lose. */
1857 rtx sub
= copy_rtx (XEXP (var
, 0));
1859 if (! validate_change (insn
, loc
, sub
, 0))
1861 rtx y
= gen_reg_rtx (GET_MODE (sub
));
1864 /* We should be able to replace with a register or all is lost.
1865 Note that we can't use validate_change to verify this, since
1866 we're not caring for replacing all dups simultaneously. */
1867 if (! validate_replace_rtx (*loc
, y
, insn
))
1870 /* Careful! First try to recognize a direct move of the
1871 value, mimicking how things are done in gen_reload wrt
1872 PLUS. Consider what happens when insn is a conditional
1873 move instruction and addsi3 clobbers flags. */
1876 new_insn
= emit_insn (gen_rtx_SET (VOIDmode
, y
, sub
));
1877 seq
= gen_sequence ();
1880 if (recog_memoized (new_insn
) < 0)
1882 /* That failed. Fall back on force_operand and hope. */
1885 force_operand (sub
, y
);
1886 seq
= gen_sequence ();
1891 /* Don't separate setter from user. */
1892 if (PREV_INSN (insn
) && sets_cc0_p (PREV_INSN (insn
)))
1893 insn
= PREV_INSN (insn
);
1896 emit_insn_before (seq
, insn
);
1904 /* If we already have a replacement, use it. Otherwise,
1905 try to fix up this address in case it is invalid. */
1907 replacement
= find_fixup_replacement (replacements
, var
);
1908 if (replacement
->new)
1910 *loc
= replacement
->new;
1914 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1916 /* Unless we are forcing memory to register or we changed the mode,
1917 we can leave things the way they are if the insn is valid. */
1919 INSN_CODE (insn
) = -1;
1920 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1921 && recog_memoized (insn
) >= 0)
1924 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1928 /* If X contains VAR, we need to unshare it here so that we update
1929 each occurrence separately. But all identical MEMs in one insn
1930 must be replaced with the same rtx because of the possibility of
1933 if (reg_mentioned_p (var
, x
))
1935 replacement
= find_fixup_replacement (replacements
, x
);
1936 if (replacement
->new == 0)
1937 replacement
->new = copy_most_rtx (x
, var
);
1939 *loc
= x
= replacement
->new;
1955 /* Note that in some cases those types of expressions are altered
1956 by optimize_bit_field, and do not survive to get here. */
1957 if (XEXP (x, 0) == var
1958 || (GET_CODE (XEXP (x, 0)) == SUBREG
1959 && SUBREG_REG (XEXP (x, 0)) == var))
1961 /* Get TEM as a valid MEM in the mode presently in the insn.
1963 We don't worry about the possibility of MATCH_DUP here; it
1964 is highly unlikely and would be tricky to handle. */
1967 if (GET_CODE (tem) == SUBREG)
1969 if (GET_MODE_BITSIZE (GET_MODE (tem))
1970 > GET_MODE_BITSIZE (GET_MODE (var)))
1972 replacement = find_fixup_replacement (replacements, var);
1973 if (replacement->new == 0)
1974 replacement->new = gen_reg_rtx (GET_MODE (var));
1975 SUBREG_REG (tem) = replacement->new;
1978 tem = fixup_memory_subreg (tem, insn, 0);
1981 tem = fixup_stack_1 (tem, insn);
1983 /* Unless we want to load from memory, get TEM into the proper mode
1984 for an extract from memory. This can only be done if the
1985 extract is at a constant position and length. */
1987 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1988 && GET_CODE (XEXP (x, 2)) == CONST_INT
1989 && ! mode_dependent_address_p (XEXP (tem, 0))
1990 && ! MEM_VOLATILE_P (tem))
1992 enum machine_mode wanted_mode = VOIDmode;
1993 enum machine_mode is_mode = GET_MODE (tem);
1994 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1997 if (GET_CODE (x) == ZERO_EXTRACT)
2000 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2001 if (wanted_mode == VOIDmode)
2002 wanted_mode = word_mode;
2006 if (GET_CODE (x) == SIGN_EXTRACT)
2008 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2009 if (wanted_mode == VOIDmode)
2010 wanted_mode = word_mode;
2013 /* If we have a narrower mode, we can do something. */
2014 if (wanted_mode != VOIDmode
2015 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2017 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2018 rtx old_pos = XEXP (x, 2);
2021 /* If the bytes and bits are counted differently, we
2022 must adjust the offset. */
2023 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2024 offset = (GET_MODE_SIZE (is_mode)
2025 - GET_MODE_SIZE (wanted_mode) - offset);
2027 pos %= GET_MODE_BITSIZE (wanted_mode);
2029 newmem = gen_rtx_MEM (wanted_mode,
2030 plus_constant (XEXP (tem, 0), offset));
2031 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2032 MEM_COPY_ATTRIBUTES (newmem, tem);
2034 /* Make the change and see if the insn remains valid. */
2035 INSN_CODE (insn) = -1;
2036 XEXP (x, 0) = newmem;
2037 XEXP (x, 2) = GEN_INT (pos);
2039 if (recog_memoized (insn) >= 0)
2042 /* Otherwise, restore old position. XEXP (x, 0) will be
2044 XEXP (x, 2) = old_pos;
2048 /* If we get here, the bitfield extract insn can't accept a memory
2049 reference. Copy the input into a register. */
2051 tem1 = gen_reg_rtx (GET_MODE (tem));
2052 emit_insn_before (gen_move_insn (tem1, tem), insn);
2059 if (SUBREG_REG (x) == var)
2061 /* If this is a special SUBREG made because VAR was promoted
2062 from a wider mode, replace it with VAR and call ourself
2063 recursively, this time saying that the object previously
2064 had its current mode (by virtue of the SUBREG). */
2066 if (SUBREG_PROMOTED_VAR_P (x))
2069 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2073 /* If this SUBREG makes VAR wider, it has become a paradoxical
2074 SUBREG with VAR in memory, but these aren't allowed at this
2075 stage of the compilation. So load VAR into a pseudo and take
2076 a SUBREG of that pseudo. */
2077 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2079 replacement = find_fixup_replacement (replacements, var);
2080 if (replacement->new == 0)
2081 replacement->new = gen_reg_rtx (GET_MODE (var));
2082 SUBREG_REG (x) = replacement->new;
2086 /* See if we have already found a replacement for this SUBREG.
2087 If so, use it. Otherwise, make a MEM and see if the insn
2088 is recognized. If not, or if we should force MEM into a register,
2089 make a pseudo for this SUBREG. */
2090 replacement = find_fixup_replacement (replacements, x);
2091 if (replacement->new)
2093 *loc = replacement->new;
2097 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2099 INSN_CODE (insn) = -1;
2100 if (! flag_force_mem && recog_memoized (insn) >= 0)
2103 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2109 /* First do special simplification of bit-field references. */
2110 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2111 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2112 optimize_bit_field (x, insn, 0);
2113 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2114 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2115 optimize_bit_field (x, insn, NULL_PTR);
2117 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2118 into a register and then store it back out. */
2119 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2120 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2121 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2122 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2123 > GET_MODE_SIZE (GET_MODE (var))))
2125 replacement = find_fixup_replacement (replacements, var);
2126 if (replacement->new == 0)
2127 replacement->new = gen_reg_rtx (GET_MODE (var));
2129 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2130 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2133 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2134 insn into a pseudo and store the low part of the pseudo into VAR. */
2135 if (GET_CODE (SET_DEST (x)) == SUBREG
2136 && SUBREG_REG (SET_DEST (x)) == var
2137 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2138 > GET_MODE_SIZE (GET_MODE (var))))
2140 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2141 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2148 rtx dest = SET_DEST (x);
2149 rtx src = SET_SRC (x);
2151 rtx outerdest = dest;
2154 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2155 || GET_CODE (dest) == SIGN_EXTRACT
2156 || GET_CODE (dest) == ZERO_EXTRACT)
2157 dest = XEXP (dest, 0);
2159 if (GET_CODE (src) == SUBREG)
2160 src = XEXP (src, 0);
2162 /* If VAR does not appear at the top level of the SET
2163 just scan the lower levels of the tree. */
2165 if (src != var && dest != var)
2168 /* We will need to rerecognize this insn. */
2169 INSN_CODE (insn) = -1;
2172 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2174 /* Since this case will return, ensure we fixup all the
2176 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2177 insn, replacements);
2178 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2179 insn, replacements);
2180 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2181 insn, replacements);
2183 tem = XEXP (outerdest, 0);
2185 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2186 that may appear inside a ZERO_EXTRACT.
2187 This was legitimate when the MEM was a REG. */
2188 if (GET_CODE (tem) == SUBREG
2189 && SUBREG_REG (tem) == var)
2190 tem = fixup_memory_subreg (tem, insn, 0);
2192 tem = fixup_stack_1 (tem, insn);
2194 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2195 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2196 && ! mode_dependent_address_p (XEXP (tem, 0))
2197 && ! MEM_VOLATILE_P (tem))
2199 enum machine_mode wanted_mode;
2200 enum machine_mode is_mode = GET_MODE (tem);
2201 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2203 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2204 if (wanted_mode == VOIDmode)
2205 wanted_mode = word_mode;
2207 /* If we have a narrower mode, we can do something. */
2208 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2210 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2211 rtx old_pos = XEXP (outerdest, 2);
2214 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2215 offset = (GET_MODE_SIZE (is_mode)
2216 - GET_MODE_SIZE (wanted_mode) - offset);
2218 pos %= GET_MODE_BITSIZE (wanted_mode);
2220 newmem = gen_rtx_MEM (wanted_mode,
2221 plus_constant (XEXP (tem, 0),
2223 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2224 MEM_COPY_ATTRIBUTES (newmem, tem);
2226 /* Make the change and see if the insn remains valid. */
2227 INSN_CODE (insn) = -1;
2228 XEXP (outerdest, 0) = newmem;
2229 XEXP (outerdest, 2) = GEN_INT (pos);
2231 if (recog_memoized (insn) >= 0)
2234 /* Otherwise, restore old position. XEXP (x, 0) will be
2236 XEXP (outerdest, 2) = old_pos;
2240 /* If we get here, the bit-field store doesn't allow memory
2241 or isn't located at a constant position. Load the value into
2242 a register, do the store, and put it back into memory. */
2244 tem1 = gen_reg_rtx (GET_MODE (tem));
2245 emit_insn_before (gen_move_insn (tem1, tem), insn);
2246 emit_insn_after (gen_move_insn (tem, tem1), insn);
2247 XEXP (outerdest, 0) = tem1;
2252 /* STRICT_LOW_PART is a no-op on memory references
2253 and it can cause combinations to be unrecognizable,
2256 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2257 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2259 /* A valid insn to copy VAR into or out of a register
2260 must be left alone, to avoid an infinite loop here.
2261 If the reference to VAR is by a subreg, fix that up,
2262 since SUBREG is not valid for a memref.
2263 Also fix up the address of the stack slot.
2265 Note that we must not try to recognize the insn until
2266 after we know that we have valid addresses and no
2267 (subreg (mem ...) ...) constructs, since these interfere
2268 with determining the validity of the insn. */
2270 if ((SET_SRC (x) == var
2271 || (GET_CODE (SET_SRC (x)) == SUBREG
2272 && SUBREG_REG (SET_SRC (x)) == var))
2273 && (GET_CODE (SET_DEST (x)) == REG
2274 || (GET_CODE (SET_DEST (x)) == SUBREG
2275 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2276 && GET_MODE (var) == promoted_mode
2277 && x == single_set (insn))
2281 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2282 if (replacement->new)
2283 SET_SRC (x) = replacement->new;
2284 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2285 SET_SRC (x) = replacement->new
2286 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2288 SET_SRC (x) = replacement->new
2289 = fixup_stack_1 (SET_SRC (x), insn);
2291 if (recog_memoized (insn) >= 0)
2294 /* INSN is not valid, but we know that we want to
2295 copy SET_SRC (x) to SET_DEST (x) in some way. So
2296 we generate the move and see whether it requires more
2297 than one insn. If it does, we emit those insns and
2298 delete INSN. Otherwise, we can just replace the pattern
2299 of INSN; we have already verified above that INSN has
2300 no other function than to do X. */
2302 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2303 if (GET_CODE (pat) == SEQUENCE)
2305 emit_insn_after (pat, insn);
2306 PUT_CODE (insn, NOTE);
2307 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2308 NOTE_SOURCE_FILE (insn) = 0;
2311 PATTERN (insn) = pat;
2316 if ((SET_DEST (x) == var
2317 || (GET_CODE (SET_DEST (x)) == SUBREG
2318 && SUBREG_REG (SET_DEST (x)) == var))
2319 && (GET_CODE (SET_SRC (x)) == REG
2320 || (GET_CODE (SET_SRC (x)) == SUBREG
2321 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2322 && GET_MODE (var) == promoted_mode
2323 && x == single_set (insn))
2327 if (GET_CODE (SET_DEST (x)) == SUBREG)
2328 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2330 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2332 if (recog_memoized (insn) >= 0)
2335 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2336 if (GET_CODE (pat) == SEQUENCE)
2338 emit_insn_after (pat, insn);
2339 PUT_CODE (insn, NOTE);
2340 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2341 NOTE_SOURCE_FILE (insn) = 0;
2344 PATTERN (insn) = pat;
2349 /* Otherwise, storing into VAR must be handled specially
2350 by storing into a temporary and copying that into VAR
2351 with a new insn after this one. Note that this case
2352 will be used when storing into a promoted scalar since
2353 the insn will now have different modes on the input
2354 and output and hence will be invalid (except for the case
2355 of setting it to a constant, which does not need any
2356 change if it is valid). We generate extra code in that case,
2357 but combine.c will eliminate it. */
2362 rtx fixeddest = SET_DEST (x);
2364 /* STRICT_LOW_PART can be discarded, around a MEM. */
2365 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2366 fixeddest = XEXP (fixeddest, 0);
2367 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2368 if (GET_CODE (fixeddest) == SUBREG)
2370 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2371 promoted_mode = GET_MODE (fixeddest);
2374 fixeddest = fixup_stack_1 (fixeddest, insn);
2376 temp = gen_reg_rtx (promoted_mode);
2378 emit_insn_after (gen_move_insn (fixeddest,
2379 gen_lowpart (GET_MODE (fixeddest),
2383 SET_DEST (x) = temp;
2391 /* Nothing special about this RTX; fix its operands. */
2393 fmt = GET_RTX_FORMAT (code);
2394 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2397 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2398 else if (fmt[i] == 'E')
2401 for (j = 0; j < XVECLEN (x, i); j++)
2402 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2403 insn, replacements);
2408 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2409 return an rtx (MEM:m1 newaddr) which is equivalent.
2410 If any insns must be emitted to compute NEWADDR, put them before INSN.
2412 UNCRITICAL nonzero means accept paradoxical subregs.
2413 This is used for subregs found inside REG_NOTES. */
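/* For illustration (a sketch, assuming a 32-bit little-endian target with
   UNITS_PER_WORD == 4): a reference such as
       (subreg:HI (mem:SI (reg:SI fp)) 0)
   comes back from this routine as the equivalent
       (mem:HI (reg:SI fp))
   while word 1 of a DImode memory, (subreg:SI (mem:DI (reg:SI fp)) 1),
   becomes (mem:SI (plus:SI (reg:SI fp) (const_int 4))); any insns needed
   to compute the new address are emitted before INSN.  */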
2416 fixup_memory_subreg (x, insn, uncritical)
2421 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2422 rtx addr = XEXP (SUBREG_REG (x), 0);
2423 enum machine_mode mode = GET_MODE (x);
2426 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2427 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2431 if (BYTES_BIG_ENDIAN)
2432 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2433 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2434 addr = plus_constant (addr, offset);
2435 if (!flag_force_addr && memory_address_p (mode, addr))
2436 /* Shortcut if no insns need be emitted. */
2437 return change_address (SUBREG_REG (x), mode, addr);
2439 result = change_address (SUBREG_REG (x), mode, addr);
2440 emit_insn_before (gen_sequence (), insn);
2445 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2446 Replace subexpressions of X in place.
2447 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2448 Otherwise return X, with its contents possibly altered.
2450 If any insns must be emitted to compute NEWADDR, put them before INSN.
2452 UNCRITICAL is as in fixup_memory_subreg. */
2455 walk_fixup_memory_subreg (x, insn, uncritical)
2460 register enum rtx_code code;
2461 register const char *fmt;
2467 code = GET_CODE (x);
2469 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2470 return fixup_memory_subreg (x, insn, uncritical);
2472 /* Nothing special about this RTX; fix its operands. */
2474 fmt = GET_RTX_FORMAT (code);
2475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2478 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2479 else if (fmt[i] == 'E')
2482 for (j = 0; j < XVECLEN (x, i); j++)
2484 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2490 /* For each memory ref within X, if it refers to a stack slot
2491 with an out of range displacement, put the address in a temp register
2492 (emitting new insns before INSN to load these registers)
2493 and alter the memory ref to use that register.
2494 Replace each such MEM rtx with a copy, to avoid clobberage. */
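/* A sketch of the intent, assuming a target whose load/store displacement
   field cannot encode a large frame offset: a reference like
       (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 40000)))
   whose address fails memory_address_p has the PLUS copied into a fresh
   pseudo with copy_to_reg, the loading insns emitted before INSN, and the
   MEM rewritten as (mem:SI (reg:SI <temp>)).  */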
2497 fixup_stack_1 (x, insn)
2502 register RTX_CODE code = GET_CODE (x);
2503 register const char *fmt;
2507 register rtx ad = XEXP (x, 0);
2508 /* If we have address of a stack slot but it's not valid
2509 (displacement is too large), compute the sum in a register. */
2510 if (GET_CODE (ad) == PLUS
2511 && GET_CODE (XEXP (ad, 0)) == REG
2512 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2513 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2514 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2515 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2516 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2518 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2519 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2520 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2521 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2524 if (memory_address_p (GET_MODE (x), ad))
2528 temp = copy_to_reg (ad);
2529 seq = gen_sequence ();
2531 emit_insn_before (seq, insn);
2532 return change_address (x, VOIDmode, temp);
2537 fmt = GET_RTX_FORMAT (code);
2538 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2541 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2542 else if (fmt[i] == 'E')
2545 for (j = 0; j < XVECLEN (x, i); j++)
2546 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2552 /* Optimization: a bit-field instruction whose field
2553 happens to be a byte or halfword in memory
2554 can be changed to a move instruction.
2556 We call here when INSN is an insn to examine or store into a bit-field.
2557 BODY is the SET-rtx to be altered.
2559 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2560 (Currently this is called only from function.c, and EQUIV_MEM
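/* A sketch of the transformation this routine attempts, assuming an
   8-bit, byte-aligned field: an extraction such as
       (set (reg:SI r) (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 0)))
   is rewritten so that the source is simply the byte itself, (mem:QI addr'),
   used directly when its mode matches the destination and otherwise fed
   through a zero- or sign-extending convert_move into a fresh pseudo,
   provided the field has constant size and position and the containing
   word really lives in memory.  */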
2564 optimize_bit_field (body, insn, equiv_mem)
2569 register rtx bitfield;
2572 enum machine_mode mode;
2574 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2575 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2576 bitfield = SET_DEST (body), destflag = 1;
2578 bitfield = SET_SRC (body), destflag = 0;
2580 /* First check that the field being stored has constant size and position
2581 and is in fact a byte or halfword suitably aligned. */
2583 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2584 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2585 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2587 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2589 register rtx memref = 0;
2591 /* Now check that the containing word is memory, not a register,
2592 and that it is safe to change the machine mode. */
2594 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2595 memref = XEXP (bitfield, 0);
2596 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2598 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2599 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2600 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2601 memref = SUBREG_REG (XEXP (bitfield, 0));
2602 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2604 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2605 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2608 && ! mode_dependent_address_p (XEXP (memref, 0))
2609 && ! MEM_VOLATILE_P (memref))
2611 /* Now adjust the address, first for any subreg'ing
2612 that we are now getting rid of,
2613 and then for which byte of the word is wanted. */
2615 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2618 /* Adjust OFFSET to count bits from low-address byte. */
2619 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2620 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2621 - offset - INTVAL (XEXP (bitfield, 1)));
2623 /* Adjust OFFSET to count bytes from low-address byte. */
2624 offset /= BITS_PER_UNIT;
2625 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2627 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2628 if (BYTES_BIG_ENDIAN)
2629 offset -= (MIN (UNITS_PER_WORD,
2630 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2631 - MIN (UNITS_PER_WORD,
2632 GET_MODE_SIZE (GET_MODE (memref))));
2636 memref = change_address (memref, mode,
2637 plus_constant (XEXP (memref, 0), offset));
2638 insns = get_insns ();
2640 emit_insns_before (insns, insn);
2642 /* Store this memory reference where
2643 we found the bit field reference. */
2647 validate_change (insn, &SET_DEST (body), memref, 1);
2648 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2650 rtx src = SET_SRC (body);
2651 while (GET_CODE (src) == SUBREG
2652 && SUBREG_WORD (src) == 0)
2653 src = SUBREG_REG (src);
2654 if (GET_MODE (src) != GET_MODE (memref))
2655 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2656 validate_change (insn, &SET_SRC (body), src, 1);
2658 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2659 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2660 /* This shouldn't happen because anything that didn't have
2661 one of these modes should have got converted explicitly
2662 and then referenced through a subreg.
2663 This is so because the original bit-field was
2664 handled by agg_mode and so its tree structure had
2665 the same mode that memref now has. */
2670 rtx dest = SET_DEST (body);
2672 while (GET_CODE (dest) == SUBREG
2673 && SUBREG_WORD (dest) == 0
2674 && (GET_MODE_CLASS (GET_MODE (dest))
2675 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2676 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2678 dest = SUBREG_REG (dest);
2680 validate_change (insn, &SET_DEST (body), dest, 1);
2682 if (GET_MODE (dest) == GET_MODE (memref))
2683 validate_change (insn, &SET_SRC (body), memref, 1);
2686 /* Convert the mem ref to the destination mode. */
2687 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2690 convert_move (newreg, memref,
2691 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2695 validate_change (insn, &SET_SRC (body), newreg, 1);
2699 /* See if we can convert this extraction or insertion into
2700 a simple move insn. We might not be able to do so if this
2701 was, for example, part of a PARALLEL.
2703 If we succeed, write out any needed conversions. If we fail,
2704 it is hard to guess why we failed, so don't do anything
2705 special; just let the optimization be suppressed. */
2707 if (apply_change_group () && seq)
2708 emit_insns_before (seq, insn);
2713 /* These routines are responsible for converting virtual register references
2714 to the actual hard register references once RTL generation is complete.
2716 The following four variables are used for communication between the
2717 routines. They contain the offsets of the virtual registers from their
2718 respective hard registers. */
2720 static int in_arg_offset;
2721 static int var_offset;
2722 static int dynamic_offset;
2723 static int out_arg_offset;
2724 static int cfa_offset;
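/* As a concrete illustration of how these offsets are used below: once
   they are computed, an address such as
       (plus:SI (reg:SI virtual-stack-vars) (const_int 8))
   is rewritten into
       (plus:SI (reg:SI frame-pointer) (const_int <8 + var_offset>))
   and, likewise, virtual-incoming-args maps to the arg pointer plus
   in_arg_offset, virtual-outgoing-args to the stack pointer plus
   out_arg_offset, virtual-stack-dynamic to the stack pointer plus
   dynamic_offset, and virtual-cfa to the arg pointer plus cfa_offset.  */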
2726 /* In most machines, the stack pointer register is equivalent to the bottom
2729 #ifndef STACK_POINTER_OFFSET
2730 #define STACK_POINTER_OFFSET 0
2733 /* If not defined, pick an appropriate default for the offset of dynamically
2734 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2735 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2737 #ifndef STACK_DYNAMIC_OFFSET
2739 #ifdef ACCUMULATE_OUTGOING_ARGS
2740 /* The bottom of the stack points to the actual arguments. If
2741 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2742 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2743 stack space for register parameters is not pushed by the caller, but
2744 rather part of the fixed stack areas and hence not included in
2745 `current_function_outgoing_args_size'. Nevertheless, we must allow
2746 for it when allocating stack dynamic objects. */
2748 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2749 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2750 (current_function_outgoing_args_size \
2751 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2754 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2755 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2759 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
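/* Worked example of the defaults above: on a hypothetical target that
   accumulates outgoing arguments and defines REG_PARM_STACK_SPACE but not
   OUTGOING_REG_PARM_STACK_SPACE, a function with 16 bytes of outgoing
   argument space and 8 bytes of register-parameter save area gets
       STACK_DYNAMIC_OFFSET (fndecl) == 16 + 8 + STACK_POINTER_OFFSET
   so that dynamically allocated stack objects land above both areas.  */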
2763 /* On a few machines, the CFA coincides with the arg pointer. */
2765 #ifndef ARG_POINTER_CFA_OFFSET
2766 #define ARG_POINTER_CFA_OFFSET 0
2770 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2771 its address taken. DECL is the decl for the object stored in the
2772 register, for later use if we do need to force REG into the stack.
2773 REG is overwritten by the MEM like in put_reg_into_stack. */
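/* The rtl built here looks roughly like
       (mem (addressof:Pmode (reg <new-pseudo>) <original-regno> <decl>))
   where the outer MEM reuses REG's own rtx (so every existing reference
   sees the change) and the inner pseudo stands in for the old register
   until purge_addressof either substitutes it back or, failing that,
   forces the variable into a real stack slot.  */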
2776 gen_mem_addressof (reg, decl)
2780 tree type = TREE_TYPE (decl);
2781 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2784 /* If the original REG was a user-variable, then so is the REG whose
2785 address is being taken. Likewise for unchanging. */
2786 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2787 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2789 PUT_CODE (reg, MEM);
2790 PUT_MODE (reg, DECL_MODE (decl));
2792 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2793 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2794 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2796 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2797 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2802 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2806 flush_addressof (decl)
2809 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2810 && DECL_RTL (decl) != 0
2811 && GET_CODE (DECL_RTL (decl)) == MEM
2812 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2813 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2814 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2818 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2821 put_addressof_into_stack (r, ht)
2823 struct hash_table *ht;
2825 tree decl = ADDRESSOF_DECL (r);
2826 rtx reg = XEXP (r, 0);
2828 if (GET_CODE (reg) != REG)
2831 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2832 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2833 ADDRESSOF_REGNO (r),
2834 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2837 /* List of replacements made below in purge_addressof_1 when creating
2838 bitfield insertions. */
2839 static rtx purge_bitfield_addressof_replacements;
2841 /* List of replacements made below in purge_addressof_1 for patterns
2842 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2843 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2844 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2845 enough in complex cases, e.g. when some field values can be
2846 extracted by using a MEM with a narrower mode. */
2847 static rtx purge_addressof_replacements;
2849 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2850 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2851 the stack. If the function returns FALSE then the replacement could not
2855 purge_addressof_1 (loc
, insn
, force
, store
, ht
)
2859 struct hash_table
*ht
;
2865 boolean result
= true;
2867 /* Re-start here to avoid recursion in common cases. */
2874 code
= GET_CODE (x
);
2876 /* If we don't return in any of the cases below, we will recurse inside
2877 the RTX, which will normally result in any ADDRESSOF being forced into
2881 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1, ht
);
2882 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0, ht
);
2886 else if (code
== ADDRESSOF
&& GET_CODE (XEXP (x
, 0)) == MEM
)
2888 /* We must create a copy of the rtx because it was created by
2889 overwriting a REG rtx which is always shared. */
2890 rtx sub
= copy_rtx (XEXP (XEXP (x
, 0), 0));
2893 if (validate_change (insn
, loc
, sub
, 0)
2894 || validate_replace_rtx (x
, sub
, insn
))
2898 sub
= force_operand (sub
, NULL_RTX
);
2899 if (! validate_change (insn
, loc
, sub
, 0)
2900 && ! validate_replace_rtx (x
, sub
, insn
))
2903 insns
= gen_sequence ();
2905 emit_insn_before (insns
, insn
);
2909 else if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == ADDRESSOF
&& ! force
)
2911 rtx sub
= XEXP (XEXP (x
, 0), 0);
2914 if (GET_CODE (sub
) == MEM
)
2916 sub2
= gen_rtx_MEM (GET_MODE (x
), copy_rtx (XEXP (sub
, 0)));
2917 MEM_COPY_ATTRIBUTES (sub2
, sub
);
2918 RTX_UNCHANGING_P (sub2
) = RTX_UNCHANGING_P (sub
);
2921 else if (GET_CODE (sub
) == REG
2922 && (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
2924 else if (GET_CODE (sub
) == REG
&& GET_MODE (x
) != GET_MODE (sub
))
2926 int size_x
, size_sub
;
2930 /* When processing REG_NOTES look at the list of
2931 replacements done on the insn to find the register that X
2935 for (tem
= purge_bitfield_addressof_replacements
;
2937 tem
= XEXP (XEXP (tem
, 1), 1))
2938 if (rtx_equal_p (x
, XEXP (tem
, 0)))
2940 *loc
= XEXP (XEXP (tem
, 1), 0);
2944 /* See comment for purge_addressof_replacements. */
2945 for (tem
= purge_addressof_replacements
;
2947 tem
= XEXP (XEXP (tem
, 1), 1))
2948 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
2950 rtx z
= XEXP (XEXP (tem
, 1), 0);
2952 if (GET_MODE (x
) == GET_MODE (z
)
2953 || (GET_CODE (XEXP (XEXP (tem
, 1), 0)) != REG
2954 && GET_CODE (XEXP (XEXP (tem
, 1), 0)) != SUBREG
))
2957 /* It can happen that the note may speak of things
2958 in a wider (or just different) mode than the
2959 code did. This is especially true of
2962 if (GET_CODE (z
) == SUBREG
&& SUBREG_WORD (z
) == 0)
2965 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
2966 && (GET_MODE_SIZE (GET_MODE (x
))
2967 > GET_MODE_SIZE (GET_MODE (z
))))
2969 /* This can occur as a result of invalid
2970 pointer casts, e.g. float f; ...
2971 *(long long int *)&f.
2972 ??? We could emit a warning here, but
2973 without a line number that wouldn't be
2975 z
= gen_rtx_SUBREG (GET_MODE (x
), z
, 0);
2978 z
= gen_lowpart (GET_MODE (x
), z
);
2984 /* Sometimes we may not be able to find the replacement. For
2985 example when the original insn was a MEM in a wider mode,
2986 and the note is part of a sign extension of a narrowed
2987 version of that MEM. Gcc testcase compile/990829-1.c can
2988 generate an example of this situation. Rather than complain
2989 we return false, which will prompt our caller to remove the
2994 size_x
= GET_MODE_BITSIZE (GET_MODE (x
));
2995 size_sub
= GET_MODE_BITSIZE (GET_MODE (sub
));
2997 /* Don't even consider working with paradoxical subregs,
2998 or the moral equivalent seen here. */
2999 if (size_x
<= size_sub
3000 && int_mode_for_mode (GET_MODE (sub
)) != BLKmode
)
3002 /* Do a bitfield insertion to mirror what would happen
3009 rtx p
= PREV_INSN (insn
);
3012 val
= gen_reg_rtx (GET_MODE (x
));
3013 if (! validate_change (insn
, loc
, val
, 0))
3015 /* Discard the current sequence and put the
3016 ADDRESSOF on stack. */
3020 seq
= gen_sequence ();
3022 emit_insn_before (seq
, insn
);
3023 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
3027 store_bit_field (sub
, size_x
, 0, GET_MODE (x
),
3028 val
, GET_MODE_SIZE (GET_MODE (sub
)),
3029 GET_MODE_SIZE (GET_MODE (sub
)));
3031 /* Make sure to unshare any shared rtl that store_bit_field
3032 might have created. */
3033 unshare_all_rtl_again (get_insns ());
3035 seq
= gen_sequence ();
3037 p
= emit_insn_after (seq
, insn
);
3038 if (NEXT_INSN (insn
))
3039 compute_insns_for_mem (NEXT_INSN (insn
),
3040 p
? NEXT_INSN (p
) : NULL_RTX
,
3045 rtx p
= PREV_INSN (insn
);
3048 val
= extract_bit_field (sub
, size_x
, 0, 1, NULL_RTX
,
3049 GET_MODE (x
), GET_MODE (x
),
3050 GET_MODE_SIZE (GET_MODE (sub
)),
3051 GET_MODE_SIZE (GET_MODE (sub
)));
3053 if (! validate_change (insn
, loc
, val
, 0))
3055 /* Discard the current sequence and put the
3056 ADDRESSOF on stack. */
3061 seq
= gen_sequence ();
3063 emit_insn_before (seq
, insn
);
3064 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
3068 /* Remember the replacement so that the same one can be done
3069 on the REG_NOTES. */
3070 purge_bitfield_addressof_replacements
3071 = gen_rtx_EXPR_LIST (VOIDmode
, x
,
3074 purge_bitfield_addressof_replacements
));
3076 /* We replaced with a reg -- all done. */
3081 else if (validate_change (insn
, loc
, sub
, 0))
3083 /* Remember the replacement so that the same one can be done
3084 on the REG_NOTES. */
3085 if (GET_CODE (sub
) == REG
|| GET_CODE (sub
) == SUBREG
)
3089 for (tem
= purge_addressof_replacements
;
3091 tem
= XEXP (XEXP (tem
, 1), 1))
3092 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
3094 XEXP (XEXP (tem
, 1), 0) = sub
;
3097 purge_addressof_replacements
3098 = gen_rtx (EXPR_LIST
, VOIDmode
, XEXP (x
, 0),
3099 gen_rtx_EXPR_LIST (VOIDmode
, sub
,
3100 purge_addressof_replacements
));
3106 /* else give up and put it into the stack */
3109 else if (code
== ADDRESSOF
)
3111 put_addressof_into_stack (x
, ht
);
3114 else if (code
== SET
)
3116 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1, ht
);
3117 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0, ht
);
3121 /* Scan all subexpressions. */
3122 fmt
= GET_RTX_FORMAT (code
);
3123 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3126 result
&= purge_addressof_1 (&XEXP (x
, i
), insn
, force
, 0, ht
);
3127 else if (*fmt
== 'E')
3128 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3129 result
&= purge_addressof_1 (&XVECEXP (x
, i
, j
), insn
, force
, 0, ht
);
3135 /* Return a new hash table entry in HT. */
3137 static struct hash_entry
*
3138 insns_for_mem_newfunc (he
, ht
, k
)
3139 struct hash_entry
*he
;
3140 struct hash_table
*ht
;
3141 hash_table_key k ATTRIBUTE_UNUSED
;
3143 struct insns_for_mem_entry
*ifmhe
;
3147 ifmhe
= ((struct insns_for_mem_entry
*)
3148 hash_allocate (ht
, sizeof (struct insns_for_mem_entry
)));
3149 ifmhe
->insns
= NULL_RTX
;
3154 /* Return a hash value for K, a REG. */
3156 static unsigned long
3157 insns_for_mem_hash (k
)
3160 /* K is really a RTX. Just use the address as the hash value. */
3161 return (unsigned long) k
;
3164 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3167 insns_for_mem_comp (k1
, k2
)
3174 struct insns_for_mem_walk_info
{
3175 /* The hash table that we are using to record which INSNs use which
3177 struct hash_table
*ht
;
3179 /* The INSN we are currently processing. */
3182 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3183 to find the insns that use the REGs in the ADDRESSOFs. */
3187 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3188 that might be used in an ADDRESSOF expression, record this INSN in
3189 the hash table given by DATA (which is really a pointer to an
3190 insns_for_mem_walk_info structure). */
3193 insns_for_mem_walk (r
, data
)
3197 struct insns_for_mem_walk_info
*ifmwi
3198 = (struct insns_for_mem_walk_info
*) data
;
3200 if (ifmwi
->pass
== 0 && *r
&& GET_CODE (*r
) == ADDRESSOF
3201 && GET_CODE (XEXP (*r
, 0)) == REG
)
3202 hash_lookup (ifmwi
->ht
, XEXP (*r
, 0), /*create=*/1, /*copy=*/0);
3203 else if (ifmwi
->pass
== 1 && *r
&& GET_CODE (*r
) == REG
)
3205 /* Lookup this MEM in the hashtable, creating it if necessary. */
3206 struct insns_for_mem_entry
*ifme
3207 = (struct insns_for_mem_entry
*) hash_lookup (ifmwi
->ht
,
3212 /* If we have not already recorded this INSN, do so now. Since
3213 we process the INSNs in order, we know that if we have
3214 recorded it it must be at the front of the list. */
3215 if (ifme
&& (!ifme
->insns
|| XEXP (ifme
->insns
, 0) != ifmwi
->insn
))
3217 /* We do the allocation on the same obstack as is used for
3218 the hash table since this memory will not be used once
3219 the hash table is deallocated. */
3220 push_obstacks (&ifmwi
->ht
->memory
, &ifmwi
->ht
->memory
);
3221 ifme
->insns
= gen_rtx_EXPR_LIST (VOIDmode
, ifmwi
->insn
,
3230 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3231 which REGs in HT. */
3234 compute_insns_for_mem (insns
, last_insn
, ht
)
3237 struct hash_table
*ht
;
3240 struct insns_for_mem_walk_info ifmwi
;
3243 for (ifmwi
.pass
= 0; ifmwi
.pass
< 2; ++ifmwi
.pass
)
3244 for (insn
= insns
; insn
!= last_insn
; insn
= NEXT_INSN (insn
))
3245 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
3248 for_each_rtx (&insn
, insns_for_mem_walk
, &ifmwi
);
3252 /* Helper function for purge_addressof called through for_each_rtx.
3253 Returns true iff the rtl is an ADDRESSOF. */
3255 is_addressof (rtl
, data
)
3257 void * data ATTRIBUTE_UNUSED
;
3259 return GET_CODE (* rtl
) == ADDRESSOF
;
3262 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3263 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3267 purge_addressof (insns
)
3271 struct hash_table ht
;
3273 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3274 requires a fixup pass over the instruction stream to correct
3275 INSNs that depended on the REG being a REG, and not a MEM. But,
3276 these fixup passes are slow. Furthermore, most MEMs are not
3277 mentioned in very many instructions. So, we speed up the process
3278 by pre-calculating which REGs occur in which INSNs; that allows
3279 us to perform the fixup passes much more quickly. */
3280 hash_table_init (&ht
,
3281 insns_for_mem_newfunc
,
3283 insns_for_mem_comp
);
3284 compute_insns_for_mem (insns
, NULL_RTX
, &ht
);
3286 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3287 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
3288 || GET_CODE (insn
) == CALL_INSN
)
3290 if (! purge_addressof_1 (&PATTERN (insn
), insn
,
3291 asm_noperands (PATTERN (insn
)) > 0, 0, &ht
))
3292 /* If we could not replace the ADDRESSOFs in the insn,
3293 something is wrong. */
3296 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3298 /* If we could not replace the ADDRESSOFs in the insn's notes,
3299 we can just remove the offending notes instead. */
3302 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
3304 /* If we find a REG_RETVAL note then the insn is a libcall.
3305 Such insns must have REG_EQUAL notes as well, in order
3306 for later passes of the compiler to work. So it is not
3307 safe to delete the notes here, and instead we abort. */
3308 if (REG_NOTE_KIND (note
) == REG_RETVAL
)
3310 if (for_each_rtx (& note
, is_addressof
, NULL
))
3311 remove_note (insn
, note
);
3317 hash_table_free (&ht
);
3318 purge_bitfield_addressof_replacements
= 0;
3319 purge_addressof_replacements
= 0;
3322 /* Pass through the INSNS of function FNDECL and convert virtual register
3323 references to hard register references. */
3326 instantiate_virtual_regs (fndecl, insns)
3333 /* Compute the offsets to use for this function. */
3334 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3335 var_offset = STARTING_FRAME_OFFSET;
3336 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3337 out_arg_offset = STACK_POINTER_OFFSET;
3338 cfa_offset = ARG_POINTER_CFA_OFFSET;
3340 /* Scan all variables and parameters of this function. For each that is
3341 in memory, instantiate all virtual registers if the result is a valid
3342 address. If not, we do it later. That will handle most uses of virtual
3343 regs on many machines. */
3344 instantiate_decls (fndecl, 1);
3346 /* Initialize recognition, indicating that volatile is OK. */
3349 /* Scan through all the insns, instantiating every virtual register still
3351 for (insn = insns; insn; insn = NEXT_INSN (insn))
3352 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3353 || GET_CODE (insn) == CALL_INSN)
3355 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3356 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3359 /* Instantiate the stack slots for the parm registers, for later use in
3360 addressof elimination. */
3361 for (i = 0; i < max_parm_reg; ++i)
3362 if (parm_reg_stack_loc[i])
3363 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3365 /* Now instantiate the remaining register equivalences for debugging info.
3366 These will not be valid addresses. */
3367 instantiate_decls (fndecl, 0);
3369 /* Indicate that, from now on, assign_stack_local should use
3370 frame_pointer_rtx. */
3371 virtuals_instantiated = 1;
3374 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3375 all virtual registers in their DECL_RTL's.
3377 If VALID_ONLY, do this only if the resulting address is still valid.
3378 Otherwise, always do it. */
3381 instantiate_decls (fndecl
, valid_only
)
3387 if (DECL_SAVED_INSNS (fndecl
))
3388 /* When compiling an inline function, the obstack used for
3389 rtl allocation is the maybepermanent_obstack. Calling
3390 `resume_temporary_allocation' switches us back to that
3391 obstack while we process this function's parameters. */
3392 resume_temporary_allocation ();
3394 /* Process all parameters of the function. */
3395 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
3397 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
3399 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
3401 /* If the parameter was promoted, then the incoming RTL mode may be
3402 larger than the declared type size. We must use the larger of
3404 size
= MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
))), size
);
3405 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
3408 /* Now process all variables defined in the function or its subblocks. */
3409 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
3411 if (DECL_INLINE (fndecl
) || DECL_DEFER_OUTPUT (fndecl
))
3413 /* Save all rtl allocated for this function by raising the
3414 high-water mark on the maybepermanent_obstack. */
3416 /* All further rtl allocation is now done in the current_obstack. */
3417 rtl_in_current_obstack ();
3421 /* Subroutine of instantiate_decls: Process all decls in the given
3422 BLOCK node and all its subblocks. */
3425 instantiate_decls_1 (let
, valid_only
)
3431 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
3432 instantiate_decl (DECL_RTL (t
), int_size_in_bytes (TREE_TYPE (t
)),
3435 /* Process all subblocks. */
3436 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
3437 instantiate_decls_1 (t
, valid_only
);
3440 /* Subroutine of the preceding procedures: Given RTL representing a
3441 decl and the size of the object, do any instantiation required.
3443 If VALID_ONLY is non-zero, it means that the RTL should only be
3444 changed if the new address is valid. */
3447 instantiate_decl (x
, size
, valid_only
)
3452 enum machine_mode mode
;
3455 /* If this is not a MEM, no need to do anything. Similarly if the
3456 address is a constant or a register that is not a virtual register. */
3458 if (x
== 0 || GET_CODE (x
) != MEM
)
3462 if (CONSTANT_P (addr
)
3463 || (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == REG
)
3464 || (GET_CODE (addr
) == REG
3465 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
3466 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
3469 /* If we should only do this if the address is valid, copy the address.
3470 We need to do this so we can undo any changes that might make the
3471 address invalid. This copy is unfortunate, but probably can't be
3475 addr
= copy_rtx (addr
);
3477 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
3481 /* Now verify that the resulting address is valid for every integer or
3482 floating-point mode up to and including SIZE bytes long. We do this
3483 since the object might be accessed in any mode and frame addresses
3486 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3487 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3488 mode
= GET_MODE_WIDER_MODE (mode
))
3489 if (! memory_address_p (mode
, addr
))
3492 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
3493 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3494 mode
= GET_MODE_WIDER_MODE (mode
))
3495 if (! memory_address_p (mode
, addr
))
3499 /* Put back the address now that we have updated it and we either know
3500 it is valid or we don't care whether it is valid. */
3505 /* Given a pointer to a piece of rtx and an optional pointer to the
3506 containing object, instantiate any virtual registers present in it.
3508 If EXTRA_INSNS, we always do the replacement and generate
3509 any extra insns before OBJECT. If it zero, we do nothing if replacement
3512 Return 1 if we either had nothing to do or if we were able to do the
3513 needed replacement. Return 0 otherwise; we only return zero if
3514 EXTRA_INSNS is zero.
3516 We first try some simple transformations to avoid the creation of extra
3520 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
3528 HOST_WIDE_INT offset
= 0;
3534 /* Re-start here to avoid recursion in common cases. */
3541 code
= GET_CODE (x
);
3543 /* Check for some special cases. */
3560 /* We are allowed to set the virtual registers. This means that
3561 the actual register should receive the source minus the
3562 appropriate offset. This is used, for example, in the handling
3563 of non-local gotos. */
3564 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
3565 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
3566 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
3567 new = frame_pointer_rtx
, offset
= - var_offset
;
3568 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
3569 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
3570 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
3571 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
3572 else if (SET_DEST (x
) == virtual_cfa_rtx
)
3573 new = arg_pointer_rtx
, offset
= - cfa_offset
;
3577 rtx src
= SET_SRC (x
);
3579 instantiate_virtual_regs_1 (&src
, NULL_RTX
, 0);
3581 /* The only valid sources here are PLUS or REG. Just do
3582 the simplest possible thing to handle them. */
3583 if (GET_CODE (src
) != REG
&& GET_CODE (src
) != PLUS
)
3587 if (GET_CODE (src
) != REG
)
3588 temp
= force_operand (src
, NULL_RTX
);
3591 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
3595 emit_insns_before (seq
, object
);
3598 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
3605 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
3610 /* Handle special case of virtual register plus constant. */
3611 if (CONSTANT_P (XEXP (x
, 1)))
3613 rtx old
, new_offset
;
3615 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3616 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3618 rtx inner
= XEXP (XEXP (x
, 0), 0);
3620 if (inner
== virtual_incoming_args_rtx
)
3621 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3622 else if (inner
== virtual_stack_vars_rtx
)
3623 new = frame_pointer_rtx
, offset
= var_offset
;
3624 else if (inner
== virtual_stack_dynamic_rtx
)
3625 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3626 else if (inner
== virtual_outgoing_args_rtx
)
3627 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3628 else if (inner
== virtual_cfa_rtx
)
3629 new = arg_pointer_rtx
, offset
= cfa_offset
;
3636 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
3638 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
3641 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
3642 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3643 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
3644 new = frame_pointer_rtx
, offset
= var_offset
;
3645 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
3646 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3647 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
3648 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3649 else if (XEXP (x
, 0) == virtual_cfa_rtx
)
3650 new = arg_pointer_rtx
, offset
= cfa_offset
;
3653 /* We know the second operand is a constant. Unless the
3654 first operand is a REG (which has been already checked),
3655 it needs to be checked. */
3656 if (GET_CODE (XEXP (x
, 0)) != REG
)
3664 new_offset
= plus_constant (XEXP (x
, 1), offset
);
3666 /* If the new constant is zero, try to replace the sum with just
3668 if (new_offset
== const0_rtx
3669 && validate_change (object
, loc
, new, 0))
3672 /* Next try to replace the register and new offset.
3673 There are two changes to validate here and we can't assume that
3674 in the case of old offset equals new just changing the register
3675 will yield a valid insn. In the interests of a little efficiency,
3676 however, we only call validate change once (we don't queue up the
3677 changes and then call apply_change_group). */
3681 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
3682 : (XEXP (x
, 0) = new,
3683 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
3691 /* Otherwise copy the new constant into a register and replace
3692 constant with that register. */
3693 temp
= gen_reg_rtx (Pmode
);
3695 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
3696 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
3699 /* If that didn't work, replace this expression with a
3700 register containing the sum. */
3703 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
3706 temp
= force_operand (new, NULL_RTX
);
3710 emit_insns_before (seq
, object
);
3711 if (! validate_change (object
, loc
, temp
, 0)
3712 && ! validate_replace_rtx (x
, temp
, object
))
3720 /* Fall through to generic two-operand expression case. */
3726 case DIV
: case UDIV
:
3727 case MOD
: case UMOD
:
3728 case AND
: case IOR
: case XOR
:
3729 case ROTATERT
: case ROTATE
:
3730 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3732 case GE
: case GT
: case GEU
: case GTU
:
3733 case LE
: case LT
: case LEU
: case LTU
:
3734 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
3735 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
3740 /* Most cases of MEM that convert to valid addresses have already been
3741 handled by our scan of decls. The only special handling we
3742 need here is to make a copy of the rtx to ensure it isn't being
3743 shared if we have to change it to a pseudo.
3745 If the rtx is a simple reference to an address via a virtual register,
3746 it can potentially be shared. In such cases, first try to make it
3747 a valid address, which can also be shared. Otherwise, copy it and
3750 First check for common cases that need no processing. These are
3751 usually due to instantiation already being done on a previous instance
3755 if (CONSTANT_ADDRESS_P (temp
)
3756 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3757 || temp
== arg_pointer_rtx
3759 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3760 || temp
== hard_frame_pointer_rtx
3762 || temp
== frame_pointer_rtx
)
3765 if (GET_CODE (temp
) == PLUS
3766 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3767 && (XEXP (temp
, 0) == frame_pointer_rtx
3768 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3769 || XEXP (temp
, 0) == hard_frame_pointer_rtx
3771 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3772 || XEXP (temp
, 0) == arg_pointer_rtx
3777 if (temp
== virtual_stack_vars_rtx
3778 || temp
== virtual_incoming_args_rtx
3779 || (GET_CODE (temp
) == PLUS
3780 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3781 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
3782 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
3784 /* This MEM may be shared. If the substitution can be done without
3785 the need to generate new pseudos, we want to do it in place
3786 so all copies of the shared rtx benefit. The call below will
3787 only make substitutions if the resulting address is still
3790 Note that we cannot pass X as the object in the recursive call
3791 since the insn being processed may not allow all valid
3792 addresses. However, if we were not passed on object, we can
3793 only modify X without copying it if X will have a valid
3796 ??? Also note that this can still lose if OBJECT is an insn that
3797 has fewer restrictions on an address than some other insn.
3798 In that case, we will modify the shared address. This case
3799 doesn't seem very likely, though. One case where this could
3800 happen is in the case of a USE or CLOBBER reference, but we
3801 take care of that below. */
3803 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
3804 object
? object
: x
, 0))
3807 /* Otherwise make a copy and process that copy. We copy the entire
3808 RTL expression since it might be a PLUS which could also be
3810 *loc
= x
= copy_rtx (x
);
3813 /* Fall through to generic unary operation case. */
3815 case STRICT_LOW_PART
:
3817 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
3818 case SIGN_EXTEND
: case ZERO_EXTEND
:
3819 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3820 case FLOAT
: case FIX
:
3821 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3825 /* These cases either have just one operand or we know that we need not
3826 check the rest of the operands. */
3832 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3833 go ahead and make the invalid one, but do it to a copy. For a REG,
3834 just make the recursive call, since there's no chance of a problem. */
3836 if ((GET_CODE (XEXP (x
, 0)) == MEM
3837 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
3839 || (GET_CODE (XEXP (x
, 0)) == REG
3840 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
3843 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
3848 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3849 in front of this insn and substitute the temporary. */
3850 if (x
== virtual_incoming_args_rtx
)
3851 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3852 else if (x
== virtual_stack_vars_rtx
)
3853 new = frame_pointer_rtx
, offset
= var_offset
;
3854 else if (x
== virtual_stack_dynamic_rtx
)
3855 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3856 else if (x
== virtual_outgoing_args_rtx
)
3857 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3858 else if (x
== virtual_cfa_rtx
)
3859 new = arg_pointer_rtx
, offset
= cfa_offset
;
3863 temp
= plus_constant (new, offset
);
3864 if (!validate_change (object
, loc
, temp
, 0))
3870 temp
= force_operand (temp
, NULL_RTX
);
3874 emit_insns_before (seq
, object
);
3875 if (! validate_change (object
, loc
, temp
, 0)
3876 && ! validate_replace_rtx (x
, temp
, object
))
3884 if (GET_CODE (XEXP (x
, 0)) == REG
)
3887 else if (GET_CODE (XEXP (x
, 0)) == MEM
)
3889 /* If we have a (addressof (mem ..)), do any instantiation inside
3890 since we know we'll be making the inside valid when we finally
3891 remove the ADDRESSOF. */
3892 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), NULL_RTX
, 0);
3901 /* Scan all subexpressions. */
3902 fmt
= GET_RTX_FORMAT (code
);
3903 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3906 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3909 else if (*fmt
== 'E')
3910 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3911 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3918 /* Optimization: assuming this function does not receive nonlocal gotos,
3919 delete the handlers for such, as well as the insns to establish
3920 and disestablish them. */
3926 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3928 /* Delete the handler by turning off the flag that would
3929 prevent jump_optimize from deleting it.
3930 Also permit deletion of the nonlocal labels themselves
3931 if nothing local refers to them. */
3932 if (GET_CODE (insn
) == CODE_LABEL
)
3936 LABEL_PRESERVE_P (insn
) = 0;
3938 /* Remove it from the nonlocal_label list, to avoid confusing
3940 for (t
= nonlocal_labels
, last_t
= 0; t
;
3941 last_t
= t
, t
= TREE_CHAIN (t
))
3942 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
3947 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3949 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3952 if (GET_CODE (insn
) == INSN
)
3956 for (t
= nonlocal_goto_handler_slots
; t
!= 0; t
= XEXP (t
, 1))
3957 if (reg_mentioned_p (t
, PATTERN (insn
)))
3963 || (nonlocal_goto_stack_level
!= 0
3964 && reg_mentioned_p (nonlocal_goto_stack_level
,
3974 return max_parm_reg
;
3977 /* Return the first insn following those generated by `assign_parms'. */
3980 get_first_nonparm_insn ()
3983 return NEXT_INSN (last_parm_insn);
3984 return get_insns ();
3987 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3988 Crash if there is none. */
3991 get_first_block_beg ()
3993 register rtx searcher;
3994 register rtx insn = get_first_nonparm_insn ();
3996 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3997 if (GET_CODE (searcher) == NOTE
3998 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4001 abort (); /* Invalid call to this function. (See comments above.) */
4005 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4006 This means a type for which function calls must pass an address to the
4007 function or get an address back from the function.
4008 EXP may be a type node or an expression (whose type is tested). */
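/* For instance, under typical settings a declaration such as
       struct big { char buf[64]; } f (void);
   makes this return nonzero -- the caller must pass an address for the
   return value -- whereas a plain `int f (void);' returns zero because
   the value fits in a call-clobbered return register.  The exact answer
   is target-dependent; it follows from RETURN_IN_MEMORY and the
   call-used register check performed below.  */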
4011 aggregate_value_p (exp)
4014 int i, regno, nregs;
4017 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4020 type = TREE_TYPE (exp);
4022 if (RETURN_IN_MEMORY (type))
4024 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4025 and thus can't be returned in registers. */
4026 if (TREE_ADDRESSABLE (type))
4028 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4030 /* Make sure we have suitable call-clobbered regs to return
4031 the value in; if not, we must return it in memory. */
4032 reg = hard_function_value (type, 0, 0);
4034 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4036 if (GET_CODE (reg) != REG)
4039 regno = REGNO (reg);
4040 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4041 for (i = 0; i < nregs; i++)
4042 if (! call_used_regs[regno + i])
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.  */

void
assign_parms (fndecl)
     tree fndecl;
{
  register tree parm;
  register rtx entry_parm = 0;
  register rtx stack_parm = 0;
  CUMULATIVE_ARGS args_so_far;
  enum machine_mode promoted_mode, passed_mode;
  enum machine_mode nominal_mode, promoted_nominal_mode;
  int unsignedp;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
#ifdef SETUP_INCOMING_VARARGS
  int varargs_setup = 0;
#endif
  rtx conversion_insns = 0;
  struct args_size alignment_pad;

  /* Nonzero if the last arg is named `__builtin_va_alist',
     which is used on some machines for old-fashioned non-ANSI varargs.h;
     this should be stuck onto the stack as if it had arrived there.  */
  int hide_last_arg
    = (current_function_varargs
       && fnargs
       && (parm = tree_last (fnargs)) != 0
       && DECL_NAME (parm)
       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
		     "__builtin_va_alist")));

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));

  current_function_stdarg = stdarg;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;
  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
      struct args_size stack_offset;
      struct args_size arg_size;
      int passed_pointer = 0;
      int did_conversion = 0;
      tree passed_type = DECL_ARG_TYPE (parm);
      tree nominal_type = TREE_TYPE (parm);
      int pretend_named;

      /* Set LAST_NAMED if this is last named arg before some
	 anonymous args.  */
      int last_named = ((TREE_CHAIN (parm) == 0
			 || DECL_NAME (TREE_CHAIN (parm)) == 0)
			&& (stdarg || current_function_varargs));
      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
	 most machines, if this is a varargs/stdarg function, then we treat
	 the last named arg as if it were anonymous too.  */
      int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;

      if (TREE_TYPE (parm) == error_mark_node
	  /* This can happen after weird syntax errors
	     or if an enum type is defined among the parms.  */
	  || TREE_CODE (parm) != PARM_DECL
	  || passed_type == NULL)
	{
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
	    = gen_rtx_MEM (BLKmode, const0_rtx);
	  TREE_USED (parm) = 1;
	  continue;
	}

      /* For varargs.h function, save info about regs and stack space
	 used by the individual args, not including the va_alist arg.  */
      if (hide_last_arg && last_named)
	current_function_args_info = args_so_far;

      /* Find mode of arg as it is passed, and mode of arg
	 as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (passed_type);
      nominal_mode = TYPE_MODE (nominal_type);

      /* If the parm's mode is VOID, its value doesn't matter,
	 and avoid the usual things like emit_move_insn that could crash.  */
      if (nominal_mode == VOIDmode)
	{
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
	  continue;
	}

      /* If the parm is to be passed as a transparent union, use the
	 type of the first field for the tests below.  We have already
	 verified that the modes are the same.  */
      if (DECL_TRANSPARENT_UNION (parm)
	  || TYPE_TRANSPARENT_UNION (passed_type))
	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

      /* See if this arg was passed by invisible reference.  It is if
	 it is an object whose size depends on the contents of the
	 object itself or if the machine requires these objects be passed
	 that way.  */

      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (passed_type)))
	  || TREE_ADDRESSABLE (passed_type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
					     passed_type, named_arg)
#endif
	  )
	{
	  passed_type = nominal_type = build_pointer_type (passed_type);
	  passed_pointer = 1;
	  passed_mode = nominal_mode = Pmode;
	}

      promoted_mode = passed_mode;
#ifdef PROMOTE_FUNCTION_ARGS
      /* Compute the mode in which the arg is actually extended to.  */
      unsignedp = TREE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
#endif

      /* Let machine desc say which reg (if any) the parm arrives in.
	 0 means it arrives on the stack.  */
#ifdef FUNCTION_INCOMING_ARG
      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
					  passed_type, named_arg);
#else
      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
				 passed_type, named_arg);
#endif

      if (entry_parm == 0)
	promoted_mode = passed_mode;

#ifdef SETUP_INCOMING_VARARGS
      /* If this is the last named parameter, do any required setup for
	 varargs or stdargs.  We need to know about the case of this being an
	 addressable type, in which case we skip the registers it
	 would have arrived in.

	 For stdargs, LAST_NAMED will be set for two parameters, the one that
	 is actually the last named, and the dummy parameter.  We only
	 want to do this action once.

	 Also, indicate when RTL generation is to be suppressed.  */
      if (last_named && !varargs_setup)
	{
	  SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
				  current_function_pretend_args_size, 0);
	  varargs_setup = 1;
	}
#endif

      /* Determine parm's home in the stack,
	 in case it arrives in the stack or we should pretend it did.

	 Compute the stack position and rtx where the argument arrives
	 and its size.

	 There is one complexity here:  If this was a parameter that would
	 have been passed in registers, but wasn't only because it is
	 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
	 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
	 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
	 0 as it was the previous time.  */
      pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
      locate_and_pad_parm (promoted_mode, passed_type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
#ifdef FUNCTION_INCOMING_ARG
			   FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
						  passed_type,
						  pretend_named) != 0,
#else
			   FUNCTION_ARG (args_so_far, promoted_mode,
					 passed_type,
					 pretend_named) != 0,
#endif
#endif
			   fndecl, &stack_args_size, &stack_offset, &arg_size,
			   &alignment_pad);

      {
	rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);

	if (offset_rtx == const0_rtx)
	  stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
	else
	  stack_parm = gen_rtx_MEM (promoted_mode,
				    gen_rtx_PLUS (Pmode,
						  internal_arg_pointer,
						  offset_rtx));

	/* If this is a memory ref that contains aggregate components,
	   mark it as such for cse and loop optimize.  Likewise if it
	   is readonly.  */
	MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
	RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
	MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
      }

      /* If this parameter was passed both in registers and in the stack,
	 use the copy on the stack.  */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
	entry_parm = 0;

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      /* If this parm was passed part in regs and part in memory,
	 pretend it arrived entirely in memory
	 by pushing the register-part onto the stack.

	 In the special case of a DImode or DFmode that is split,
	 we could put it together in a pseudoreg directly,
	 but for now that's not worth bothering with.  */

      if (entry_parm)
	{
	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
						  passed_type, named_arg);

	  if (nregs > 0)
	    {
	      current_function_pretend_args_size
		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
		   / (PARM_BOUNDARY / BITS_PER_UNIT)
		   * (PARM_BOUNDARY / BITS_PER_UNIT));
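
	      /* Worked example of the rounding above (illustrative values
		 only): with three partial registers, UNITS_PER_WORD == 4 and
		 PARM_BOUNDARY == 64, the register part occupies 3 * 4 = 12
		 bytes; rounding up to the 8-byte parameter boundary gives
		 ((12 + 8 - 1) / 8) * 8 = 16 bytes of pretended arg space.  */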
	      /* Handle calls that pass values in multiple non-contiguous
		 locations.  The Irix 6 ABI has examples of this.  */
	      if (GET_CODE (entry_parm) == PARALLEL)
		emit_group_store (validize_mem (stack_parm), entry_parm,
				  int_size_in_bytes (TREE_TYPE (parm)),
				  (TYPE_ALIGN (TREE_TYPE (parm))
				   / BITS_PER_UNIT));
	      else
		move_block_from_reg (REGNO (entry_parm),
				     validize_mem (stack_parm), nregs,
				     int_size_in_bytes (TREE_TYPE (parm)));

	      entry_parm = stack_parm;
	    }
	}
#endif

      /* If we didn't decide this parm came in a register,
	 by default it came on the stack.  */
      if (entry_parm == 0)
	entry_parm = stack_parm;

      /* Record permanently how this parm was passed.  */
      DECL_INCOMING_RTL (parm) = entry_parm;

      /* If there is actually space on the stack for this parm,
	 count it in stack_args_size; otherwise set stack_parm to 0
	 to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
	  || (GET_CODE (entry_parm) == PARALLEL
	      && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
	  /* On some machines, even if a parm value arrives in a register
	     there is still an (uninitialized) stack slot allocated for it.

	     ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
	     whether this parameter already has a stack slot allocated,
	     because an arg block exists only if current_function_args_size
	     is larger than some threshold, and we haven't calculated that
	     yet.  So, for now, we just assume that stack slots never exist
	     in this case.  */
	  || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
	  )
	{
	  stack_args_size.constant += arg_size.constant;
	  if (arg_size.var)
	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
	}
      else
	/* No stack slot was pushed for this parm.  */
	stack_parm = 0;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
			    passed_type, named_arg);
      /* If we can't trust the parm stack slot to be aligned enough
	 for its ultimate type, don't use that slot after entry.
	 We'll make another stack slot, if we need one.  */
      {
	unsigned int thisparm_boundary
	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
	  stack_parm = 0;
      }

      /* If parm was passed in memory, and we need to convert it on entry,
	 don't store it back in that same slot.  */
      if (entry_parm != 0
	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
	stack_parm = 0;

      /* Now adjust STACK_PARM to the mode and precise location
	 where this parameter should live during execution,
	 if we discover that it must live in the stack during execution.
	 To make debuggers happier on big-endian machines, we store
	 the value in the last bytes of the space available.  */

      if (nominal_mode != BLKmode && nominal_mode != passed_mode
	  && stack_parm != 0)
	{
	  rtx offset_rtx;

	  if (BYTES_BIG_ENDIAN
	      && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
	    stack_offset.constant += (GET_MODE_SIZE (passed_mode)
				      - GET_MODE_SIZE (nominal_mode));

	  offset_rtx = ARGS_SIZE_RTX (stack_offset);
	  if (offset_rtx == const0_rtx)
	    stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
	  else
	    stack_parm = gen_rtx_MEM (nominal_mode,
				      gen_rtx_PLUS (Pmode,
						    internal_arg_pointer,
						    offset_rtx));

	  /* If this is a memory ref that contains aggregate components,
	     mark it as such for cse and loop optimize.  */
	  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
	}

      /* ENTRY_PARM is an RTX for the parameter as it arrives,
	 in the mode in which it arrives.
	 STACK_PARM is an RTX for a stack slot where the parameter can live
	 during the function (in case we want to put it there).
	 STACK_PARM is 0 if no stack slot was pushed for it.

	 Now output code if necessary to convert ENTRY_PARM to
	 the type in which this function declares it,
	 and store that result in an appropriate place,
	 which may be a pseudo reg, may be STACK_PARM,
	 or may be a local stack slot if STACK_PARM is 0.

	 Set DECL_RTL to that place.  */
      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
	{
	  /* If a BLKmode arrives in registers, copy it to a stack slot.
	     Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (entry_parm) == REG
	      || GET_CODE (entry_parm) == PARALLEL)
	    {
	      int size_stored
		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
			      UNITS_PER_WORD);

	      /* Note that we will be storing an integral number of words.
		 So we have to be careful to ensure that we allocate an
		 integral number of words.  We do this below in the
		 assign_stack_local if space was not allocated in the argument
		 list.  If it was, this will not work if PARM_BOUNDARY is not
		 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
		 if it becomes a problem.  */

	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  size_stored, 0);

		  /* If this is a memory ref that contains aggregate
		     components, mark it as such for cse and loop optimize.  */
		  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
		}

	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
		abort ();

	      if (TREE_READONLY (parm))
		RTX_UNCHANGING_P (stack_parm) = 1;

	      /* Handle calls that pass values in multiple non-contiguous
		 locations.  The Irix 6 ABI has examples of this.  */
	      if (GET_CODE (entry_parm) == PARALLEL)
		emit_group_store (validize_mem (stack_parm), entry_parm,
				  int_size_in_bytes (TREE_TYPE (parm)),
				  (TYPE_ALIGN (TREE_TYPE (parm))
				   / BITS_PER_UNIT));
	      else
		move_block_from_reg (REGNO (entry_parm),
				     validize_mem (stack_parm),
				     size_stored / UNITS_PER_WORD,
				     int_size_in_bytes (TREE_TYPE (parm)));
	    }
	  DECL_RTL (parm) = stack_parm;
	}
      else if (! ((! optimize
		   && ! DECL_REGISTER (parm)
		   && ! DECL_INLINE (fndecl))
		  /* layout_decl may set this.  */
		  || TREE_ADDRESSABLE (parm)
		  || TREE_SIDE_EFFECTS (parm)
		  /* If -ffloat-store specified, don't put explicit
		     float variables into registers.  */
		  || (flag_float_store
		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
	       /* Always assign pseudo to structure return or item passed
		  by invisible reference.  */
	       || passed_pointer || parm == function_result_decl)
	{
	  /* Store the parm in a pseudoregister during the function, but we
	     may need to do it in a wider mode.  */

	  register rtx parmreg;
	  int regno, regnoi = 0, regnor = 0;

	  unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));

	  promoted_nominal_mode
	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);

	  parmreg = gen_reg_rtx (promoted_nominal_mode);
	  mark_user_reg (parmreg);

	  /* If this was an item that we received a pointer to, set DECL_RTL
	     appropriately.  */
	  if (passed_pointer)
	    {
	      DECL_RTL (parm)
		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
	      MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
	    }
	  else
	    DECL_RTL (parm) = parmreg;

	  /* Copy the value into the register.  */
	  if (nominal_mode != passed_mode
	      || promoted_nominal_mode != promoted_mode)
	    {
	      int save_tree_used;

	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
		 mode, by the caller.  We now have to convert it to
		 NOMINAL_MODE, if different.  However, PARMREG may be in
		 a different mode than NOMINAL_MODE if it is being stored
		 promoted.

		 If ENTRY_PARM is a hard register, it might be in a register
		 not valid for operating in its mode (e.g., an odd-numbered
		 register for a DFmode).  In that case, moves are the only
		 thing valid, so we can't do a convert from there.  This
		 occurs when the calling sequence allow such misaligned
		 usages.

		 In addition, the conversion may involve a call, which could
		 clobber parameters which haven't been copied to pseudo
		 registers yet.  Therefore, we must first copy the parm to
		 a pseudo reg here, and save the conversion until after all
		 parameters have been moved.  */

	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

	      emit_move_insn (tempreg, validize_mem (entry_parm));

	      push_to_sequence (conversion_insns);
	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);

	      /* TREE_USED gets set erroneously during expand_assignment.  */
	      save_tree_used = TREE_USED (parm);
	      expand_assignment (parm,
				 make_tree (nominal_type, tempreg), 0, 0);
	      TREE_USED (parm) = save_tree_used;
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
	  else
	    emit_move_insn (parmreg, validize_mem (entry_parm));

	  /* If we were passed a pointer but the actual value
	     can safely live in a register, put it in one.  */
	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
	      && ! ((! optimize
		     && ! DECL_REGISTER (parm)
		     && ! DECL_INLINE (fndecl))
		    /* layout_decl may set this.  */
		    || TREE_ADDRESSABLE (parm)
		    || TREE_SIDE_EFFECTS (parm)
		    /* If -ffloat-store specified, don't put explicit
		       float variables into registers.  */
		    || (flag_float_store
			&& TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
	    {
	      /* We can't use nominal_mode, because it will have been set to
		 Pmode above.  We must use the actual mode of the parm.  */
	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	      mark_user_reg (parmreg);
	      emit_move_insn (parmreg, DECL_RTL (parm));
	      DECL_RTL (parm) = parmreg;
	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
		 now the parm.  */
	      stack_parm = 0;
	    }
#ifdef FUNCTION_ARG_CALLEE_COPIES
	  /* If we are passed an arg by reference and it is our responsibility
	     to make a copy, do it now.
	     PASSED_TYPE and PASSED mode now refer to the pointer, not the
	     original argument, so we must recreate them in the call to
	     FUNCTION_ARG_CALLEE_COPIES.  */
	  /* ??? Later add code to handle the case that if the argument isn't
	     modified, don't do the copy.  */

	  else if (passed_pointer
		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
						  TYPE_MODE (DECL_ARG_TYPE (parm)),
						  DECL_ARG_TYPE (parm),
						  named_arg)
		   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
	    {
	      rtx copy;
	      tree type = DECL_ARG_TYPE (parm);

	      /* This sequence may involve a library call perhaps clobbering
		 registers that haven't been copied to pseudos yet.  */

	      push_to_sequence (conversion_insns);

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		/* This is a variable sized object.  */
		copy = gen_rtx_MEM (BLKmode,
				    allocate_dynamic_stack_space
				    (expr_size (parm), NULL_RTX,
				     TYPE_ALIGN (type)));
	      else
		copy = assign_stack_temp (TYPE_MODE (type),
					  int_size_in_bytes (type), 1);
	      MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
	      RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);

	      store_expr (parm, copy, 0);
	      emit_move_insn (parmreg, XEXP (copy, 0));
	      if (current_function_check_memory_usage)
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   XEXP (copy, 0), Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
#endif /* FUNCTION_ARG_CALLEE_COPIES */
	  /* In any case, record the parm's desired stack location
	     in case we later discover it must live in the stack.

	     If it is a COMPLEX value, store the stack location for both
	     halves.  */

	  if (GET_CODE (parmreg) == CONCAT)
	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
	  else
	    regno = REGNO (parmreg);

	  if (regno >= max_parm_reg)
	    {
	      rtx *new;
	      int old_max_parm_reg = max_parm_reg;

	      /* It's slow to expand this one register at a time,
		 but it's also rare and we need max_parm_reg to be
		 precisely correct.  */
	      max_parm_reg = regno + 1;
	      new = (rtx *) xrealloc (parm_reg_stack_loc,
				      max_parm_reg * sizeof (rtx));
	      bzero ((char *) (new + old_max_parm_reg),
		     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
	      parm_reg_stack_loc = new;
	    }

	  if (GET_CODE (parmreg) == CONCAT)
	    {
	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

	      regnor = REGNO (gen_realpart (submode, parmreg));
	      regnoi = REGNO (gen_imagpart (submode, parmreg));

	      if (stack_parm != 0)
		{
		  parm_reg_stack_loc[regnor]
		    = gen_realpart (submode, stack_parm);
		  parm_reg_stack_loc[regnoi]
		    = gen_imagpart (submode, stack_parm);
		}
	      else
		{
		  parm_reg_stack_loc[regnor] = 0;
		  parm_reg_stack_loc[regnoi] = 0;
		}
	    }
	  else
	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;

	  /* Mark the register as eliminable if we did no conversion
	     and it was copied from memory at a fixed offset,
	     and the arg pointer was not copied to a pseudo-reg.
	     If the arg pointer is a pseudo reg or the offset formed
	     an invalid address, such memory-equivalences
	     as we make here would screw up life analysis for it.  */
	  if (nominal_mode == passed_mode
	      && ! did_conversion
	      && stack_parm != 0
	      && GET_CODE (stack_parm) == MEM
	      && stack_offset.var == 0
	      && reg_mentioned_p (virtual_incoming_args_rtx,
				  XEXP (stack_parm, 0)))
	    {
	      rtx linsn = get_last_insn ();
	      rtx sinsn, set;

	      /* Mark complex types separately.  */
	      if (GET_CODE (parmreg) == CONCAT)
		/* Scan backwards for the set of the real and
		   imaginary parts.  */
		for (sinsn = linsn; sinsn != 0;
		     sinsn = prev_nonnote_insn (sinsn))
		  {
		    set = single_set (sinsn);
		    if (set != 0
			&& SET_DEST (set) == regno_reg_rtx[regnoi])
		      REG_NOTES (sinsn)
			= gen_rtx_EXPR_LIST (REG_EQUIV,
					     parm_reg_stack_loc[regnoi],
					     REG_NOTES (sinsn));
		    else if (set != 0
			     && SET_DEST (set) == regno_reg_rtx[regnor])
		      REG_NOTES (sinsn)
			= gen_rtx_EXPR_LIST (REG_EQUIV,
					     parm_reg_stack_loc[regnor],
					     REG_NOTES (sinsn));
		  }
	      else if ((set = single_set (linsn)) != 0
		       && SET_DEST (set) == parmreg)
		REG_NOTES (linsn)
		  = gen_rtx_EXPR_LIST (REG_EQUIV,
				       stack_parm, REG_NOTES (linsn));
	    }

	  /* For pointer data type, suggest pointer register.  */
	  if (POINTER_TYPE_P (TREE_TYPE (parm)))
	    mark_reg_pointer (parmreg,
			      (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
			       / BITS_PER_UNIT));
	}
      else
	{
	  /* Value must be stored in the stack slot STACK_PARM
	     during function execution.  */

	  if (promoted_mode != nominal_mode)
	    {
	      /* Conversion is required.  */
	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

	      emit_move_insn (tempreg, validize_mem (entry_parm));

	      push_to_sequence (conversion_insns);
	      entry_parm = convert_to_mode (nominal_mode, tempreg,
					    TREE_UNSIGNED (TREE_TYPE (parm)));
	      if (stack_parm)
		/* ??? This may need a big-endian conversion on sparc64.  */
		stack_parm = change_address (stack_parm, nominal_mode,
					     NULL_RTX);

	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }

	  if (entry_parm != stack_parm)
	    {
	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
		  /* If this is a memory ref that contains aggregate components,
		     mark it as such for cse and loop optimize.  */
		  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
		}

	      if (promoted_mode != nominal_mode)
		{
		  push_to_sequence (conversion_insns);
		  emit_move_insn (validize_mem (stack_parm),
				  validize_mem (entry_parm));
		  conversion_insns = get_insns ();
		  end_sequence ();
		}
	      else
		emit_move_insn (validize_mem (stack_parm),
				validize_mem (entry_parm));
	    }
	  if (current_function_check_memory_usage)
	    {
	      push_to_sequence (conversion_insns);
	      emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				 XEXP (stack_parm, 0), Pmode,
				 GEN_INT (GET_MODE_SIZE (GET_MODE
							 (entry_parm))),
				 TYPE_MODE (sizetype),
				 GEN_INT (MEMORY_USE_RW),
				 TYPE_MODE (integer_type_node));

	      conversion_insns = get_insns ();
	      end_sequence ();
	    }
	  DECL_RTL (parm) = stack_parm;
	}
      /* If this "parameter" was the place where we are receiving the
	 function's incoming structure pointer, set up the result.  */
      if (parm == function_result_decl)
	{
	  tree result = DECL_RESULT (fndecl);
	  tree restype = TREE_TYPE (result);

	  DECL_RTL (result)
	    = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));

	  MEM_SET_IN_STRUCT_P (DECL_RTL (result),
			       AGGREGATE_TYPE_P (restype));
	}

      if (TREE_THIS_VOLATILE (parm))
	MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
      if (TREE_READONLY (parm))
	RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
    }

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insns (conversion_insns);

  last_parm_insn = get_last_insn ();

  current_function_args_size = stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum size.  */

#ifdef REG_PARM_STACK_SPACE
#ifndef MAYBE_REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
				    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif

#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;
#endif

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_diffop (stack_args_size.var,
				   size_int (-stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
						 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  if (!hide_last_arg)
    current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
}
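
/* Illustrative helper (not part of GCC): after assign_parms has run, a
   parameter has two RTL forms, and a hypothetical debugging aid could dump
   both.  For example, a `short' parameter on a machine that defines
   PROMOTE_FUNCTION_ARGS typically arrives in a wider (say SImode) register,
   which is what DECL_INCOMING_RTL records, while DECL_RTL is the pseudo (or
   stack slot) used for it inside the function body.  */
#if 0
static void
debug_parm_rtl (parm)
     tree parm;
{
  debug_rtx (DECL_INCOMING_RTL (parm));	/* the parm as it arrives */
  debug_rtx (DECL_RTL (parm));		/* the parm as used in the body */
}
#endif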
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
	    && mode != DECL_MODE (arg))
	  {
	    *pmode = DECL_MODE (arg);
	    *punsignedp = unsignedp;
	    return DECL_INCOMING_RTL (arg);
	  }
      }

  return 0;
}

#endif
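
/* Illustrative sketch (not from GCC): a pass that wants to know whether a
   hard register holds a promoted incoming argument might query it like
   this.  The helper and the register number are hypothetical.  */
#if 0
static void
note_promoted_arg (regno)
     int regno;
{
  enum machine_mode from_mode;
  int arg_unsignedp;
  rtx incoming = promoted_input_arg (regno, &from_mode, &arg_unsignedp);

  if (incoming != 0)
    /* REGNO holds an argument extended from FROM_MODE up to
       GET_MODE (incoming); ARG_UNSIGNEDP records how it was extended.  */
    debug_rtx (incoming);
}
#endif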
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in *OFFSET_PTR
   and *ARG_SIZE_PTR, respectively.

   IN_REGS is non-zero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
    initial_offset_ptr is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    initial_offset_ptr.  arg_size_ptr is always positive.  */

void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr,
		     alignment_pad)
     enum machine_mode passed_mode;
     tree type;
     int in_regs ATTRIBUTE_UNUSED;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
     struct args_size *alignment_pad;
{
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;

#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  if (where_pad != downward)
    pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
  if (initial_offset_ptr->var)
    arg_size_ptr->var = size_binop (MINUS_EXPR,
				    size_binop (MINUS_EXPR,
						ssize_int (0),
						initial_offset_ptr->var),
				    offset_ptr->var);
  else
    arg_size_ptr->constant = (- initial_offset_ptr->constant
			      - offset_ptr->constant);

#else /* !ARGS_GROW_DOWNWARD */
  pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
      /* However, BLKmode args passed in regs have their padding done elsewhere.
	 The stack slot must be able to hold the entire register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
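
/* Worked example of the two roundings above (illustrative numbers only):
   suppose the args so far occupy 6 bytes, FUNCTION_ARG_BOUNDARY asks for a
   32-bit (4-byte) boundary, and the new parm is a 5-byte BLKmode object
   with PARM_BOUNDARY == 32.  The first rounding moves the starting offset
   from 6 up to 8; the second rounds the 5-byte size up to 8 bytes, so the
   next argument will start at offset 16.  The incoming offset of 6 itself
   is not changed by the size rounding.  */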
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
     struct args_size *offset_ptr;
     int boundary;
     struct args_size *alignment_pad;
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;

  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
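
/* Numeric example of the rounding macros used above (illustrative values):
   with a 16-byte boundary, CEIL_ROUND (36, 16) = (36 + 15) & ~15 = 48,
   while FLOOR_ROUND (36, 16) = 36 & ~15 = 32, which is what the
   ARGS_GROW_DOWNWARD case needs since offsets are negative there.  */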
#ifndef ARGS_GROW_DOWNWARD
static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
#endif
/* Walk the tree of blocks describing the binding levels within a function
   and warn about uninitialized variables.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (warn_uninitialized
	  && TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  /* Global optimizations can make it difficult to determine if a
	     particular variable has been initialized.  However, a VAR_DECL
	     with a nonzero DECL_INITIAL had an initializer, so do not
	     claim it is potentially uninitialized.

	     We do not care about the actual value in DECL_INITIAL, so we do
	     not worry that it may be a dangling pointer.  */
	  && DECL_INITIAL (decl) == NULL_TREE
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' might be used uninitialized in this function");
      if (extra_warnings
	  && TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}

/* Do the appropriate part of uninitialized_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning ()
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
}
/* If this function call setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variable because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}

/* Like the previous function, but for args instead of local variables.  */

void
setjmp_protect_args ()
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0
      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      if (fp->x_arg_pointer_save_area == 0)
	fp->x_arg_pointer_save_area
	  = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
				fp ? fp : cfun);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      push_obstacks (fp->function_maybepermanent_obstack,
		     fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->x_trampoline_list = tree_cons (function, rtlexp,
					 fp->x_trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
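
/* Numeric example of the rounding above (illustrative values): with
   TRAMPOLINE_ALIGNMENT == 32 the alignment is 4 bytes, so an address of
   0x1001 becomes (0x1001 + 3) & -4 == 0x1004, while an already aligned
   address such as 0x1004 is left unchanged.  */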
/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

void
identify_blocks ()
{
  int n_blocks;
  tree *block_vector, *last_block_vector;
  tree *block_stack;
  tree block = DECL_INITIAL (current_function_decl);

  if (block == 0)
    return;

  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
     depth-first order.  */
  block_vector = get_block_vector (block, &n_blocks);
  block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));

  last_block_vector = identify_blocks_1 (get_insns (),
					 block_vector + 1,
					 block_vector + n_blocks,
					 block_stack);

  /* If we didn't use all of the subblocks, we've misplaced block notes.  */
  /* ??? This appears to happen all the time.  Latent bugs elsewhere?  */
  if (0 && last_block_vector != block_vector + n_blocks)
    abort ();

  free (block_vector);
  free (block_stack);
}

/* Subroutine of identify_blocks.  Do the block substitution on the
   insn chain beginning with INSNS.  Recurse for CALL_PLACEHOLDER chains.

   BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
   BLOCK_VECTOR is incremented for each block seen.  */

static tree *
identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
     rtx insns;
     tree *block_vector;
     tree *end_block_vector;
     tree *orig_block_stack;
{
  rtx insn;
  tree *block_stack = orig_block_stack;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree b;

	      /* If there are more block notes than BLOCKs, something
		 is badly wrong.  */
	      if (block_vector == end_block_vector)
		abort ();

	      b = *block_vector++;
	      NOTE_BLOCK (insn) = b;
	      *block_stack++ = b;
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      /* If there are more NOTE_INSN_BLOCK_ENDs than
		 NOTE_INSN_BLOCK_BEGs, something is badly wrong.  */
	      if (block_stack == orig_block_stack)
		abort ();

	      NOTE_BLOCK (insn) = *--block_stack;
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  rtx cp = PATTERN (insn);

	  block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
					    end_block_vector, block_stack);
	  if (XEXP (cp, 1))
	    block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
					      end_block_vector, block_stack);
	  if (XEXP (cp, 2))
	    block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
					      end_block_vector, block_stack);
	}
    }

  /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
     something is badly wrong.  */
  if (block_stack != orig_block_stack)
    abort ();

  return block_vector;
}
/* Identify BLOCKs referenced by more than one
   NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks.  */

void
reorder_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);
  varray_type block_stack;

  if (block == NULL_TREE)
    return;

  VARRAY_TREE_INIT (block_stack, 10, "block_stack");

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  reorder_blocks_1 (get_insns (), block, &block_stack);

  BLOCK_SUBBLOCKS (block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  VARRAY_FREE (block_stack);
}

/* Helper function for reorder_blocks.  Process the insn chain beginning
   at INSNS.  Recurse for CALL_PLACEHOLDER insns.  */

static void
reorder_blocks_1 (insns, current_block, p_block_stack)
     rtx insns;
     tree current_block;
     varray_type *p_block_stack;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      /* If we have seen this block before, copy it.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  block = copy_node (block);
		  NOTE_BLOCK (insn) = block;
		}
	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      BLOCK_SUPERCONTEXT (block) = current_block;
	      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	      BLOCK_SUBBLOCKS (current_block) = block;
	      current_block = block;
	      VARRAY_PUSH_TREE (*p_block_stack, block);
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
	      VARRAY_POP (*p_block_stack);
	      BLOCK_SUBBLOCKS (current_block)
		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  rtx cp = PATTERN (insn);
	  reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
	  if (XEXP (cp, 1))
	    reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
	  if (XEXP (cp, 2))
	    reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}

/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}

/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (block, n_blocks_p)
     tree block;
     int *n_blocks_p;
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
  all_blocks (block, block_vector);

  return block_vector;
}
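
/* Illustrative sketch (not part of the compiler): a pass that wants a flat
   view of the current function's scope tree could use get_block_vector and
   walk the result, freeing it afterwards as the comment above requires.  */
#if 0
static void
walk_scope_blocks ()
{
  int n_blocks, i;
  tree *blocks = get_block_vector (DECL_INITIAL (current_function_decl),
				   &n_blocks);

  for (i = 0; i < n_blocks; ++i)
    debug_tree (blocks[i]);	/* visits BLOCKs in depth-first preorder */

  free (blocks);
}
#endif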
static int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (fn)
     tree fn;
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* Allocate a function structure and reset its contents to the defaults.  */
static void
prepare_function_start ()
{
  cfun = (struct function *) xcalloc (1, sizeof (struct function));

  init_stmt_for_function ();
  init_eh_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_varasm_status (cfun);

  /* Clear out data used for inlining.  */
  cfun->inlinable = 0;
  cfun->original_decl_initial = 0;
  cfun->original_arg_vector = 0;

#ifdef STACK_BOUNDARY
  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;
#else
  cfun->stack_alignment_needed = 0;
  cfun->preferred_stack_boundary = 0;
#endif

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_contains_functions = 0;
  current_function_is_leaf = 0;
  current_function_nothrow = 0;
  current_function_sp_is_unchanging = 0;
  current_function_uses_only_leaf_regs = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  if (init_lang_status)
    (*init_lang_status) (cfun);
  if (init_machine_status)
    (*init_machine_status) (cfun);
}
5923 /* Initialize the rtl expansion mechanism so that we can do simple things
5924 like generate sequences. This is used to provide a context during global
5925 initialization of some passes. */
5927 init_dummy_function_start ()
5929 prepare_function_start ();
5932 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5933 and initialize static variables for generating RTL for the statements
5937 init_function_start (subr
, filename
, line
)
5942 prepare_function_start ();
5944 /* Remember this function for later. */
5945 cfun
->next_global
= all_functions
;
5946 all_functions
= cfun
;
5948 current_function_name
= (*decl_printable_name
) (subr
, 2);
  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}

/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation ()
{
  /* No prologue/epilogue insns yet.  */
  VARRAY_GROW (prologue, 0);
  VARRAY_GROW (epilogue, 0);
  VARRAY_GROW (sibcall_epilogue, 0);
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}

/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
                     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}

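/* Background: on targets that lack an .init section, __main is the libgcc
   routine that runs the global constructors, so the library call emitted
   above makes main () invoke it before any user statements.  NAME__MAIN and
   SYMBOL__MAIN merely let a target spell that entry point differently.  */
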
extern struct obstack permanent_obstack;

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  tree tem;
  rtx last_ptr = NULL_RTX;
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
          || GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;
  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
                               AGGREGATE_TYPE_P (TREE_TYPE
                                                 (DECL_RESULT (subr))));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
         compute the return values into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    {
      /* Scalar, returned in a register.  */
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();
  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* Copy the static chain pointer into a pseudo.  If we have
         small register classes, copy the value from memory if
         static_chain_incoming_rtx is a REG.  */
      if (tem)
        {
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (SMALL_REGISTER_CLASSES
              && GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;

          /* Chain thru stack frames, assuming pointer to next lexical frame
             is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
                                               memory_address (Pmode,
                                                               last_ptr)));
        }

      /* If we are not optimizing, ensure that we know that this
         piece of context is live over the entire function.  */
      save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
                                          save_expr_regs);
    }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
                   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
         side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
}

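/* Note that, unlike init_function_start, init_dummy_function_start never
   chains its cfun onto all_functions, so the scratch RTL generated during
   global initialization is simply released here and never reaches the
   optimizers or the garbage-collector roots.  */
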
/* Call DOIT for each hard register used as a return value from
   the current function.  */

static void
diddle_return_value (doit, arg)
     void (*doit) PARAMS ((rtx, void *));
     void *arg;
{
  rtx outgoing = current_function_return_rtx;

  if (outgoing == NULL_RTX)
    return;

  if (GET_CODE (outgoing) == REG
      && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
#else
      outgoing = FUNCTION_VALUE (type, current_function_decl);
#endif
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (outgoing) == BLKmode)
        PUT_MODE (outgoing,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
    }

  if (GET_CODE (outgoing) == REG)
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}

static void
do_clobber_return_reg (reg, arg)
     rtx reg;
     void *arg ATTRIBUTE_UNUSED;
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register ()
{
  diddle_return_value (do_clobber_return_reg, NULL);
}

static void
do_use_return_reg (reg, arg)
     rtx reg;
     void *arg ATTRIBUTE_UNUSED;
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

void
use_return_register ()
{
  diddle_return_value (do_use_return_reg, NULL);
}

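/* The CLOBBER and USE rtxes emitted by the two wrappers above are purely
   markers for data-flow analysis: the CLOBBER placed just before
   return_label keeps a value that merely falls through from being treated
   as a live return value, and the USE emitted at the very end keeps later
   passes from deleting the final copy into the hard return register.  */
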
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  finish_expr_for_function ();

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      rtx seq;

      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
                      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();

          ggc_add_rtx_root (&initial_trampoline, 1);
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
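  /* For reference: the trampolines initialized above are what make taking
     the address of a nested function work.  In GNU C,

	int outer (int x)
	{
	  int inner (int y) { return x + y; }
	  int (*fp) (int) = inner;
	  return fp (1);
	}

     fp ends up pointing at a small stack-resident stub; the insns emitted
     here (via the TRAMPOLINE_TEMPLATE block copy and the target's
     INITIALIZE_TRAMPOLINE macro) fill that stub in with outer's static
     chain and inner's real address.  */
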
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = gen_sequence ();
            end_sequence ();
            emit_insns_before (seq, tail_recursion_reentry);
            break;
          }
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */
  if (return_label)
    {
      /* Before the return label, clobber the return registers so that
         they are not propagated live to the rest of the function.  This
         can only happen with functions that drop through; if there had
         been a return statement, there would have either been a return
         rtx, or a jump to the return label.  */
      clobber_return_register ();

      emit_label (return_label);
    }

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);
  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If there are any catch_clauses remaining, output them now.  */
    emit_insns (catch_clauses);
    catch_clauses = catch_clauses_last = NULL_RTX;
    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
        label = gen_label_rtx ();
        last = emit_jump_insn_after (gen_jump (label), last);
        last = emit_barrier_after (last);

        emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));

      /* The delay slot scheduler assumes that current_function_return_rtx
         holds the hard register containing the return value, not a temporary
         pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
    }
  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */
#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}

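/* Note that expand_function_end itself emits at most a bare `return' insn;
   the real prologue and epilogue sequences are generated later by
   thread_prologue_and_epilogue_insns below, once the CFG has been built,
   so that they can be placed on the entry and exit edges.  */
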
/* Extend a vector that records the INSN_UIDs of INSNS (either a
   sequence or a single insn).  */

static void
record_insns (insns, vecp)
     rtx insns;
     varray_type *vecp;
{
  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      int i = VARRAY_SIZE (*vecp);

      VARRAY_GROW (*vecp, i + len);
      while (--len >= 0)
        {
          VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
          ++i;
        }
    }
  else
    {
      int i = VARRAY_SIZE (*vecp);
      VARRAY_GROW (*vecp, i + 1);
      VARRAY_INT (*vecp, i) = INSN_UID (insns);
    }
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     varray_type vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;

      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
            count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
        if (INSN_UID (insn) == VARRAY_INT (vec, j))
          return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (insn)
     rtx insn;
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}

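/* The reason for the UID bookkeeping above: prologue, epilogue and sibcall
   epilogue insns are synthesized directly from machine-description
   expanders rather than from trees, so the only reliable way for later
   passes to recognize them is to remember their INSN_UIDs at creation time
   (record_insns) and query that list afterwards (contains and the two
   predicates above), including insns that reorg has wrapped in a
   delay-slot SEQUENCE.  */
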
#ifdef HAVE_return

/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (bb)
     basic_block bb;
{
  rtx p, end;

  end = emit_jump_insn_after (gen_return (), bb->end);
  p = NEXT_INSN (bb->end);
  while (p != NEXT_INSN (end))
    {
      set_block_for_insn (p, bb);
      p = NEXT_INSN (p);
    }
  bb->end = end;
}
#endif /* HAVE_return */

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  edge e;
  rtx seq;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx insn;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      record_insns (seq, &prologue);
      emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      /* GDB handles `break f' by setting a breakpoint on the first
         line note *after* the prologue.  That means that we should
         insert a line note here; otherwise, if the next line note
         comes part way into the next block, GDB will skip all the way
         to that point.  */
      insn = next_nonnote_insn (f);
      while (insn)
        {
          if (GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) >= 0)
            {
              emit_line_note_force (NOTE_SOURCE_FILE (insn),
                                    NOTE_LINE_NUMBER (insn));
              break;
            }

          insn = PREV_INSN (insn);
        }

      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
         the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
        {
          /* Can't deal with multiple successors of the entry block.  */
          if (ENTRY_BLOCK_PTR->succ->succ_next)
            abort ();

          insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
        }
      else
        emit_insn_after (seq, f);
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
        {
          for (e = last->pred; e; e = e_next)
            {
              basic_block bb = e->src;
              rtx jump;

              e_next = e->pred_next;
              if (bb == ENTRY_BLOCK_PTR)
                continue;

              jump = bb->end;
              if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
                continue;

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb);
                  flow_delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  rtx ret, *loc;

                  ret = SET_SRC (PATTERN (jump));
                  if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
                    loc = &XEXP (ret, 1);
                  else
                    loc = &XEXP (ret, 2);
                  ret = gen_rtx_RETURN (VOIDmode);

                  if (! validate_change (jump, loc, ret, 0))
                    continue;
                  if (JUMP_LABEL (jump))
                    LABEL_NUSES (JUMP_LABEL (jump))--;

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (bb->succ->succ_next == NULL)
                    continue;
                }
              else
                continue;

              /* Fix up the CFG for the successful change we just made.  */
              remove_edge (e);
              make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (last->end);
          emit_return_into_block (last);
        }
      else
        {
          /* The exit block wasn't empty.  We have to use insert_insn_on_edge,
             as it may be the exit block can go elsewhere as well
             as being the tail of the fallthru block.  */
          start_sequence ();
          emit_jump_insn (gen_return ());
          seq = gen_sequence ();
          end_sequence ();

          insert_insn_on_edge (seq, e);
        }
      goto epilogue_done;
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
         due to RETURN instructions, but those don't need epilogues.
         There really shouldn't be a mixture -- either all should have
         been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;

      start_sequence ();
      emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      record_insns (seq, &epilogue);

      seq = gen_sequence ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
    }
#endif

 epilogue_done:
  commit_edge_insertions ();
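  /* Note that insert_insn_on_edge only queues a sequence on the edge;
     nothing actually appears in the insn stream until
     commit_edge_insertions is called, which splits each annotated edge
     (creating a new basic block where necessary) and emits the queued
     insns there.  That is why the prologue and epilogue built above can be
     generated before their final position in the CFG is known.  */
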
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;

      if (GET_CODE (insn) != CALL_INSN
          || ! SIBLING_CALL_P (insn))
        continue;

      seq = gen_sibcall_epilogue ();

      i = PREV_INSN (insn);
      emit_insn_before (seq, insn);

      /* Update the UID to basic block map.  */
      for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
        set_block_for_insn (i, bb);

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  */
      record_insns (seq, &sibcall_epilogue);
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      register rtx insn, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = f; len && insn; insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == NOTE)
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if ((len -= contains (insn, prologue)) == 0)
            {
              rtx next;

              /* Find the prologue-end note if we haven't already, and
                 move it to just after the last prologue insn.  */
              if (note == 0)
                {
                  for (note = insn; (note = NEXT_INSN (note));)
                    if (GET_CODE (note) == NOTE
                        && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                      break;
                }

              next = NEXT_INSN (note);

              /* Whether or not we can depend on BLOCK_HEAD,
                 attempt to keep it up-to-date.  */
              if (BLOCK_HEAD (0) == note)
                BLOCK_HEAD (0) = next;

              remove_insn (note);
              add_insn_after (note, insn);
            }
        }
    }
= VARRAY_SIZE (epilogue
)) > 0)
7081 register rtx insn
, note
= 0;
7083 /* Scan from the end until we reach the first epilogue insn.
7084 We apparently can't depend on basic_block_{head,end} after
7086 for (insn
= get_last_insn (); len
&& insn
; insn
= PREV_INSN (insn
))
7088 if (GET_CODE (insn
) == NOTE
)
7090 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
7093 else if ((len
-= contains (insn
, epilogue
)) == 0)
7095 /* Find the epilogue-begin note if we haven't already, and
7096 move it to just before the first epilogue insn. */
7099 for (note
= insn
; (note
= PREV_INSN (note
));)
7100 if (GET_CODE (note
) == NOTE
7101 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
7105 /* Whether or not we can depend on BLOCK_HEAD,
7106 attempt to keep it up-to-date. */
7108 && BLOCK_HEAD (n_basic_blocks
-1) == insn
)
7109 BLOCK_HEAD (n_basic_blocks
-1) = note
;
7112 add_insn_before (note
, insn
);
7116 #endif /* HAVE_prologue or HAVE_epilogue */
/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}

/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
         i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;

    while (q)
      {
        ggc_mark_rtx (q->modified);
        q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}

/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
        (*mark_machine_status) (f);
      if (mark_lang_status)
        (*mark_lang_status) (f);

      if (f->original_arg_vector)
        ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
        ggc_mark_tree (f->original_decl_initial);
    }
}

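/* Because all_functions is registered as a GC root (see init_function_once
   below), every struct function still on the next_global chain -- and
   everything reachable from it through the mark routines above -- survives
   a ggc_collect; a function can only be collected once it has been removed
   from that chain.  */
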
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
                mark_function_chain);

  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}